Compare commits

...

6 Commits

27 changed files with 917 additions and 353 deletions

View File

@@ -64,15 +64,14 @@ from core.views.ui.drilldown import (  # DrilldownTableView,; Drilldown,
DrilldownTableView,
ThresholdInfoModal,
)
- # from core.views.ui.insights import (
- # Insights,
- # InsightsChannels,
- # InsightsInfoModal,
- # InsightsMeta,
- # InsightsNicks,
- # InsightsSearch,
- # )
+ from core.views.ui.insights import (
+ Insights,
+ InsightsChannels,
+ InsightsInfoModal,
+ InsightsMeta,
+ InsightsNicks,
+ InsightsSearch,
+ )
urlpatterns = [
path("__debug__/", include("debug_toolbar.urls")),
@@ -103,12 +102,32 @@ urlpatterns = [
path("context/", DrilldownContextModal.as_view(), name="modal_context"),
path("context_table/", DrilldownContextModal.as_view(), name="modal_context_table"),
##
- # path("ui/insights/", Insights.as_view(), name="insights"),
- # path("ui/insights/search/", InsightsSearch.as_view(), name="search_insights"),
- # path("ui/insights/channels/", InsightsChannels.as_view(), name="chans_insights"),
- # path("ui/insights/nicks/", InsightsNicks.as_view(), name="nicks_insights"),
- # path("ui/insights/meta/", InsightsMeta.as_view(), name="meta_insights"),
- # path("ui/insights/modal/", InsightsInfoModal.as_view(), name="modal_insights"),
+ path("ui/insights/index/<str:index>/", Insights.as_view(), name="insights"),
+ path(
+ "ui/insights/index/<str:index>/search/",
+ InsightsSearch.as_view(),
+ name="search_insights",
+ ),
+ path(
+ "ui/insights/index/<str:index>/channels/",
+ InsightsChannels.as_view(),
+ name="chans_insights",
+ ),
+ path(
+ "ui/insights/index/<str:index>/nicks/",
+ InsightsNicks.as_view(),
+ name="nicks_insights",
+ ),
+ path(
+ "ui/insights/index/<str:index>/meta/",
+ InsightsMeta.as_view(),
+ name="meta_insights",
+ ),
+ path(
+ "ui/insights/index/<str:index>/modal/",
+ InsightsInfoModal.as_view(),
+ name="modal_insights",
+ ),
##
path(
"manage/threshold/irc/overview/",

View File

@@ -1,7 +1,12 @@
+ import os
import stripe
from django.conf import settings
from redis import StrictRedis
+ os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
if settings.STRIPE_TEST:

View File

@@ -2,7 +2,6 @@ import random
import string
import time
from abc import ABC, abstractmethod
- from datetime import datetime
from math import floor, log10
import orjson
@@ -50,10 +49,6 @@ def dedup_list(data, check_keys):
return out
- class QueryError(Exception):
-     pass
class StorageBackend(ABC):
def __init__(self, name):
self.log = logs.get_logger(name)
@@ -82,66 +77,6 @@ class StorageBackend(ABC):
def construct_query(self, **kwargs):
pass
@abstractmethod
def run_query(self, **kwargs):
pass
def parse_size(self, query_params, sizes):
if "size" in query_params:
size = query_params["size"]
if size not in sizes:
message = "Size is not permitted"
message_class = "danger"
return {"message": message, "class": message_class}
size = int(size)
else:
size = 15
return size
def parse_index(self, user, query_params, raise_error=False):
if "index" in query_params:
index = query_params["index"]
if index == "main":
index = settings.INDEX_MAIN
else:
if not user.has_perm(f"core.index_{index}"):
message = f"Not permitted to search by this index: {index}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {
"message": message,
"class": message_class,
}
if index == "meta":
index = settings.INDEX_META
elif index == "internal":
index = settings.INDEX_INT
elif index == "restricted":
if not user.has_perm("core.restricted_sources"):
message = f"Not permitted to search by this index: {index}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {
"message": message,
"class": message_class,
}
index = settings.INDEX_RESTRICTED
else:
message = f"Index is not valid: {index}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {
"message": message,
"class": message_class,
}
else:
index = settings.INDEX_MAIN
return index
def parse_query(self, query_params, tags, size, custom_query, add_bool, **kwargs):
query_created = False
if "query" in query_params:
@@ -177,85 +112,9 @@ class StorageBackend(ABC):
message_class = "warning"
return {"message": message, "class": message_class}
- def parse_source(self, user, query_params, raise_error=False):
- source = None
- if "source" in query_params:
- source = query_params["source"]
+ @abstractmethod
+ def run_query(self, **kwargs):
+ pass
if source in settings.SOURCES_RESTRICTED:
if not user.has_perm("core.restricted_sources"):
message = f"Access denied: {source}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.MAIN_SOURCES:
message = f"Invalid source: {source}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {"message": message, "class": message_class}
if source == "all":
source = None # the next block will populate it
if source:
sources = [source]
else:
sources = list(settings.MAIN_SOURCES)
if user.has_perm("core.restricted_sources"):
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
if "all" in sources:
sources.remove("all")
return sources
def parse_sort(self, query_params):
sort = None
if "sorting" in query_params:
sorting = query_params["sorting"]
if sorting not in ("asc", "desc", "none"):
message = "Invalid sort"
message_class = "danger"
return {"message": message, "class": message_class}
if sorting == "asc":
sort = "ascending"
elif sorting == "desc":
sort = "descending"
return sort
def parse_date_time(self, query_params):
if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
query_params.keys()
):
from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
return (from_ts, to_ts)
return (None, None)
def parse_sentiment(self, query_params):
sentiment = None
if "check_sentiment" in query_params:
if "sentiment_method" not in query_params:
message = "No sentiment method"
message_class = "danger"
return {"message": message, "class": message_class}
if "sentiment" in query_params:
sentiment = query_params["sentiment"]
try:
sentiment = float(sentiment)
except ValueError:
message = "Sentiment is not a float"
message_class = "danger"
return {"message": message, "class": message_class}
sentiment_method = query_params["sentiment_method"]
return (sentiment_method, sentiment)
def filter_blacklisted(self, user, response):
"""

View File

@@ -6,6 +6,14 @@ from django.conf import settings
from core.db import StorageBackend, add_defaults
from core.db.processing import parse_druid
+ from core.lib.parsing import (
+ parse_date_time,
+ parse_index,
+ parse_sentiment,
+ parse_size,
+ parse_sort,
+ parse_source,
+ )
logger = logging.getLogger(__name__)
@@ -155,12 +163,12 @@ class DruidBackend(StorageBackend):
else:
sizes = settings.MAIN_SIZES
if not size:
- size = self.parse_size(query_params, sizes)
+ size = parse_size(query_params, sizes)
if isinstance(size, dict):
return size
# I - Index
- index = self.parse_index(request.user, query_params)
+ index = parse_index(request.user, query_params)
if isinstance(index, dict):
return index
@@ -173,7 +181,7 @@ class DruidBackend(StorageBackend):
return search_query
# S - Sources
- sources = self.parse_source(request.user, query_params)
+ sources = parse_source(request.user, query_params)
if isinstance(sources, dict):
return sources
total_count = len(sources)
@@ -182,20 +190,20 @@ class DruidBackend(StorageBackend):
add_in["src"] = sources
# R - Ranges
- from_ts, to_ts = self.parse_date_time(query_params)
+ from_ts, to_ts = parse_date_time(query_params)
if from_ts:
addendum = f"{from_ts}/{to_ts}"
search_query["intervals"] = [addendum]
# S - Sort
- sort = self.parse_sort(query_params)
+ sort = parse_sort(query_params)
if isinstance(sort, dict):
return sort
if sort:
search_query["order"] = sort
# S - Sentiment
- sentiment_r = self.parse_sentiment(query_params)
+ sentiment_r = parse_sentiment(query_params)
if isinstance(sentiment_r, dict):
return sentiment_r
if sentiment_r:

View File

@@ -10,6 +10,15 @@ from core.db import StorageBackend, add_defaults
# from json import dumps
# pp = lambda x: print(dumps(x, indent=2))
from core.db.processing import parse_results
+ from core.lib.parsing import (
+ QueryError,
+ parse_date_time,
+ parse_index,
+ parse_sentiment,
+ parse_size,
+ parse_sort,
+ parse_source,
+ )
class ElasticsearchBackend(StorageBackend):
@@ -126,14 +135,16 @@ class ElasticsearchBackend(StorageBackend):
)
return query
- def construct_query(self, query, size, blank=False):
+ def construct_query(self, query, size=None, blank=False):
"""
Accept some query parameters and construct an Elasticsearch query.
"""
query_base = {
- "size": size,
+ # "size": size,
"query": {"bool": {"must": []}},
}
+ if size:
+ query_base["size"] = size
query_string = {
"query_string": {
"query": query,
@@ -163,8 +174,8 @@ class ElasticsearchBackend(StorageBackend):
query_base["query"]["bool"]["must"].append(query_string)
return query_base
- def parse(self, response):
- parsed = parse_results(response)
+ def parse(self, response, **kwargs):
+ parsed = parse_results(response, **kwargs)
return parsed
def run_query(self, user, search_query, **kwargs):
@@ -186,6 +197,127 @@ class ElasticsearchBackend(StorageBackend):
return err
return response
async def async_run_query(self, user, search_query, **kwargs):
"""
Low level helper to run an ES query.
Accept a user to pass it to the filter, so we can
avoid filtering for superusers.
Accept fields and size, for the fields we want to match and the
number of results to return.
"""
index = kwargs.get("index")
try:
response = self.client.search(body=search_query, index=index)
except RequestError as err:
print("Elasticsearch error", err)
return err
except NotFoundError as err:
print("Elasticsearch error", err)
return err
return response
async def schedule_query_results(self, rule_object):
"""
Helper to run a scheduled query with reduced functionality and async.
"""
data = rule_object.parsed
if "tags" in data:
tags = data["tags"]
else:
tags = []
if "query" in data:
query = data["query"][0]
data["query"] = query
result_map = {}
add_bool = []
add_top = []
if "source" in data:
total_count = len(data["source"])
total_sources = len(settings.MAIN_SOURCES) + len(
settings.SOURCES_RESTRICTED
)
if total_count != total_sources:
add_top_tmp = {"bool": {"should": []}}
for source_iter in data["source"]:
add_top_tmp["bool"]["should"].append(
{"match_phrase": {"src": source_iter}}
)
add_top.append(add_top_tmp)
for field, values in data.items():
if field not in ["source", "index", "tags", "query", "sentiment"]:
for value in values:
add_top.append({"match": {field: value}})
search_query = self.parse_query(data, tags, None, False, add_bool)
self.add_bool(search_query, add_bool)
self.add_top(search_query, add_top)
if "sentiment" in data:
search_query["aggs"] = {
"avg_sentiment": {
"avg": {"field": "sentiment"},
}
}
for index in data["index"]:
if "message" in search_query:
self.log.error(f"Error parsing query: {search_query['message']}")
continue
response = await self.async_run_query(
rule_object.user,
search_query,
index=index,
)
if isinstance(response, Exception):
error = response.info["error"]["root_cause"][0]["reason"]
self.log.error(f"Error running scheduled search: {error}")
raise QueryError(error)
if len(response["hits"]["hits"]) == 0:
# No results, skip
continue
aggs, response = self.parse(response, aggs=True)
if "message" in response:
self.log.error(f"Error running scheduled search: {response['message']}")
continue
result_map[index] = (aggs, response)
# Average aggregation check
# Could probably do this in elasticsearch
for index, (aggs, result) in result_map.items():
# Default to true, if no aggs are found, we still want to match
match = True
for agg_name, (operator, number) in rule_object.aggs.items():
if agg_name in aggs:
agg_value = aggs[agg_name]["value"]
# TODO: simplify this, match is default to True
if operator == ">":
if agg_value > number:
match = True
else:
match = False
elif operator == "<":
if agg_value < number:
match = True
else:
match = False
elif operator == "=":
if agg_value == number:
match = True
else:
match = False
else:
match = False
else:
# No aggregation found, but it is required
match = False
result_map[index][0][agg_name]["match"] = match
return result_map
def query_results(
self,
request,
@@ -224,12 +356,12 @@ class ElasticsearchBackend(StorageBackend):
else:
sizes = settings.MAIN_SIZES
if not size:
- size = self.parse_size(query_params, sizes)
+ size = parse_size(query_params, sizes)
if isinstance(size, dict):
return size
# I - Index
- index = self.parse_index(request.user, query_params)
+ index = parse_index(request.user, query_params)
if isinstance(index, dict):
return index
@@ -242,7 +374,7 @@ class ElasticsearchBackend(StorageBackend):
return search_query
# S - Sources
- sources = self.parse_source(request.user, query_params)
+ sources = parse_source(request.user, query_params)
if isinstance(sources, dict):
return sources
total_count = len(sources)
@@ -257,7 +389,7 @@ class ElasticsearchBackend(StorageBackend):
# R - Ranges
# date_query = False
- from_ts, to_ts = self.parse_date_time(query_params)
+ from_ts, to_ts = parse_date_time(query_params)
if from_ts:
range_query = {
"range": {
@@ -270,7 +402,7 @@ class ElasticsearchBackend(StorageBackend):
add_top.append(range_query)
# S - Sort
- sort = self.parse_sort(query_params)
+ sort = parse_sort(query_params)
if isinstance(sort, dict):
return sort
if sort:
@@ -286,7 +418,7 @@ class ElasticsearchBackend(StorageBackend):
search_query["sort"] = sorting
# S - Sentiment
- sentiment_r = self.parse_sentiment(query_params)
+ sentiment_r = parse_sentiment(query_params)
if isinstance(sentiment_r, dict):
return sentiment_r
if sentiment_r:
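The aggregation check at the end of schedule_query_results above walks rule_object.aggs, a mapping of aggregation name to an (operator, number) pair built by validate_schedule_fields, and compares each pair against the value Elasticsearch returned. A minimal standalone sketch of the same comparison, with a helper name of our own choosing (not part of the patch), showing how the TODO about defaulting match could be simplified:

import operator

# Comparison functions for the operators accepted by the rules parser.
_OPS = {">": operator.gt, "<": operator.lt, "=": operator.eq}

def aggs_match(aggs, required):
    """Return True only when every required aggregation is present and
    satisfies its (operator, number) constraint."""
    for name, (op, number) in required.items():
        if name not in aggs:
            return False  # required aggregation missing from the response
        if op not in _OPS or not _OPS[op](aggs[name]["value"], number):
            return False
    return True

# Example: an average sentiment of 0.3 satisfies the ">0.02" constraint.
print(aggs_match({"avg_sentiment": {"value": 0.3}}, {"avg_sentiment": (">", 0.02)}))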

View File

@@ -58,7 +58,7 @@ def annotate_results(results):
item["num_chans"] = num_chans[item["nick"]]
- def parse_results(results):
+ def parse_results(results, aggs=False):
results_parsed = []
stringify = ["host", "channel"]
if "hits" in results.keys():
@@ -110,6 +110,14 @@ def parse_results(results):
else:
element["time"] = time
results_parsed.append(element)
+ if aggs:
+ aggregations = {}
+ if "aggregations" in results:
+ for field in ["avg_sentiment"]:  # Add other numeric fields here
+ if field in results["aggregations"]:
+ aggregations[field] = results["aggregations"][field]
+ return (aggregations, results_parsed)
return results_parsed

View File

@@ -3,15 +3,15 @@ from django.contrib.auth.forms import UserCreationForm
from django.core.exceptions import FieldDoesNotExist
from django.forms import ModelForm
- from core.db import QueryError
- from core.lib.rules import NotificationRuleData
+ from core.db.storage import db
+ from core.lib.parsing import QueryError
+ from core.lib.rules import NotificationRuleData, RuleParseError
from .models import NotificationRule, NotificationSettings, User
- # from django.forms import ModelForm
- # Create your forms here.
+ # flake8: noqa: E501
class RestrictedFormMixin:
"""
This mixin is used to restrict the queryset of a form to the current user.
@@ -89,25 +89,30 @@ class NotificationRuleForm(RestrictedFormMixin, ModelForm):
fields = (
"name",
"data",
+ "interval",
+ "window",
"priority",
"topic",
"enabled",
)
help_texts = {
"name": "The name of the rule.",
- "priority": "The priority of the rule.",
+ "priority": "The notification priority of the rule.",
"topic": "The topic to send notifications to. Leave blank for default.",
"enabled": "Whether the rule is enabled.",
"data": "The notification rule definition.",
+ "interval": "How often to run the search. On demand evaluates messages as they are received, without running a scheduled search. The remaining options schedule a search of the database over the window below.",
+ "window": "Time window to search: 1d, 1h, 1m, 1s, etc.",
}
def clean(self):
cleaned_data = super(NotificationRuleForm, self).clean()
- data = cleaned_data.get("data")
try:
- parsed_data = NotificationRuleData(self.request.user, data)
- except ValueError as e:
- self.add_error("data", f"Parsing error: {e}")
+ # Passing db to avoid circular import
+ parsed_data = NotificationRuleData(self.request.user, cleaned_data, db=db)
+ parsed_data.test_schedule()
+ except RuleParseError as e:
+ self.add_error(e.field, f"Parsing error: {e}")
return
except QueryError as e:
self.add_error("data", f"Query error: {e}")

View File

@@ -3,7 +3,7 @@ from math import ceil
from django.conf import settings
from numpy import array_split
- from core.db.elastic import client, run_main_query
+ from core.db.storage import db
def construct_query(net, nicks):
@@ -43,27 +43,14 @@ def get_meta(request, net, nicks, iter=True):
break
meta_tmp = []
query = construct_query(net, nicks_chunked)
- results = run_main_query(
- client,
- request.user,
- query,
- custom_query=True,
- index=settings.ELASTICSEARCH_INDEX_META,
- )
- if "hits" in results.keys():
- if "hits" in results["hits"]:
- for item in results["hits"]["hits"]:
- element = item["_source"]
- element["id"] = item["_id"]
- # Split the timestamp into date and time
- ts = element["ts"]
- ts_spl = ts.split("T")
- date = ts_spl[0]
- time = ts_spl[1]
- element["date"] = date
- element["time"] = time
- meta_tmp.append(element)
+ results = db.query(
+ request.user,
+ query,
+ index=settings.INDEX_META,
+ )
+ if "object_list" in results.keys():
+ for element in results["object_list"]:
+ meta_tmp.append(element)
for x in meta_tmp:
if x not in meta:
meta.append(x)

View File

@@ -3,7 +3,7 @@ from math import ceil
from django.conf import settings
from numpy import array_split
- from core.lib.druid import client, run_main_query
+ from core.db.storage import db
def construct_query(net, nicks):
@@ -45,7 +45,7 @@ def get_nicks(request, net, nicks, iter=True):
if len(nicks_chunked) == 0:
break
query = construct_query(net, nicks_chunked)
- results = run_main_query(client, request.user, query, custom_query=True)
+ results = db.query(request.user, query)
if "hits" in results.keys():
if "hits" in results["hits"]:
for item in results["hits"]["hits"]:

core/lib/parsing.py (new file, 149 lines)
View File

@ -0,0 +1,149 @@
from datetime import datetime
from django.conf import settings
class QueryError(Exception):
pass
def parse_size(query_params, sizes):
if "size" in query_params:
size = query_params["size"]
if size not in sizes:
message = "Size is not permitted"
message_class = "danger"
return {"message": message, "class": message_class}
size = int(size)
else:
size = 15
return size
def parse_index(user, query_params, raise_error=False):
if "index" in query_params:
index = query_params["index"]
if index == "main":
index = settings.INDEX_MAIN
else:
if not user.has_perm(f"core.index_{index}"):
message = f"Not permitted to search by this index: {index}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {
"message": message,
"class": message_class,
}
if index == "meta":
index = settings.INDEX_META
elif index == "internal":
index = settings.INDEX_INT
elif index == "restricted":
if not user.has_perm("core.restricted_sources"):
message = f"Not permitted to search by this index: {index}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {
"message": message,
"class": message_class,
}
index = settings.INDEX_RESTRICTED
else:
message = f"Index is not valid: {index}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {
"message": message,
"class": message_class,
}
else:
index = settings.INDEX_MAIN
return index
def parse_source(user, query_params, raise_error=False):
source = None
if "source" in query_params:
source = query_params["source"]
if source in settings.SOURCES_RESTRICTED:
if not user.has_perm("core.restricted_sources"):
message = f"Access denied: {source}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.MAIN_SOURCES:
message = f"Invalid source: {source}"
if raise_error:
raise QueryError(message)
message_class = "danger"
return {"message": message, "class": message_class}
if source == "all":
source = None # the next block will populate it
if source:
sources = [source]
else:
sources = list(settings.MAIN_SOURCES)
if user.has_perm("core.restricted_sources"):
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
if "all" in sources:
sources.remove("all")
return sources
def parse_sort(query_params):
sort = None
if "sorting" in query_params:
sorting = query_params["sorting"]
if sorting not in ("asc", "desc", "none"):
message = "Invalid sort"
message_class = "danger"
return {"message": message, "class": message_class}
if sorting == "asc":
sort = "ascending"
elif sorting == "desc":
sort = "descending"
return sort
def parse_date_time(query_params):
if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
query_params.keys()
):
from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
return (from_ts, to_ts)
return (None, None)
def parse_sentiment(query_params):
sentiment = None
if "check_sentiment" in query_params:
if "sentiment_method" not in query_params:
message = "No sentiment method"
message_class = "danger"
return {"message": message, "class": message_class}
if "sentiment" in query_params:
sentiment = query_params["sentiment"]
try:
sentiment = float(sentiment)
except ValueError:
message = "Sentiment is not a float"
message_class = "danger"
return {"message": message, "class": message_class}
sentiment_method = query_params["sentiment_method"]
return (sentiment_method, sentiment)
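The helpers in this new module follow a convention that is easy to miss in the flattened diff: on success they return the parsed value, and on failure they either return a dict carrying a user-facing message plus a CSS class such as "danger", or raise QueryError when called with raise_error=True. A minimal caller sketch under those assumptions; the function names and the sizes values below are illustrative, not part of the patch:

from core.lib.parsing import QueryError, parse_index, parse_size

def example_view_helper(user, query_params):
    # Returns either an error dict (rendered as a notification) or the
    # validated values, mirroring how druid.py and elastic.py use these.
    size = parse_size(query_params, sizes=["15", "50"])
    if isinstance(size, dict):
        return size  # {"message": ..., "class": "danger"}
    index = parse_index(user, query_params)
    if isinstance(index, dict):
        return index
    return {"index": index, "size": size}

def example_strict_helper(user, query_params):
    # Rule validation prefers exceptions over message dicts.
    try:
        return parse_index(user, query_params, raise_error=True)
    except QueryError as e:
        return {"message": str(e), "class": "danger"}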

View File

@@ -2,20 +2,30 @@ from yaml import dump, load
from yaml.parser import ParserError
from yaml.scanner import ScannerError
- from core.db.storage import db
- from core.models import NotificationRule
try:
from yaml import CDumper as Dumper
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader, Dumper
+ from asgiref.sync import async_to_sync
from core.lib.notify import sendmsg
+ from core.lib.parsing import parse_index, parse_source
from core.util import logs
log = logs.get_logger("rules")
+ SECONDS_PER_UNIT = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
+ MAX_WINDOW = 2592000
+ class RuleParseError(Exception):
+ def __init__(self, message, field):
+ super().__init__(message)
+ self.field = field
def rule_matched(rule, message, matched):
title = f"Rule {rule.name} matched"
@@ -36,76 +46,195 @@ def rule_matched(rule, message, matched):
sendmsg(rule.user, notify_message, **cast)
def process_rules(data):
all_rules = NotificationRule.objects.filter(enabled=True)
for index, index_messages in data.items():
for message in index_messages:
for rule in all_rules:
parsed_rule = rule.parse()
matched = {}
if "index" not in parsed_rule:
continue
if "source" not in parsed_rule:
continue
rule_index = parsed_rule["index"]
rule_source = parsed_rule["source"]
if not type(rule_index) == list:
rule_index = [rule_index]
if not type(rule_source) == list:
rule_source = [rule_source]
if index not in rule_index:
continue
if message["src"] not in rule_source:
continue
matched["index"] = index
matched["source"] = message["src"]
rule_field_length = len(parsed_rule.keys())
matched_field_number = 0
for field, value in parsed_rule.items():
if not type(value) == list:
value = [value]
if field == "src":
continue
if field == "tokens":
for token in value:
if "tokens" in message:
if token in message["tokens"]:
matched_field_number += 1
matched[field] = token
# Break out of the token matching loop
break
# Continue to next field
continue
# Allow partial matches for msg
if field == "msg":
for msg in value:
if "msg" in message:
if msg.lower() in message["msg"].lower():
matched_field_number += 1
matched[field] = msg
# Break out of the msg matching loop
break
# Continue to next field
continue
if field in message and message[field] in value:
matched_field_number += 1
matched[field] = message[field]
if matched_field_number == rule_field_length - 2:
rule_matched(rule, message, matched)
class NotificationRuleData(object):
- def __init__(self, user, data):
+ def __init__(self, user, cleaned_data, db):
self.user = user
- self.data = data
+ self.object = None
+ # We are running live
+ if not isinstance(cleaned_data, dict):
+ self.object = cleaned_data
+ cleaned_data = cleaned_data.__dict__
+ self.cleaned_data = cleaned_data
+ self.db = db
+ self.data = self.cleaned_data.get("data")
self.parsed = None
+ self.aggs = {}
+ self.validate_user_permissions()
self.parse_data()
+ self.ensure_list()
self.validate_permissions()
+ self.validate_schedule_fields()
+ self.validate_time_fields()
def store_match(self, index, match):
"""
Store a match result.
"""
if self.object.match is None:
self.object.match = {}
if not isinstance(self.object.match, dict):
self.object.match = {}
self.object.match[index] = match
self.object.save()
log.debug(f"Stored match: {index} - {match}")
async def run_schedule(self):
"""
Run the schedule query.
"""
if self.db:
response = await self.db.schedule_query_results(self)
for index, (aggs, results) in response.items():
if not results:
self.store_match(index, False)
aggs_for_index = []
for agg_name in self.aggs.keys():
if agg_name in aggs:
if "match" in aggs[agg_name]:
aggs_for_index.append(aggs[agg_name]["match"])
# All required aggs are present
if len(aggs_for_index) == len(self.aggs.keys()):
if all(aggs_for_index):
self.store_match(index, True)
continue
self.store_match(index, False)
def test_schedule(self):
"""
Test the schedule query to ensure it is valid.
"""
if self.db:
sync_schedule = async_to_sync(self.db.schedule_query_results)
sync_schedule(self)
def validate_schedule_fields(self):
"""
Ensure schedule fields are valid.
index: can be a list, it will schedule one search per index.
source: can be a list, it will be the filter for each search.
tokens: can be list, it will ensure the message matches any token.
msg: can be a list, it will ensure the message contains any msg.
No other fields can be lists containing more than one item.
"""
is_schedule = self.is_schedule
if is_schedule:
allowed_list_fields = ["index", "source", "tokens", "msg"]
for field, value in self.parsed.items():
if field not in allowed_list_fields:
if len(value) > 1:
raise RuleParseError(
(
f"For scheduled rules, field {field} cannot contain "
"more than one item"
),
"data",
)
if len(str(value[0])) == 0:
raise RuleParseError(f"Field {field} cannot be empty", "data")
if "sentiment" in self.parsed:
sentiment = str(self.parsed["sentiment"][0])
sentiment = sentiment.strip()
if sentiment[0] not in [">", "<", "="]:
raise RuleParseError(
(
"Sentiment field must be a comparison operator and then a "
"float: >0.02"
),
"data",
)
operator = sentiment[0]
number = sentiment[1:]
try:
number = float(number)
except ValueError:
raise RuleParseError(
(
"Sentiment field must be a comparison operator and then a "
"float: >0.02"
),
"data",
)
self.aggs["avg_sentiment"] = (operator, number)
else:
if "query" in self.parsed:
raise RuleParseError(
"Field query cannot be used with on-demand rules", "data"
)
if "tags" in self.parsed:
raise RuleParseError(
"Field tags cannot be used with on-demand rules", "data"
)
@property
def is_schedule(self):
if "interval" in self.cleaned_data:
if self.cleaned_data["interval"] != 0:
return True
return False
def ensure_list(self):
"""
Ensure all values are lists.
"""
for field, value in self.parsed.items():
if not isinstance(value, list):
self.parsed[field] = [value]
def validate_user_permissions(self):
"""
Ensure the user can use notification rules.
"""
if not self.user.has_perm("core.use_rules"):
raise RuleParseError("User does not have permission to use rules", "data")
def validate_time_fields(self):
"""
Validate the interval and window fields.
Prohibit window being specified with an ondemand interval.
"""
interval = self.cleaned_data.get("interval")
window = self.cleaned_data.get("window")
if interval == 0 and window is not None:
raise RuleParseError(
"Window cannot be specified with on-demand interval", "window"
)
if interval and window is None:
raise RuleParseError(
"Window must be specified with non-on-demand interval", "window"
)
if window is not None:
window_number = window[:-1]
if not window_number.isdigit():
raise RuleParseError("Window prefix must be a number", "window")
window_number = int(window_number)
window_unit = window[-1]
if window_unit not in SECONDS_PER_UNIT:
raise RuleParseError(
(
"Window unit must be one of "
f"{', '.join(SECONDS_PER_UNIT.keys())},"
f" not '{window_unit}'"
),
"window",
)
window_seconds = window_number * SECONDS_PER_UNIT[window_unit]
if window_seconds > MAX_WINDOW:
raise RuleParseError(
f"Window cannot be larger than {MAX_WINDOW} seconds (30 days)",
"window",
)
def validate_permissions(self):
"""
@@ -115,24 +244,24 @@ class NotificationRuleData(object):
index = self.parsed["index"]
if type(index) == list:
for i in index:
- db.parse_index(self.user, {"index": i}, raise_error=True)
- else:
- db.parse_index(self.user, {"index": index}, raise_error=True)
+ parse_index(self.user, {"index": i}, raise_error=True)
+ # else:
+ # db.parse_index(self.user, {"index": index}, raise_error=True)
else:
# Get the default value for the user if not present
- index = db.parse_index(self.user, {}, raise_error=True)
+ index = parse_index(self.user, {}, raise_error=True)
self.parsed["index"] = index
if "source" in self.parsed:
source = self.parsed["source"]
if type(source) == list:
for i in source:
- db.parse_source(self.user, {"source": i}, raise_error=True)
- else:
- db.parse_source(self.user, {"source": source}, raise_error=True)
+ parse_source(self.user, {"source": i}, raise_error=True)
+ # else:
+ # parse_source(self.user, {"source": source}, raise_error=True)
else:
# Get the default value for the user if not present
- source = db.parse_source(self.user, {}, raise_error=True)
+ source = parse_source(self.user, {}, raise_error=True)
self.parsed["source"] = source
def parse_data(self):
@@ -142,7 +271,7 @@ class NotificationRuleData(object):
try:
self.parsed = load(self.data, Loader=Loader)
except (ScannerError, ParserError) as e:
- raise ValueError(f"Invalid YAML: {e}")
+ raise RuleParseError(f"Invalid YAML: {e}", "data")
def __str__(self):
return dump(self.parsed, Dumper=Dumper)
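For orientation, the rule definition stored in data is plain YAML that parse_data loads and ensure_list normalises. A hedged illustration of a scheduled rule under the validation above; the field values here are invented for the example, only the field names and constraints come from the code:

from yaml import safe_load

# Hypothetical rule body: index/source/tokens/msg may be lists, every
# other field must be a single value for scheduled rules.
EXAMPLE_RULE_DATA = """
index: main
source: [irc]
tokens: [phishing, credential]
msg: [password reset]
sentiment: ">0.5"
"""

# Mimics ensure_list(): every value becomes a list.
parsed = {k: v if isinstance(v, list) else [v] for k, v in safe_load(EXAMPLE_RULE_DATA).items()}

# Window strings such as "30m" are converted with SECONDS_PER_UNIT and
# capped at MAX_WINDOW (30 days) by validate_time_fields.
SECONDS_PER_UNIT = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
window = "30m"
window_seconds = int(window[:-1]) * SECONDS_PER_UNIT[window[-1]]
assert window_seconds == 1800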

View File

@@ -2,12 +2,75 @@ import msgpack
from django.core.management.base import BaseCommand
from redis import StrictRedis
- from core.lib.rules import process_rules
+ from core.lib.rules import rule_matched
+ from core.models import NotificationRule
from core.util import logs
log = logs.get_logger("processing")
def process_rules(data):
all_rules = NotificationRule.objects.filter(enabled=True)
for index, index_messages in data.items():
for message in index_messages:
for rule in all_rules:
parsed_rule = rule.parse()
matched = {}
if "index" not in parsed_rule:
continue
if "source" not in parsed_rule:
continue
rule_index = parsed_rule["index"]
rule_source = parsed_rule["source"]
# if not type(rule_index) == list:
# rule_index = [rule_index]
# if not type(rule_source) == list:
# rule_source = [rule_source]
if index not in rule_index:
continue
if message["src"] not in rule_source:
continue
matched["index"] = index
matched["source"] = message["src"]
rule_field_length = len(parsed_rule.keys())
matched_field_number = 0
for field, value in parsed_rule.items():
# if not type(value) == list:
# value = [value]
if field == "src":
continue
if field == "tokens":
for token in value:
if "tokens" in message:
if token in message["tokens"]:
matched_field_number += 1
matched[field] = token
# Break out of the token matching loop
break
# Continue to next field
continue
# Allow partial matches for msg
if field == "msg":
for msg in value:
if "msg" in message:
if msg.lower() in message["msg"].lower():
matched_field_number += 1
matched[field] = msg
# Break out of the msg matching loop
break
# Continue to next field
continue
if field in message and message[field] in value:
matched_field_number += 1
matched[field] = message[field]
if matched_field_number == rule_field_length - 2:
rule_matched(rule, message, matched)
class Command(BaseCommand):
def handle(self, *args, **options):
r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)

View File

@ -0,0 +1,52 @@
import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from asgiref.sync import sync_to_async
from django.core.management.base import BaseCommand
from core.db.storage import db
from core.lib.parsing import QueryError
from core.lib.rules import NotificationRuleData
from core.models import NotificationRule
from core.util import logs
log = logs.get_logger("scheduling")
INTERVALS = [5, 60, 900, 1800, 3600, 14400, 86400]
async def job(interval_seconds):
"""
Run all schedules matching the given interval.
:param interval_seconds: The interval to run.
"""
matching_rules = await sync_to_async(list)(
NotificationRule.objects.filter(enabled=True, interval=interval_seconds)
)
for rule in matching_rules:
log.debug(f"Running rule {rule}")
try:
rule = NotificationRuleData(rule.user, rule, db=db)
await rule.run_schedule()
# results = await db.schedule_query_results(rule.user, rule)
except QueryError as e:
log.error(f"Error running rule {rule}: {e}")
class Command(BaseCommand):
def handle(self, *args, **options):
"""
Start the scheduling process.
"""
scheduler = AsyncIOScheduler()
for interval in INTERVALS:
log.debug(f"Scheduling {interval} second job")
scheduler.add_job(job, "interval", seconds=interval, args=[interval])
scheduler.start()
loop = asyncio.get_event_loop()
try:
loop.run_forever()
except (KeyboardInterrupt, SystemExit):
log.info("Process terminating")
finally:
loop.close()
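The scheduler registers one APScheduler job per entry in INTERVALS, and each job only picks up rules whose interval field equals its own period, so this list has to stay aligned with the model's INTERVAL_CHOICES. A small sanity-check sketch; the values are copied from the diff, the variable layout is ours:

# Non-zero interval choices from core/models.py (the 0 choice is "On demand"
# and is handled by the processing command, never by the scheduler).
INTERVAL_CHOICES = [0, 5, 60, 900, 1800, 3600, 14400, 86400]
INTERVALS = [5, 60, 900, 1800, 3600, 14400, 86400]

# Every scheduled choice needs a matching job, or the rule never runs.
assert set(INTERVALS) == set(INTERVAL_CHOICES) - {0}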

View File

@ -0,0 +1,23 @@
# Generated by Django 4.1.3 on 2023-01-14 14:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0015_notificationrule_topic'),
]
operations = [
migrations.AddField(
model_name='notificationrule',
name='interval',
field=models.CharField(choices=[('ondemand', 'On demand'), ('minute', 'Every minute'), ('15m', 'Every 15 minutes'), ('30m', 'Every 30 minutes'), ('hour', 'Every hour'), ('4h', 'Every 4 hours'), ('day', 'Every day'), ('week', 'Every week'), ('month', 'Every month')], default='ondemand', max_length=255),
),
migrations.AddField(
model_name='notificationrule',
name='window',
field=models.CharField(blank=True, max_length=255, null=True),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 4.1.3 on 2023-01-14 14:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0016_notificationrule_interval_notificationrule_window'),
]
operations = [
migrations.AlterField(
model_name='notificationrule',
name='interval',
field=models.IntegerField(choices=[(0, 'On demand'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=0),
),
]

View File

@ -0,0 +1,27 @@
# Generated by Django 4.1.5 on 2023-01-15 00:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0017_alter_notificationrule_interval'),
]
operations = [
migrations.AlterModelOptions(
name='perms',
options={'permissions': (('post_irc', 'Can post to IRC'), ('post_discord', 'Can post to Discord'), ('use_insights', 'Can use the Insights page'), ('use_rules', 'Can use the Rules page'), ('index_internal', 'Can use the internal index'), ('index_meta', 'Can use the meta index'), ('index_restricted', 'Can use the restricted index'), ('restricted_sources', 'Can access restricted sources'))},
),
migrations.AddField(
model_name='notificationrule',
name='match',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='notificationrule',
name='interval',
field=models.IntegerField(choices=[(0, 'On demand'), (5, 'Every 5 seconds'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=0),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 4.1.5 on 2023-01-15 01:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0018_alter_perms_options_notificationrule_match_and_more'),
]
operations = [
migrations.AlterField(
model_name='notificationrule',
name='match',
field=models.JSONField(blank=True, null=True),
),
]

View File

@@ -1,6 +1,7 @@
import logging
import stripe
+ from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.db import models
from yaml import load
@@ -23,6 +24,17 @@ PRIORITY_CHOICES = (
(5, "max"),
)
+ INTERVAL_CHOICES = (
+ (0, "On demand"),
+ (5, "Every 5 seconds"),
+ (60, "Every minute"),
+ (900, "Every 15 minutes"),
+ (1800, "Every 30 minutes"),
+ (3600, "Every hour"),
+ (14400, "Every 4 hours"),
+ (86400, "Every day"),
+ )
class Plan(models.Model):
name = models.CharField(max_length=255, unique=True)
@@ -78,6 +90,19 @@ class User(AbstractUser):
def get_notification_settings(self):
return NotificationSettings.objects.get_or_create(user=self)[0]
@property
def allowed_indices(self):
indices = [settings.INDEX_MAIN]
if self.has_perm("core.index_meta"):
indices.append(settings.INDEX_META)
if self.has_perm("core.index_internal"):
indices.append(settings.INDEX_INT)
if self.has_perm("core.index_restricted"):
if self.has_perm("core.restricted_sources"):
indices.append(settings.INDEX_RESTRICTED)
return indices
class Session(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
@@ -125,16 +150,10 @@ class ContentBlock(models.Model):
class Perms(models.Model):
class Meta:
permissions = (
- ("bypass_hashing", "Can bypass field hashing"),
- ("bypass_blacklist", "Can bypass the blacklist"),
- ("bypass_encryption", "Can bypass field encryption"),
- ("bypass_obfuscation", "Can bypass field obfuscation"),
- ("bypass_delay", "Can bypass data delay"),
- ("bypass_randomisation", "Can bypass data randomisation"),
("post_irc", "Can post to IRC"),
("post_discord", "Can post to Discord"),
- ("query_search", "Can search with query strings"),
("use_insights", "Can use the Insights page"),
+ ("use_rules", "Can use the Rules page"),
("index_internal", "Can use the internal index"),
("index_meta", "Can use the meta index"),
("index_restricted", "Can use the restricted index"),
@@ -147,8 +166,11 @@ class NotificationRule(models.Model):
name = models.CharField(max_length=255)
priority = models.IntegerField(choices=PRIORITY_CHOICES, default=1)
topic = models.CharField(max_length=255, null=True, blank=True)
+ interval = models.IntegerField(choices=INTERVAL_CHOICES, default=0)
+ window = models.CharField(max_length=255, null=True, blank=True)
enabled = models.BooleanField(default=True)
data = models.TextField()
+ match = models.JSONField(null=True, blank=True)
def __str__(self):
return f"{self.user} - {self.name}"

View File

@@ -286,9 +286,21 @@
{% endif %}
{% if perms.core.use_insights %}
- <a class="navbar-item" href="{# url 'insights' #}">
- Insights
- </a>
+ <div class="navbar-item has-dropdown is-hoverable">
+ <a class="navbar-link">
+ Insights
+ </a>
+ <div class="navbar-dropdown">
+ {% for index in user.allowed_indices %}
+ {% if index != "meta" %}
+ <a class="navbar-item" href="{% url 'insights' index=index %}">
+ {{ index }}
+ </a>
+ {% endif %}
+ {% endfor %}
+ </div>
+ </div>
{% endif %}
<a class="navbar-item add-button">
Install

View File

@@ -11,6 +11,8 @@
<th>id</th>
<th>user</th>
<th>name</th>
+ <th>interval</th>
+ <th>window</th>
<th>priority</th>
<th>topic</th>
<th>enabled</th>
@@ -22,6 +24,8 @@
<td>{{ item.id }}</td>
<td>{{ item.user }}</td>
<td>{{ item.name }}</td>
+ <td>{{ item.interval }}s</td>
+ <td>{{ item.window|default_if_none:"—" }}</td>
<td>{{ item.priority }}</td>
<td>{{ item.topic|default_if_none:"—" }}</td>
<td>

View File

@@ -4,7 +4,7 @@
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-vals='{"net": "{{ item.net }}", "nick": "{{ item.nick }}"}'
- hx-post="{% url 'chans_insights' %}"
+ hx-post="{% url 'chans_insights' index=index %}"
hx-trigger="load"
hx-target="#channels"
hx-swap="outerHTML">
@@ -13,7 +13,7 @@
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-vals='{"net": "{{ item.net }}", "nick": "{{ item.nick }}"}'
- hx-post="{% url 'nicks_insights' %}"
+ hx-post="{% url 'nicks_insights' index=index %}"
hx-trigger="load"
hx-target="#nicks"
hx-swap="outerHTML">
@@ -81,7 +81,7 @@
{% if item.src == 'irc' %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
- hx-post="{% url 'modal_insights' %}"
+ hx-post="{% url 'modal_insights' index=index %}"
hx-vals='{"net": "{{ item.net }}", "nick": "{{ item.nick }}", "channel": "{{ item.channel }}"}'
hx-target="#modals-here"
hx-trigger="click"

View File

@@ -2,39 +2,7 @@
{% load static %}
{% block content %}
{% include 'partials/notify.html' %}
- <script>
- // tabbed browsing for the modal
- function initTabs() {
- TABS.forEach((tab) => {
- tab.addEventListener('click', (e) => {
- let selected = tab.getAttribute('data-tab');
- updateActiveTab(tab);
- updateActiveContent(selected);
- })
- })
- }
- function updateActiveTab(selected) {
- TABS.forEach((tab) => {
- if (tab && tab.classList.contains(ACTIVE_CLASS)) {
- tab.classList.remove(ACTIVE_CLASS);
- }
- });
- selected.classList.add(ACTIVE_CLASS);
- }
- function updateActiveContent(selected) {
- CONTENT.forEach((item) => {
- if (item && item.classList.contains(ACTIVE_CLASS)) {
- item.classList.remove(ACTIVE_CLASS);
- }
- let data = item.getAttribute('data-content');
- if (data === selected) {
- item.classList.add(ACTIVE_CLASS);
- }
- });
- }
- </script>
+ <script src="{% static 'tabs.js' %}"></script>
<style>
.icon { border-bottom: 0px !important;}
</style>
@@ -47,7 +15,7 @@
{% csrf_token %}
<div class="field has-addons">
<div class="control is-expanded has-icons-left">
- <input id="query_full" name="query_full" class="input" type="text" placeholder="nickname">
+ <input id="query_full" name="query" class="input" type="text" placeholder="nickname">
<span class="icon is-small is-left">
<i class="fas fa-magnifying-glass"></i>
</span>
@@ -55,7 +23,7 @@
<div class="control">
<button
class="button is-info is-fullwidth"
- hx-post="{% url 'search_insights' %}"
+ hx-post="{% url 'search_insights' index=index %}"
hx-trigger="click"
hx-target="#info"
hx-swap="outerHTML">

View File

@@ -3,7 +3,7 @@
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-vals='{"net": "{{ net }}", "nicks": "{{ nicks }}"}'
- hx-post="{% url 'meta_insights' %}"
+ hx-post="{% url 'meta_insights' index=index %}"
hx-trigger="load"
hx-target="#meta"
hx-swap="outerHTML">

View File

@@ -1,4 +1,4 @@
- from django.contrib.auth.mixins import LoginRequiredMixin
+ from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from core.forms import NotificationRuleForm, NotificationSettingsForm
from core.models import NotificationRule, NotificationSettings
@@ -7,7 +7,8 @@ from core.views.helpers import ObjectCreate, ObjectDelete, ObjectList, ObjectUpd
# Notifications - we create a new notification settings object if there isn't one
# Hence, there is only an update view, not a create view.
- class NotificationsUpdate(LoginRequiredMixin, ObjectUpdate):
+ class NotificationsUpdate(LoginRequiredMixin, PermissionRequiredMixin, ObjectUpdate):
+ permission_required = "use_rules"
model = NotificationSettings
form_class = NotificationSettingsForm
@@ -33,7 +34,7 @@ class NotificationsUpdate(LoginRequiredMixin, ObjectUpdate):
class RuleList(LoginRequiredMixin, ObjectList):
list_template = "partials/rule-list.html"
model = NotificationRule
- page_title = "List of notification rules."
+ page_title = "List of notification rules"
list_url_name = "rules"
list_url_args = ["type"]
@@ -41,19 +42,22 @@ class RuleList(LoginRequiredMixin, ObjectList):
submit_url_name = "rule_create"
- class RuleCreate(LoginRequiredMixin, ObjectCreate):
+ class RuleCreate(LoginRequiredMixin, PermissionRequiredMixin, ObjectCreate):
+ permission_required = "use_rules"
model = NotificationRule
form_class = NotificationRuleForm
submit_url_name = "rule_create"
- class RuleUpdate(LoginRequiredMixin, ObjectUpdate):
+ class RuleUpdate(LoginRequiredMixin, PermissionRequiredMixin, ObjectUpdate):
+ permission_required = "use_rules"
model = NotificationRule
form_class = NotificationRuleForm
submit_url_name = "rule_update"
- class RuleDelete(LoginRequiredMixin, ObjectDelete):
+ class RuleDelete(LoginRequiredMixin, PermissionRequiredMixin, ObjectDelete):
+ permission_required = "use_rules"
model = NotificationRule

View File

@@ -7,7 +7,7 @@ from django.views import View
from rest_framework.parsers import FormParser
from rest_framework.views import APIView
- from core.db.druid import query_single_result
+ from core.db.storage import db
from core.lib.meta import get_meta
from core.lib.nicktrace import get_nicks
from core.lib.threshold import (
@@ -23,8 +23,9 @@ class Insights(LoginRequiredMixin, PermissionRequiredMixin, View):
template_name = "ui/insights/insights.html"
permission_required = "use_insights"
- def get(self, request):
- return render(request, self.template_name)
+ def get(self, request, index):
+ context = {"index": index}
+ return render(request, self.template_name, context)
class InsightsSearch(LoginRequiredMixin, PermissionRequiredMixin, View):
@@ -32,13 +33,16 @@ class InsightsSearch(LoginRequiredMixin, PermissionRequiredMixin, View):
template_name = "ui/insights/info.html"
permission_required = "use_insights"
- def post(self, request):
+ def post(self, request, index):
query_params = request.POST.dict()
- if "query_full" in query_params:
- query_params["query_full"] = "nick: " + query_params["query_full"]
- context = query_single_result(request, query_params)
+ if "query" in query_params:
+ query_params["query"] = "nick: " + query_params["query"]
+ query_params["source"] = "all"
+ query_params["index"] = index
+ context = db.query_single_result(request, query_params)
if not context:
return HttpResponseForbidden()
+ context["index"] = index
return render(request, self.template_name, context)
@@ -47,7 +51,7 @@ class InsightsChannels(LoginRequiredMixin, PermissionRequiredMixin, APIView):
template_name = "ui/insights/channels.html"
permission_required = "use_insights"
- def post(self, request):
+ def post(self, request, index):
if "net" not in request.data:
return HttpResponse("No net")
if "nick" not in request.data:
@@ -58,7 +62,13 @@ class InsightsChannels(LoginRequiredMixin, PermissionRequiredMixin, APIView):
num_users = annotate_num_users(net, chans)
if not chans:
return HttpResponseForbidden()
- context = {"net": net, "nick": nick, "chans": chans, "num_users": num_users}
+ context = {
+ "net": net,
+ "nick": nick,
+ "chans": chans,
+ "num_users": num_users,
+ "index": index,
+ }
return render(request, self.template_name, context)
@@ -67,7 +77,7 @@ class InsightsNicks(LoginRequiredMixin, PermissionRequiredMixin, APIView):
template_name = "ui/insights/nicks.html"
permission_required = "use_insights"
- def post(self, request):
+ def post(self, request, index):
if "net" not in request.data:
return HttpResponse("No net")
if "nick" not in request.data:
@@ -82,7 +92,13 @@ class InsightsNicks(LoginRequiredMixin, PermissionRequiredMixin, APIView):
online = annotate_online(net, nicks)
if not nicks:
return HttpResponseForbidden()
- context = {"net": net, "nick": nick, "nicks": nicks, "online": online}
+ context = {
+ "net": net,
+ "nick": nick,
+ "nicks": nicks,
+ "online": online,
+ "index": index,
+ }
return render(request, self.template_name, context)
@@ -91,7 +107,7 @@ class InsightsMeta(LoginRequiredMixin, PermissionRequiredMixin, APIView):
template_name = "ui/insights/meta.html"
permission_required = "use_insights"
- def post(self, request):
+ def post(self, request, index):
if "net" not in request.data:
return HttpResponse("No net")
if "nicks" not in request.data:
@@ -99,6 +115,10 @@ class InsightsMeta(LoginRequiredMixin, PermissionRequiredMixin, APIView):
net = request.data["net"]
nicks = request.data["nicks"]
nicks = literal_eval(nicks)
+ # Check the user has permissions to use the meta index
+ if not request.user.has_perm("core.index_meta"):
+ return HttpResponseForbidden()
meta = get_meta(request, net, nicks)
unique_values = {}
# Create a map of unique values for each key for each nick
@@ -122,7 +142,7 @@ class InsightsMeta(LoginRequiredMixin, PermissionRequiredMixin, APIView):
meta_dedup[k].add(v)
unique_values[nick][k].remove(v)
- context = {"net": net, "nicks": nicks, "meta": meta_dedup}
+ context = {"net": net, "nicks": nicks, "meta": meta_dedup, "index": index}
return render(request, self.template_name, context)
@@ -131,7 +151,7 @@ class InsightsInfoModal(LoginRequiredMixin, PermissionRequiredMixin, APIView):
template_name = "modals/drilldown.html"
permission_required = "use_insights"
- def post(self, request):
+ def post(self, request, index):
if "net" not in request.data:
return JsonResponse({"success": False})
if "nick" not in request.data:
@@ -163,5 +183,6 @@ class InsightsInfoModal(LoginRequiredMixin, PermissionRequiredMixin, APIView):
"inter_users": inter_users,
"num_users": num_users,
"num_chans": num_chans,
+ "index": index,
}
return render(request, self.template_name, context)

View File

@ -58,6 +58,36 @@ services:
networks:
- default
scheduling:
image: pathogen/neptune:latest
container_name: scheduling_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
migration:
image: pathogen/neptune:latest
container_name: migration_neptune

View File

@@ -4,7 +4,7 @@ django
pre-commit
django-crispy-forms
crispy-bulma
- elasticsearch
+ elasticsearch[async]
stripe
django-rest-framework
numpy
@@ -19,3 +19,4 @@ django-debug-toolbar
django-debug-toolbar-template-profiler
orjson
msgpack
+ apscheduler