Compare commits

...

32 Commits

SHA1 Message Date
b61eda52e6 Update URLs 2025-05-09 21:01:58 +00:00
a4c3834b62 Port Manticore and debug 2025-01-23 11:35:39 +00:00
fe087eb591 Update more Redis URLs 2025-01-23 11:34:24 +00:00
6ff75890b9 Update Redis path to socket 2025-01-23 11:33:37 +00:00
c199d3d078 Update admin URL 2025-01-23 11:33:17 +00:00
66f4e4c264 Add static directory generated by collectstatic to ignore 2025-01-23 11:32:54 +00:00
b16db665e5 Update to work with Podman 2025-01-23 11:32:13 +00:00
86fb2ac593 Update to run with Podman 2024-12-29 17:37:23 +00:00
a519a4ce5e Change Redis parser class 2023-09-30 10:45:47 +00:00
f62f0881a1 Show ingest status 2023-02-14 07:20:28 +00:00
fd47a3ddc8 Use the sentiment aggregation value if present 2023-02-14 07:20:28 +00:00
d8cb3a263b Add dot 2023-02-14 07:20:27 +00:00
27fea06198 Allow disabling ingesting 2023-02-13 21:03:33 +00:00
0e12b0d185 Properly search tokens and annotate in matched field 2023-02-13 18:14:25 +00:00
6fe31d99a9 Re-add matches field 2023-02-13 17:23:30 +00:00
1ab7a95ebd Remove debug statements 2023-02-13 17:23:27 +00:00
d581d787de Increase topic length 2023-02-13 07:20:28 +00:00
4ead6ff7c1 Use cachalot to invalidate caches 2023-02-11 17:24:13 +00:00
9fcf5041f0 Use Hiredis 2023-02-11 16:01:42 +00:00
2fc476b830 Vary cache on URL 2023-02-11 15:48:46 +00:00
11d4542412 Cache the table and remove CRUD tools included in mixins 2023-02-11 15:44:20 +00:00
5d6f96bbf3 Cache more object lists 2023-02-11 14:58:36 +00:00
40a710f41e Add caching 2023-02-11 14:03:50 +00:00
87c232d3f9 Fix notification delivery 2023-02-10 22:52:59 +00:00
df273a6009 Switch database location and use mixins for CRUD 2023-02-10 20:57:17 +00:00
115c6dd1ad Add mixins and adjust database path 2023-02-10 20:53:11 +00:00
330cc6c401 Fix showing the debug toolbar 2023-02-10 07:20:12 +00:00
2050e6cb47 Add more comments about source parsing 2023-02-10 07:20:36 +00:00
7d0ebf87bd Fix source parsing and set default to all 2023-02-10 07:20:22 +00:00
c5856ce20b Use HX-Replace-Url properly and don't include column shifter twice on load 2023-02-10 07:20:22 +00:00
0518c9fe1c Remove comma after last entry in column shifter 2023-02-10 07:20:11 +00:00
29e57628e4 HX-Replace URLs instead of pushing 2023-02-09 23:38:12 +00:00
63 changed files with 2820 additions and 2479 deletions

.gitignore (vendored, 4 lines changed)

@@ -58,7 +58,6 @@ cover/
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
@@ -156,3 +155,6 @@ cython_debug/
.bash_history
.python_history
.vscode/
stack.env
static/


@@ -2,17 +2,17 @@
FROM python:3
ARG OPERATION
RUN useradd -d /code pathogen
RUN useradd -d /code xf
RUN mkdir -p /code
RUN chown -R pathogen:pathogen /code
RUN chown -R xf:xf /code
RUN mkdir -p /conf/static
RUN chown -R pathogen:pathogen /conf
RUN chown -R xf:xf /conf
RUN mkdir /venv
RUN chown pathogen:pathogen /venv
RUN chown xf:xf /venv
USER pathogen
USER xf
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
WORKDIR /code


@@ -1,20 +1,20 @@
run:
docker-compose --env-file=stack.env up -d
docker-compose -f docker-compose.prod.yml --env-file=stack.env up -d
build:
docker-compose --env-file=stack.env build
docker-compose -f docker-compose.prod.yml --env-file=stack.env build
stop:
docker-compose --env-file=stack.env down
docker-compose -f docker-compose.prod.yml --env-file=stack.env down
log:
docker-compose --env-file=stack.env logs -f
docker-compose -f docker-compose.prod.yml --env-file=stack.env logs -f --names
migrate:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
makemigrations:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
auth:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"
docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"


@@ -1,3 +1,5 @@
from os import getenv
# Elasticsearch settings
ELASTICSEARCH_URL = "10.1.0.1"
ELASTICSEARCH_PORT = 9200
@@ -29,7 +31,7 @@ DRILLDOWN_DEFAULT_PARAMS = {
"size": "15",
"index": "main",
"sorting": "desc",
"source": "4ch",
"source": "all",
}
@@ -104,3 +106,8 @@ META_QUERY_SIZE = 10000
DEBUG = True
PROFILER = False
REDIS_HOST = getenv("REDIS_HOST", "redis_fisk_dev")
REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
REDIS_DB = int(getenv("REDIS_DB", "10"))
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))

app/local_settings.py (new file, 87 lines)

@@ -0,0 +1,87 @@
from os import getenv
trues = ("t", "true", "yes", "y", "1")
# Elasticsearch settings
ELASTICSEARCH_URL = getenv("ELASTICSEARCH_URL", "10.1.0.1")
ELASTICSEARCH_PORT = int(getenv("ELASTICSEARCH_PORT", "9200"))
ELASTICSEARCH_TLS = getenv("ELASTICSEARCH_TLS", "True").lower() in trues
ELASTICSEARCH_USERNAME = getenv("ELASTICSEARCH_USERNAME", "admin")
ELASTICSEARCH_PASSWORD = getenv("ELASTICSEARCH_PASSWORD", "secret")
# Manticore settings
MANTICORE_URL = getenv("MANTICORE_URL", "http://example-db-1:9308")
DB_BACKEND = getenv("DB_BACKEND", "MANTICORE")
# Common DB settings
INDEX_MAIN = getenv("INDEX_MAIN", "main")
INDEX_RESTRICTED = getenv("INDEX_RESTRICTED", "restricted")
INDEX_META = getenv("INDEX_META", "meta")
INDEX_INT = getenv("INDEX_INT", "internal")
INDEX_RULE_STORAGE = getenv("INDEX_RULE_STORAGE", "rule_storage")
MAIN_SIZES = getenv("MAIN_SIZES", "1,5,15,30,50,100,250,500,1000").split(",")
MAIN_SIZES_ANON = getenv("MAIN_SIZES_ANON", "1,5,15,30,50,100").split(",")
MAIN_SOURCES = getenv("MAIN_SOURCES", "dis,4ch,all").split(",")
SOURCES_RESTRICTED = getenv("SOURCES_RESTRICTED", "irc").split(",")
CACHE = getenv("CACHE", "False").lower() in trues
CACHE_TIMEOUT = int(getenv("CACHE_TIMEOUT", "2"))
DRILLDOWN_RESULTS_PER_PAGE = int(getenv("DRILLDOWN_RESULTS_PER_PAGE", "15"))
DRILLDOWN_DEFAULT_PARAMS = {
"size": getenv("DRILLDOWN_DEFAULT_SIZE", "15"),
"index": getenv("DRILLDOWN_DEFAULT_INDEX", "main"),
"sorting": getenv("DRILLDOWN_DEFAULT_SORTING", "desc"),
"source": getenv("DRILLDOWN_DEFAULT_SOURCE", "all"),
}
# URLs
DOMAIN = getenv("DOMAIN", "example.com")
URL = getenv("URL", f"https://{DOMAIN}")
# Access control
ALLOWED_HOSTS = getenv("ALLOWED_HOSTS", f"127.0.0.1,{DOMAIN}").split(",")
# CSRF
CSRF_TRUSTED_ORIGINS = getenv("CSRF_TRUSTED_ORIGINS", URL).split(",")
# Stripe
BILLING_ENABLED = getenv("BILLING_ENABLED", "false").lower() in trues
STRIPE_TEST = getenv("STRIPE_TEST", "True").lower() in trues
STRIPE_API_KEY_TEST = getenv("STRIPE_API_KEY_TEST", "")
STRIPE_PUBLIC_API_KEY_TEST = getenv("STRIPE_PUBLIC_API_KEY_TEST", "")
STRIPE_API_KEY_PROD = getenv("STRIPE_API_KEY_PROD", "")
STRIPE_PUBLIC_API_KEY_PROD = getenv("STRIPE_PUBLIC_API_KEY_PROD", "")
STRIPE_ENDPOINT_SECRET = getenv("STRIPE_ENDPOINT_SECRET", "")
STATIC_ROOT = getenv("STATIC_ROOT", "")
SECRET_KEY = getenv("SECRET_KEY", "a")
STRIPE_ADMIN_COUPON = getenv("STRIPE_ADMIN_COUPON", "")
# Threshold
THRESHOLD_ENDPOINT = getenv("THRESHOLD_ENDPOINT", "http://threshold:13869")
THRESHOLD_API_KEY = getenv("THRESHOLD_API_KEY", "api_1")
THRESHOLD_API_TOKEN = getenv("THRESHOLD_API_TOKEN", "")
THRESHOLD_API_COUNTER = getenv("THRESHOLD_API_COUNTER", "")
# NickTrace
NICKTRACE_MAX_ITERATIONS = int(getenv("NICKTRACE_MAX_ITERATIONS", "4"))
NICKTRACE_MAX_CHUNK_SIZE = int(getenv("NICKTRACE_MAX_CHUNK_SIZE", "500"))
NICKTRACE_QUERY_SIZE = int(getenv("NICKTRACE_QUERY_SIZE", "10000"))
# Meta
META_MAX_ITERATIONS = int(getenv("META_MAX_ITERATIONS", "4"))
META_MAX_CHUNK_SIZE = int(getenv("META_MAX_CHUNK_SIZE", "500"))
META_QUERY_SIZE = int(getenv("META_QUERY_SIZE", "10000"))
DEBUG = getenv("DEBUG", "True").lower() in trues
PROFILER = getenv("PROFILER", "False").lower() in trues
REDIS_HOST = getenv("REDIS_HOST", "redis_neptune_dev")
REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
REDIS_DB = int(getenv("REDIS_DB", "1"))
REDIS_DB_CACHE = int(getenv("REDIS_DB_CACHE", "10"))
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
# Elasticsearch blacklist
ELASTICSEARCH_BLACKLISTED = {}
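The new settings file leans on one idiom throughout: read a string from the environment, fall back to a default, and cast explicitly. A minimal sketch of the boolean variant (the trues tuple above), with env_bool as a hypothetical helper; the settings file simply inlines the comparison:

from os import getenv

trues = ("t", "true", "yes", "y", "1")

def env_bool(name, default="false"):
    # Unset variables fall back to the default string, so defaults and
    # overrides are parsed by exactly the same rule.
    return getenv(name, default).lower() in trues

CACHE = env_bool("CACHE")  # CACHE=yes, CACHE=1 and CACHE=True all enable it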


@@ -43,7 +43,10 @@ INSTALLED_APPS = [
"django_tables2",
"django_tables2_bulma_template",
"prettyjson",
"mixins",
"cachalot",
]
CRISPY_TEMPLATE_PACK = "bulma"
CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",)
DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html"
@@ -52,7 +55,9 @@ MIDDLEWARE = [
"debug_toolbar.middleware.DebugToolbarMiddleware",
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
# 'django.middleware.cache.UpdateCacheMiddleware',
"django.middleware.common.CommonMiddleware",
# 'django.middleware.cache.FetchFromCacheMiddleware',
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
@@ -87,7 +92,7 @@ WSGI_APPLICATION = "app.wsgi.application"
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": BASE_DIR / "db.sqlite3",
"NAME": "/conf/db.sqlite3",
}
}
@@ -145,7 +150,7 @@ REST_FRAMEWORK = {
INTERNAL_IPS = [
"127.0.0.1",
"10.1.10.11",
# "10.1.10.11",
]
DEBUG_TOOLBAR_PANELS = [
@@ -164,10 +169,27 @@ DEBUG_TOOLBAR_PANELS = [
"debug_toolbar.panels.logging.LoggingPanel",
"debug_toolbar.panels.redirects.RedirectsPanel",
"debug_toolbar.panels.profiling.ProfilingPanel",
"cachalot.panels.CachalotPanel",
]
from app.local_settings import * # noqa
# Performance optimisations
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
# "LOCATION": "unix:///var/run/socks/redis.sock",
# "LOCATION": f"redis://{REDIS_HOST}:{REDIS_PORT}",
"LOCATION": "unix:///var/run/neptune-redis.sock",
"OPTIONS": {
"db": REDIS_DB,
# "parser_class": "django_redis.cache.RedisCache",
# "PASSWORD": REDIS_PASSWORD,
"pool_class": "redis.BlockingConnectionPool",
},
}
}
if PROFILER: # noqa - trust me its there
import pyroscope
@@ -179,3 +201,12 @@ if PROFILER: # noqa - trust me its there
# "region": f'{os.getenv("REGION")}',
# }
)
def show_toolbar(request):
return DEBUG # noqa: from local imports
DEBUG_TOOLBAR_CONFIG = {
"SHOW_TOOLBAR_CALLBACK": show_toolbar,
}
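For context, a minimal sketch of how calling code consumes the cache configured above. Nothing below depends on the Redis backend or the Unix socket, so switching LOCATION back to one of the commented-out TCP URLs needs no changes here; compute_expensive_thing is a hypothetical stand-in:

from django.core.cache import cache

def get_expensive_thing(key):
    result = cache.get(key)
    if result is None:
        result = compute_expensive_thing(key)  # hypothetical expensive work
        cache.set(key, result, timeout=300)    # seconds
    return result

cachalot, added to INSTALLED_APPS above, builds on the same cache: it caches ORM queries and invalidates them automatically when the underlying tables change, which is what the "Use cachalot to invalidate caches" commit is after.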


@@ -58,6 +58,9 @@ from core.views.manage.threshold.threshold import (
ThresholdIRCOverview,
)
# Stats
from core.views.manage.monolith import stats
# Main tool pages
from core.views.ui.drilldown import ( # DrilldownTableView,; Drilldown,
DrilldownContextModal,
@@ -92,7 +95,7 @@ urlpatterns = [
),
path("cancel/", TemplateView.as_view(template_name="cancel.html"), name="cancel"),
path("portal", Portal.as_view(), name="portal"),
path("admin/", admin.site.urls),
path("sapp/", admin.site.urls),
path("accounts/", include("django.contrib.auth.urls")),
path("accounts/signup/", Signup.as_view(), name="signup"),
##
@@ -311,4 +314,14 @@ urlpatterns = [
notifications.RuleClear.as_view(),
name="rule_clear",
),
path(
"manage/monolith/stats/",
stats.MonolithStats.as_view(),
name="monolith_stats",
),
path(
"manage/monolith/stats_db/<str:type>/",
stats.MonolithDBStats.as_view(),
name="monolith_stats_db",
)
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
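The stats views referenced here are not included in this diff; a hypothetical sketch of their shape, inferred from the templates added at the end of this comparison (MonolithStats renders the page shell, MonolithDBStats serves the fragment that htmx polls every five seconds). Every name and path below is an assumption except the URL patterns above:

from django.shortcuts import render
from django.views import View
from django.views.generic import TemplateView

class MonolithStats(TemplateView):
    template_name = "manage/monolith/stats.html"  # assumed path

class MonolithDBStats(View):
    def get(self, request, type):
        stats = fetch_db_stats(type)  # hypothetical helper querying the DB backend
        return render(request, "manage/monolith/stats_table.html", {"object": stats})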


@@ -6,8 +6,15 @@ from redis import StrictRedis
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
# /var/run/neptune-redis.sock
# use the socket
r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=settings.REDIS_DB)
# r = StrictRedis(
# host=settings.REDIS_HOST,
# port=settings.REDIS_PORT,
# password=settings.REDIS_PASSWORD,
# db=settings.REDIS_DB
# )
if settings.STRIPE_TEST:
stripe.api_key = settings.STRIPE_API_KEY_TEST


@@ -168,6 +168,71 @@ class StorageBackend(ABC):
# Actually get rid of all the things we set to None
response["hits"]["hits"] = [hit for hit in response["hits"]["hits"] if hit]
def add_bool(self, search_query, add_bool):
"""
Add the specified boolean matches to search query.
"""
if not add_bool:
return
for item in add_bool:
search_query["query"]["bool"]["must"].append({"match_phrase": item})
def add_top(self, search_query, add_top, negative=False):
"""
Merge add_top with the base of the search_query.
"""
if not add_top:
return
if negative:
for item in add_top:
if "must_not" in search_query["query"]["bool"]:
search_query["query"]["bool"]["must_not"].append(item)
else:
search_query["query"]["bool"]["must_not"] = [item]
else:
for item in add_top:
if "query" not in search_query:
search_query["query"] = {"bool": {"must": []}}
search_query["query"]["bool"]["must"].append(item)
def schedule_check_aggregations(self, rule_object, result_map):
"""
Check the results of a scheduled query for aggregations.
"""
if rule_object.aggs is None:
return result_map
for index, (meta, result) in result_map.items():
# Default to true, if no aggs are found, we still want to match
match = True
for agg_name, (operator, number) in rule_object.aggs.items():
if agg_name in meta["aggs"]:
agg_value = meta["aggs"][agg_name]["value"]
# TODO: simplify this, match is default to True
if operator == ">":
if agg_value > number:
match = True
else:
match = False
elif operator == "<":
if agg_value < number:
match = True
else:
match = False
elif operator == "=":
if agg_value == number:
match = True
else:
match = False
else:
match = False
else:
# No aggregation found, but it is required
match = False
result_map[index][0]["aggs"][agg_name]["match"] = match
return result_map
def query(self, user, search_query, **kwargs):
# For time tracking
start = time.process_time()
@@ -198,7 +263,20 @@ class StorageBackend(ABC):
if "took" in response:
if response["took"] is None:
return None
if len(response["hits"]["hits"]) == 0:
if "error" in response:
message = f"Error: {response['error']}"
message_class = "danger"
time_took = (time.process_time() - start) * 1000
# Round to 3 significant figures
time_took_rounded = round(
time_took, 3 - int(floor(log10(abs(time_took)))) - 1
)
return {
"message": message,
"class": message_class,
"took": time_took_rounded,
}
elif len(response["hits"]["hits"]) == 0:
message = "No results."
message_class = "danger"
time_took = (time.process_time() - start) * 1000
@@ -213,7 +291,7 @@ class StorageBackend(ABC):
}
# For Druid
if "error" in response:
elif "error" in response:
if "errorMessage" in response:
context = {
"message": response["errorMessage"],
@@ -240,6 +318,106 @@ class StorageBackend(ABC):
time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1)
return {"object_list": response_parsed, "took": time_took_rounded}
def construct_context_query(
self, index, net, channel, src, num, size, type=None, nicks=None
):
# Get the initial query
query = self.construct_query(None, size, blank=True)
extra_must = []
extra_should = []
extra_should2 = []
if num:
extra_must.append({"match_phrase": {"num": num}})
if net:
extra_must.append({"match_phrase": {"net": net}})
if channel:
extra_must.append({"match": {"channel": channel}})
if nicks:
for nick in nicks:
extra_should2.append({"match": {"nick": nick}})
types = ["msg", "notice", "action", "kick", "topic", "mode"]
fields = [
"nick",
"ident",
"host",
"channel",
"ts",
"msg",
"type",
"net",
"src",
"tokens",
]
query["fields"] = fields
if index == "internal":
fields.append("mtype")
if channel == "*status" or type == "znc":
if {"match": {"channel": channel}} in extra_must:
extra_must.remove({"match": {"channel": channel}})
extra_should2 = []
# Type is one of msg or notice
# extra_should.append({"match": {"mtype": "msg"}})
# extra_should.append({"match": {"mtype": "notice"}})
extra_should.append({"match": {"type": "znc"}})
extra_should.append({"match": {"type": "self"}})
extra_should2.append({"match": {"type": "znc"}})
extra_should2.append({"match": {"nick": channel}})
elif type == "auth":
if {"match": {"channel": channel}} in extra_must:
extra_must.remove({"match": {"channel": channel}})
extra_should2 = []
extra_should2.append({"match": {"nick": channel}})
# extra_should2.append({"match": {"mtype": "msg"}})
# extra_should2.append({"match": {"mtype": "notice"}})
extra_should.append({"match": {"type": "query"}})
extra_should2.append({"match": {"type": "self"}})
extra_should.append({"match": {"nick": channel}})
else:
for ctype in types:
extra_should.append({"match": {"mtype": ctype}})
else:
for ctype in types:
extra_should.append({"match": {"type": ctype}})
# query = {
# "index": index,
# "limit": size,
# "query": {
# "bool": {
# "must": [
# # {"equals": {"src": src}},
# # {
# # "bool": {
# # "should": [*extra_should],
# # }
# # },
# # {
# # "bool": {
# # "should": [*extra_should2],
# # }
# # },
# *extra_must,
# ]
# }
# },
# "fields": fields,
# # "_source": False,
# }
if extra_must:
for x in extra_must:
query["query"]["bool"]["must"].append(x)
if extra_should:
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
if extra_should2:
query["query"]["bool"]["must"].append(
{"bool": {"should": [*extra_should2]}}
)
return query
@abstractmethod
def query_results(self, **kwargs):
pass
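A small worked example of the merge helpers above, with an invented backend instance and values:

search_query = {"query": {"bool": {"must": []}}}

backend.add_bool(search_query, [{"nick": "someuser"}])
# must is now [{"match_phrase": {"nick": "someuser"}}]

backend.add_top(search_query, [{"range": {"ts": {"gte": "now-30d"}}}])
# the range clause is appended to the same "must" list

backend.add_top(search_query, [{"match": {"type": "znc"}}], negative=True)
# a "must_not" list is created (or extended) with the clause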


@@ -338,8 +338,15 @@ class ElasticsearchBackend(StorageBackend):
{"match_phrase": {"src": source_iter}}
)
add_top.append(add_top_tmp)
if "tokens" in data:
add_top_tmp = {"bool": {"should": []}}
for token in data["tokens"]:
add_top_tmp["bool"]["should"].append(
{"match_phrase": {"tokens": token}}
)
add_top.append(add_top_tmp)
for field, values in data.items():
if field not in ["source", "index", "tags", "query", "sentiment"]:
if field not in ["source", "index", "tags", "query", "sentiment", "tokens"]:
for value in values:
add_top.append({"match": {field: value}})
# Bypass the check for query and tags membership since we can search by msg, etc
@@ -350,8 +357,8 @@ class ElasticsearchBackend(StorageBackend):
range_query = {
"range": {
"ts": {
"gte": f"now-{rule_object.window}/d",
"lte": "now/d",
"gte": f"now-{rule_object.window}",
"lte": "now",
}
}
}
@@ -367,44 +374,6 @@ class ElasticsearchBackend(StorageBackend):
return search_query
def schedule_check_aggregations(self, rule_object, result_map):
"""
Check the results of a scheduled query for aggregations.
"""
if rule_object.aggs is None:
return result_map
for index, (meta, result) in result_map.items():
# Default to true, if no aggs are found, we still want to match
match = True
for agg_name, (operator, number) in rule_object.aggs.items():
if agg_name in meta["aggs"]:
agg_value = meta["aggs"][agg_name]["value"]
# TODO: simplify this, match is default to True
if operator == ">":
if agg_value > number:
match = True
else:
match = False
elif operator == "<":
if agg_value < number:
match = True
else:
match = False
elif operator == "=":
if agg_value == number:
match = True
else:
match = False
else:
match = False
else:
# No aggregation found, but it is required
match = False
result_map[index][0]["aggs"][agg_name]["match"] = match
return result_map
def schedule_query_results_test_sync(self, rule_object):
"""
Helper to run a scheduled query test with reduced functionality.
@@ -542,13 +511,17 @@ class ElasticsearchBackend(StorageBackend):
total_sources = (
len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
)
# If the sources the user has access to are equal to all
# possible sources, then we don't need to add the source
# filter to the query.
if total_count != total_sources:
add_top_tmp = {"bool": {"should": []}}
for source_iter in sources:
add_top_tmp["bool"]["should"].append(
{"match_phrase": {"src": source_iter}}
)
if rule_object is not None and query_params["source"] != "all":
if query_params["source"] != "all":
add_top.append(add_top_tmp)
# R - Ranges
@@ -591,24 +564,28 @@ class ElasticsearchBackend(StorageBackend):
if isinstance(sentiment_r, dict):
return sentiment_r
if sentiment_r:
if rule_object is not None:
sentiment_index = "meta.aggs.avg_sentiment.value"
else:
sentiment_index = "sentiment"
sentiment_method, sentiment = sentiment_r
range_query_compare = {"range": {"sentiment": {}}}
range_query_compare = {"range": {sentiment_index: {}}}
range_query_precise = {
"match": {
"sentiment": None,
sentiment_index: None,
}
}
if sentiment_method == "below":
range_query_compare["range"]["sentiment"]["lt"] = sentiment
range_query_compare["range"][sentiment_index]["lt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "above":
range_query_compare["range"]["sentiment"]["gt"] = sentiment
range_query_compare["range"][sentiment_index]["gt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "exact":
range_query_precise["match"]["sentiment"] = sentiment
range_query_precise["match"][sentiment_index] = sentiment
add_top.append(range_query_precise)
elif sentiment_method == "nonzero":
range_query_precise["match"]["sentiment"] = 0
range_query_precise["match"][sentiment_index] = 0
add_top_negative.append(range_query_precise)
# Add in the additional information we already populated
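For reference, the clauses the sentiment branch now emits, shown for a rule-backed query (which reads the aggregated value out of meta) versus a plain search; thresholds invented:

{"range": {"meta.aggs.avg_sentiment.value": {"gt": 0.5}}}  # rule query, "above 0.5"
{"range": {"sentiment": {"lt": -0.2}}}                     # plain query, "below -0.2"
{"match": {"sentiment": 0}}                                # "nonzero": appended to the negative list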


@@ -1,12 +1,24 @@
import logging
from datetime import datetime
from pprint import pprint
import httpx
import orjson
import requests
from django.conf import settings
from core.db import StorageBackend, add_defaults, dedup_list
from core.db.processing import annotate_results, parse_results
from core.db.processing import parse_results
from core.lib.parsing import (
QueryError,
parse_date_time,
parse_index,
parse_rule,
parse_sentiment,
parse_size,
parse_sort,
parse_source,
)
logger = logging.getLogger(__name__)
@@ -21,14 +33,27 @@ class ManticoreBackend(StorageBackend):
"""
pass # we use requests
def construct_query(self, query, size, index, blank=False):
async def async_initialise(self, **kwargs):
"""
Initialise the Manticore client in async mode
"""
pass # we use requests
def delete_rule_entries(self, rule_id):
"""
Delete all entries for a given rule.
:param rule_id: The rule ID to delete.
"""
# TODO
def construct_query(self, query, size=None, blank=False, **kwargs):
"""
Accept some query parameters and construct an OpenSearch query.
"""
if not size:
size = 5
query_base = {
"index": index,
"index": kwargs.get("index"),
"limit": size,
"query": {"bool": {"must": []}},
}
@@ -39,11 +64,79 @@ class ManticoreBackend(StorageBackend):
query_base["query"]["bool"]["must"].append(query_string)
return query_base
def run_query(self, client, user, search_query):
def parse(self, response, **kwargs):
parsed = parse_results(response, **kwargs)
return parsed
def run_query(self, user, search_query, **kwargs):
"""
Low level helper to run Manticore query.
"""
index = kwargs.get("index")
raw = kwargs.get("raw")
if search_query and not raw:
search_query["index"] = index
path = kwargs.get("path", "json/search")
if raw:
response = requests.post(
f"{settings.MANTICORE_URL}/{path}", search_query
)
else:
response = requests.post(
f"{settings.MANTICORE_URL}/{path}", json=search_query
)
return orjson.loads(response.text)
async def async_run_query(self, user, search_query, **kwargs):
"""
Low level helper to run Manticore query asynchronously.
"""
index = kwargs.get("index")
search_query["index"] = index
async with httpx.AsyncClient() as client:
response = await client.post(
f"{settings.MANTICORE_URL}/json/search", json=search_query
)
return response
return orjson.loads(response.text)
async def async_store_matches(self, matches):
"""
Store a list of matches in Manticore.
:param index: The index to store the matches in.
:param matches: A list of matches to store.
"""
# TODO
def store_matches(self, matches):
"""
Store a list of matches in Manticore.
:param index: The index to store the matches in.
:param matches: A list of matches to store.
"""
# TODO
def prepare_schedule_query(self, rule_object):
"""
Helper to run a scheduled query with reduced functionality.
"""
# TODO
def schedule_query_results_test_sync(self, rule_object):
"""
Helper to run a scheduled query test with reduced functionality.
Sync version for running from Django forms.
Does not return results.
"""
# TODO
async def schedule_query_results(self, rule_object):
"""
Helper to run a scheduled query with reduced functionality and async.
"""
# TODO
def query_results(
self,
@@ -67,117 +160,77 @@ class ManticoreBackend(StorageBackend):
query_created = False
source = None
add_defaults(query_params)
# Check size
# Now, run the helpers for SIQTSRSS/ADR
# S - Size
# I - Index
# Q - Query
# T - Tags
# S - Source
# R - Ranges
# S - Sort
# S - Sentiment
# A - Annotate
# D - Dedup
# R - Reverse
# S - Size
if request.user.is_anonymous:
sizes = settings.MANTICORE_MAIN_SIZES_ANON
sizes = settings.MAIN_SIZES_ANON
else:
sizes = settings.MANTICORE_MAIN_SIZES
sizes = settings.MAIN_SIZES
if not size:
if "size" in query_params:
size = query_params["size"]
if size not in sizes:
message = "Size is not permitted"
message_class = "danger"
return {"message": message, "class": message_class}
size = int(size)
size = parse_size(query_params, sizes)
if isinstance(size, dict):
return size
rule_object = parse_rule(request.user, query_params)
if isinstance(rule_object, dict):
return rule_object
if rule_object is not None:
index = settings.INDEX_RULE_STORAGE
add_bool.append({"rule_id": str(rule_object.id)})
else:
size = 20
# I - Index
index = parse_index(request.user, query_params)
if isinstance(index, dict):
return index
# Check index
if "index" in query_params:
index = query_params["index"]
if index == "main":
index = settings.MANTICORE_INDEX_MAIN
else:
if not request.user.has_perm(f"core.index_{index}"):
message = "Not permitted to search by this index"
message_class = "danger"
return {
"message": message,
"class": message_class,
}
if index == "meta":
index = settings.MANTICORE_INDEX_META
elif index == "internal":
index = settings.MANTICORE_INDEX_INT
else:
message = "Index is not valid."
message_class = "danger"
return {
"message": message,
"class": message_class,
}
else:
index = settings.MANTICORE_INDEX_MAIN
# Q/T - Query/Tags
search_query = self.parse_query(
query_params, tags, size, custom_query, add_bool
)
# Query should be a dict, so check if it contains message here
if "message" in search_query:
return search_query
# Create the search query
if "query" in query_params:
query = query_params["query"]
search_query = self.construct_query(query, size, index)
query_created = True
else:
if custom_query:
search_query = custom_query
if tags:
# Get a blank search query
if not query_created:
search_query = self.construct_query(None, size, index, blank=True)
query_created = True
for tagname, tagvalue in tags.items():
add_bool.append({tagname: tagvalue})
required_any = ["query_full", "query", "tags"]
if not any([field in query_params.keys() for field in required_any]):
if not custom_query:
message = "Empty query!"
message_class = "warning"
return {"message": message, "class": message_class}
# Check for a source
if "source" in query_params:
source = query_params["source"]
if source in settings.SOURCES_RESTRICTED:
if not request.user.has_perm("core.restricted_sources"):
message = "Access denied"
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.MAIN_SOURCES:
message = "Invalid source"
message_class = "danger"
return {"message": message, "class": message_class}
if source == "all":
source = None # the next block will populate it
if source:
sources = [source]
else:
sources = list(settings.MAIN_SOURCES)
if request.user.has_perm("core.restricted_sources"):
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
# S - Sources
sources = parse_source(request.user, query_params)
if isinstance(sources, dict):
return sources
total_count = len(sources)
# Total is -1 due to the "all" source
total_sources = (
len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
)
# If the sources the user has access to are equal to all
# possible sources, then we don't need to add the source
# filter to the query.
if total_count != total_sources:
add_top_tmp = {"bool": {"should": []}}
total_count = 0
for source_iter in sources:
add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
total_count += 1
total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
if not total_count == total_sources:
add_top_tmp["bool"]["should"].append(
{"match_phrase": {"src": source_iter}}
)
if query_params["source"] != "all":
add_top.append(add_top_tmp)
# Date/time range
if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
query_params.keys()
):
from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
from_ts = int(from_ts.timestamp())
to_ts = int(to_ts.timestamp())
# R - Ranges
# date_query = False
from_ts, to_ts = parse_date_time(query_params)
if from_ts:
range_query = {
"range": {
"ts": {
@@ -188,115 +241,87 @@ class ManticoreBackend(StorageBackend):
}
add_top.append(range_query)
# Sorting
if "sorting" in query_params:
sorting = query_params["sorting"]
if sorting not in ("asc", "desc", "none"):
message = "Invalid sort"
message_class = "danger"
return {"message": message, "class": message_class}
if sorting in ("asc", "desc"):
sort = [
# S - Sort
sort = parse_sort(query_params)
if isinstance(sort, dict):
return sort
if rule_object is not None:
field = "match_ts"
else:
field = "ts"
if sort:
# For Druid compatibility
sort_map = {"ascending": "asc", "descending": "desc"}
sorting = [
{
"ts": {
"order": sorting,
field: {
"order": sort_map[sort],
}
}
]
search_query["sort"] = sorting
# Sentiment handling
if "check_sentiment" in query_params:
if "sentiment_method" not in query_params:
message = "No sentiment method"
message_class = "danger"
return {"message": message, "class": message_class}
if "sentiment" in query_params:
sentiment = query_params["sentiment"]
try:
sentiment = float(sentiment)
except ValueError:
message = "Sentiment is not a float"
message_class = "danger"
return {"message": message, "class": message_class}
sentiment_method = query_params["sentiment_method"]
range_query_compare = {"range": {"sentiment": {}}}
# S - Sentiment
sentiment_r = parse_sentiment(query_params)
if isinstance(sentiment_r, dict):
return sentiment_r
if sentiment_r:
if rule_object is not None:
sentiment_index = "meta.aggs.avg_sentiment.value"
else:
sentiment_index = "sentiment"
sentiment_method, sentiment = sentiment_r
range_query_compare = {"range": {sentiment_index: {}}}
range_query_precise = {
"match": {
"sentiment": None,
sentiment_index: None,
}
}
if sentiment_method == "below":
range_query_compare["range"]["sentiment"]["lt"] = sentiment
range_query_compare["range"][sentiment_index]["lt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "above":
range_query_compare["range"]["sentiment"]["gt"] = sentiment
range_query_compare["range"][sentiment_index]["gt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "exact":
range_query_precise["match"]["sentiment"] = sentiment
range_query_precise["match"][sentiment_index] = sentiment
add_top.append(range_query_precise)
elif sentiment_method == "nonzero":
range_query_precise["match"]["sentiment"] = 0
range_query_precise["match"][sentiment_index] = 0
add_top_negative.append(range_query_precise)
if add_bool:
# if "bool" not in search_query["query"]:
# search_query["query"]["bool"] = {}
# if "must" not in search_query["query"]["bool"]:
# search_query["query"]["bool"] = {"must": []}
# Add in the additional information we already populated
self.add_bool(search_query, add_bool)
self.add_top(search_query, add_top)
self.add_top(search_query, add_top_negative, negative=True)
for item in add_bool:
search_query["query"]["bool"]["must"].append({"match": item})
if add_top:
for item in add_top:
search_query["query"]["bool"]["must"].append(item)
if add_top_negative:
for item in add_top_negative:
if "must_not" in search_query["query"]["bool"]:
search_query["query"]["bool"]["must_not"].append(item)
else:
search_query["query"]["bool"]["must_not"] = [item]
if sort:
search_query["sort"] = sort
pprint(search_query)
results = self.run_query(
self.client,
request.user, # passed through run_main_query to filter_blacklisted
response = self.query(
request.user,
search_query,
index=index,
)
if not results:
if not response:
message = "Error running query"
message_class = "danger"
return {"message": message, "class": message_class}
# results = results.to_dict()
if "error" in results:
message = results["error"]
if "error" in response:
message = response["error"]
message_class = "danger"
return {"message": message, "class": message_class}
results_parsed = parse_results(results)
if annotate:
annotate_results(results_parsed)
if "dedup" in query_params:
if query_params["dedup"] == "on":
dedup = True
else:
dedup = False
else:
dedup = False
if "message" in response:
return response
if reverse:
results_parsed = results_parsed[::-1]
# A/D/R - Annotate/Dedup/Reverse
response["object_list"] = self.process_results(
response["object_list"],
annotate=annotate,
dedup=dedup,
dedup_fields=dedup_fields,
reverse=reverse,
)
if dedup:
if not dedup_fields:
dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
results_parsed = dedup_list(results_parsed, dedup_fields)
context = {
"object_list": results_parsed,
"card": results["hits"]["total"],
"took": results["took"],
}
if "cache" in results:
context["cache"] = results["cache"]
context = response
return context
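A usage sketch of the reworked run_query above, from inside a view: the default mode posts JSON to json/search, while raw mode sends the payload as a plain request body to an alternative path. The path value below is an assumption for illustration, not taken from the diff:

backend = ManticoreBackend()

# Normal JSON search
hits = backend.run_query(request.user, search_query, index="main")

# Raw mode: payload goes out as the request body instead of JSON
status = backend.run_query(request.user, "SHOW STATUS", raw=True, path="cli")  # assumed endpoint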

core/db/manticore_orig.py (new file, 302 lines)

@@ -0,0 +1,302 @@
import logging
from datetime import datetime
from pprint import pprint
import requests
from django.conf import settings
from core.db import StorageBackend, add_defaults, dedup_list
from core.db.processing import annotate_results, parse_results
logger = logging.getLogger(__name__)
class ManticoreBackend(StorageBackend):
def __init__(self):
super().__init__("manticore")
def initialise(self, **kwargs):
"""
Initialise the Manticore client
"""
pass # we use requests
def construct_query(self, query, size, index, blank=False):
"""
Accept some query parameters and construct an OpenSearch query.
"""
if not size:
size = 5
query_base = {
"index": index,
"limit": size,
"query": {"bool": {"must": []}},
}
query_string = {
"query_string": query,
}
if not blank:
query_base["query"]["bool"]["must"].append(query_string)
return query_base
def run_query(self, client, user, search_query):
response = requests.post(
f"{settings.MANTICORE_URL}/json/search", json=search_query
)
return response
def query_results(
self,
request,
query_params,
size=None,
annotate=True,
custom_query=False,
reverse=False,
dedup=False,
dedup_fields=None,
tags=None,
):
query = None
message = None
message_class = None
add_bool = []
add_top = []
add_top_negative = []
sort = None
query_created = False
source = None
add_defaults(query_params)
# Check size
if request.user.is_anonymous:
sizes = settings.MANTICORE_MAIN_SIZES_ANON
else:
sizes = settings.MANTICORE_MAIN_SIZES
if not size:
if "size" in query_params:
size = query_params["size"]
if size not in sizes:
message = "Size is not permitted"
message_class = "danger"
return {"message": message, "class": message_class}
size = int(size)
else:
size = 20
# Check index
if "index" in query_params:
index = query_params["index"]
if index == "main":
index = settings.MANTICORE_INDEX_MAIN
else:
if not request.user.has_perm(f"core.index_{index}"):
message = "Not permitted to search by this index"
message_class = "danger"
return {
"message": message,
"class": message_class,
}
if index == "meta":
index = settings.MANTICORE_INDEX_META
elif index == "internal":
index = settings.MANTICORE_INDEX_INT
else:
message = "Index is not valid."
message_class = "danger"
return {
"message": message,
"class": message_class,
}
else:
index = settings.MANTICORE_INDEX_MAIN
# Create the search query
if "query" in query_params:
query = query_params["query"]
search_query = self.construct_query(query, size, index)
query_created = True
else:
if custom_query:
search_query = custom_query
if tags:
# Get a blank search query
if not query_created:
search_query = self.construct_query(None, size, index, blank=True)
query_created = True
for tagname, tagvalue in tags.items():
add_bool.append({tagname: tagvalue})
required_any = ["query_full", "query", "tags"]
if not any([field in query_params.keys() for field in required_any]):
if not custom_query:
message = "Empty query!"
message_class = "warning"
return {"message": message, "class": message_class}
# Check for a source
if "source" in query_params:
source = query_params["source"]
if source in settings.SOURCES_RESTRICTED:
if not request.user.has_perm("core.restricted_sources"):
message = "Access denied"
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.MAIN_SOURCES:
message = "Invalid source"
message_class = "danger"
return {"message": message, "class": message_class}
if source == "all":
source = None # the next block will populate it
if source:
sources = [source]
else:
sources = list(settings.MAIN_SOURCES)
if request.user.has_perm("core.restricted_sources"):
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
add_top_tmp = {"bool": {"should": []}}
total_count = 0
for source_iter in sources:
add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
total_count += 1
total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
if not total_count == total_sources:
add_top.append(add_top_tmp)
# Date/time range
if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
query_params.keys()
):
from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
from_ts = int(from_ts.timestamp())
to_ts = int(to_ts.timestamp())
range_query = {
"range": {
"ts": {
"gt": from_ts,
"lt": to_ts,
}
}
}
add_top.append(range_query)
# Sorting
if "sorting" in query_params:
sorting = query_params["sorting"]
if sorting not in ("asc", "desc", "none"):
message = "Invalid sort"
message_class = "danger"
return {"message": message, "class": message_class}
if sorting in ("asc", "desc"):
sort = [
{
"ts": {
"order": sorting,
}
}
]
# Sentiment handling
if "check_sentiment" in query_params:
if "sentiment_method" not in query_params:
message = "No sentiment method"
message_class = "danger"
return {"message": message, "class": message_class}
if "sentiment" in query_params:
sentiment = query_params["sentiment"]
try:
sentiment = float(sentiment)
except ValueError:
message = "Sentiment is not a float"
message_class = "danger"
return {"message": message, "class": message_class}
sentiment_method = query_params["sentiment_method"]
range_query_compare = {"range": {"sentiment": {}}}
range_query_precise = {
"match": {
"sentiment": None,
}
}
if sentiment_method == "below":
range_query_compare["range"]["sentiment"]["lt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "above":
range_query_compare["range"]["sentiment"]["gt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "exact":
range_query_precise["match"]["sentiment"] = sentiment
add_top.append(range_query_precise)
elif sentiment_method == "nonzero":
range_query_precise["match"]["sentiment"] = 0
add_top_negative.append(range_query_precise)
if add_bool:
# if "bool" not in search_query["query"]:
# search_query["query"]["bool"] = {}
# if "must" not in search_query["query"]["bool"]:
# search_query["query"]["bool"] = {"must": []}
for item in add_bool:
search_query["query"]["bool"]["must"].append({"match": item})
if add_top:
for item in add_top:
search_query["query"]["bool"]["must"].append(item)
if add_top_negative:
for item in add_top_negative:
if "must_not" in search_query["query"]["bool"]:
search_query["query"]["bool"]["must_not"].append(item)
else:
search_query["query"]["bool"]["must_not"] = [item]
if sort:
search_query["sort"] = sort
pprint(search_query)
results = self.run_query(
self.client,
request.user, # passed through run_main_query to filter_blacklisted
search_query,
)
if not results:
message = "Error running query"
message_class = "danger"
return {"message": message, "class": message_class}
# results = results.to_dict()
if "error" in results:
message = results["error"]
message_class = "danger"
return {"message": message, "class": message_class}
results_parsed = parse_results(results)
if annotate:
annotate_results(results_parsed)
if "dedup" in query_params:
if query_params["dedup"] == "on":
dedup = True
else:
dedup = False
else:
dedup = False
if reverse:
results_parsed = results_parsed[::-1]
if dedup:
if not dedup_fields:
dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
results_parsed = dedup_list(results_parsed, dedup_fields)
context = {
"object_list": results_parsed,
"card": results["hits"]["total"],
"took": results["took"],
}
if "cache" in results:
context["cache"] = results["cache"]
return context


@@ -1,5 +1,5 @@
from datetime import datetime
import ast
from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online
@@ -92,6 +92,11 @@ def parse_results(results, meta=None):
for field in list(element.keys()):
if element[field] == "":
del element[field]
# Unfold the tokens
if "tokens" in element:
if element["tokens"].startswith('["') or element["tokens"].startswith("['"):
tokens_parsed = ast.literal_eval(element["tokens"])
element["tokens"] = tokens_parsed
# Split the timestamp into date and time
if "ts" not in element:


@@ -2,6 +2,7 @@ from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.core.exceptions import FieldDoesNotExist
from django.forms import ModelForm
from mixins.restrictions import RestrictedFormMixin
from core.db.storage import db
from core.lib.parsing import QueryError
@@ -12,36 +13,6 @@ from .models import NotificationRule, NotificationSettings, User
# flake8: noqa: E501
class RestrictedFormMixin:
"""
This mixin is used to restrict the queryset of a form to the current user.
The request object is passed from the view.
Fieldargs is used to pass additional arguments to the queryset filter.
"""
fieldargs = {}
def __init__(self, *args, **kwargs):
# self.fieldargs = {}
self.request = kwargs.pop("request")
super().__init__(*args, **kwargs)
for field in self.fields:
# Check it's not something like a CharField which has no queryset
if not hasattr(self.fields[field], "queryset"):
continue
model = self.fields[field].queryset.model
# Check if the model has a user field
try:
model._meta.get_field("user")
# Add the user to the queryset filters
self.fields[field].queryset = model.objects.filter(
user=self.request.user, **self.fieldargs.get(field, {})
)
except FieldDoesNotExist:
pass
class NewUserForm(UserCreationForm):
email = forms.EmailField(required=True)
@@ -117,6 +88,7 @@ class NotificationRuleForm(RestrictedFormMixin, ModelForm):
"url",
"service",
"policy",
"ingest",
"enabled",
)
help_texts = {
@@ -131,6 +103,7 @@ class NotificationRuleForm(RestrictedFormMixin, ModelForm):
"window": "Time window to search: 1d, 1h, 1m, 1s, etc.",
"amount": "Amount of matches to be returned for scheduled queries. Cannot be used with on-demand queries.",
"policy": "When to trigger this policy.",
"ingest": "Whether to ingest matches.",
}
def clean(self):


@@ -4,7 +4,7 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
extra_should = []
extra_should2 = []
if num:
extra_must.append({"match_phrase": {"num": num}})
extra_must.append({"equals": {"num": num}})
if net:
extra_must.append({"match_phrase": {"net": net}})
if channel:
@@ -52,7 +52,7 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
extra_should.append({"match": {"nick": channel}})
else:
for ctype in types:
extra_should.append({"match": {"mtype": ctype}})
extra_should.append({"equals": {"mtype": ctype}})
else:
for ctype in types:
extra_should.append({"match": {"type": ctype}})
@@ -84,4 +84,5 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
if extra_should2:
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should2]}})
return query
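The switch from match to equals reflects the two query types in Manticore's JSON search API: match runs a full-text query against an indexed field, while equals does an exact comparison on an attribute, which is what a numeric field like num needs. A minimal illustration with invented values:

query = {
    "index": "main",
    "limit": 5,
    "query": {"bool": {"must": [
        {"equals": {"num": 42}},             # exact attribute comparison
        {"match": {"channel": "#example"}},  # full-text match
    ]}},
}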


@@ -56,9 +56,11 @@ def webhook_sendmsg(**kwargs):
msg = kwargs.get("msg", None)
notification_settings = kwargs.get("notification_settings")
url = notification_settings.get("url")
headers = {"Content-type": "application/json"}
try:
requests.post(
f"{url}",
headers=headers,
data=msg,
)
except requests.exceptions.ConnectionError as e:


@@ -90,6 +90,7 @@ def parse_index(user, query_params, raise_error=False):
}
else:
index = settings.INDEX_MAIN
return index
@@ -98,6 +99,7 @@ def parse_source(user, query_params, raise_error=False):
if "source" in query_params:
source = query_params["source"]
# Validate permissions for restricted sources
if source in settings.SOURCES_RESTRICTED:
if not user.has_perm("core.restricted_sources"):
message = f"Access denied: {source}"
@@ -105,6 +107,8 @@ def parse_source(user, query_params, raise_error=False):
raise QueryError(message)
message_class = "danger"
return {"message": message, "class": message_class}
# Check validity of source
elif source not in settings.MAIN_SOURCES:
message = f"Invalid source: {source}"
if raise_error:
@@ -118,11 +122,17 @@ def parse_source(user, query_params, raise_error=False):
if source:
sources = [source]
else:
# Here we need to populate what "all" means for the user.
# They may only have access to a subset of the sources.
# We build a custom source list with ones they have access
# to, and then remove "all" from the list.
sources = list(settings.MAIN_SOURCES)
if user.has_perm("core.restricted_sources"):
# If the user can use restricted sources, add them in.
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
# Get rid of "all", it's just a meta-source
if "all" in sources:
sources.remove("all")


@@ -9,6 +9,7 @@ except ImportError:
from yaml import Loader, Dumper
import uuid
from copy import deepcopy
from datetime import datetime
import orjson
@@ -44,14 +45,18 @@ def format_ntfy(**kwargs):
rule: The rule object, must be specified
index: The index the rule matched on, can be None
message: The message to send, can be None
meta:
matched: The matched fields, can be None
total_hits: The total number of matches, optional
"""
rule = kwargs.get("rule")
index = kwargs.get("index")
message = kwargs.get("message")
matched = kwargs.get("matched")
total_hits = kwargs.get("total_hits", 0)
meta = kwargs.get("meta", {})
total_hits = meta.get("total_hits", 0)
matched = meta.get("matched")
if message:
# Dump the message in YAML for readability
messages_formatted = ""
@@ -88,25 +93,22 @@ def format_webhook(**kwargs):
rule: The rule object, must be specified
index: The index the rule matched on, can be None
message: The message to send, can be None, but will be sent as None
meta:
matched: The matched fields, can be None, but will be sent as None
total_hits: The total number of matches, optional
notification_settings: The notification settings, must be specified
priority: The priority of the message, optional
topic: The topic of the message, optional
"""
rule = kwargs.get("rule")
index = kwargs.get("index")
# rule = kwargs.get("rule")
# index = kwargs.get("index")
message = kwargs.get("message")
matched = kwargs.get("matched")
total_hits = kwargs.get("total_hits", 0)
meta = kwargs.get("meta")
notification_settings = kwargs.get("notification_settings")
notify_message = {
"rule_id": rule.id,
"rule_name": rule.name,
"matched": matched,
"total_hits": total_hits,
"index": index,
"data": message,
"meta": meta,
}
if "priority" in notification_settings:
notify_message["priority"] = notification_settings["priority"]
@@ -144,20 +146,23 @@ def rule_notify(rule, index, message, meta=None):
# Don't send anything
return
# double sigh
message_copy = deepcopy(message)
for index, _ in enumerate(message_copy):
if "meta" in message_copy[index]:
del message_copy[index]["meta"]
# Create a cast we can reuse for the formatting helpers and sendmsg
cast = {
"title": title,
"user": rule.user,
"rule": rule,
"index": index,
"message": message,
"message": message_copy,
"notification_settings": notification_settings,
}
if meta:
if "matched" in meta:
cast["matched"] = meta["matched"]
if "total_hits" in meta:
cast["total_hits"] = meta["total_hits"]
cast["meta"] = meta
if rule.service == "ntfy":
cast["msg"] = format_ntfy(**cast)
@@ -229,6 +234,16 @@ class NotificationRuleData(object):
break
# Continue to next field
continue
if field == "tokens":
# Allow partial matches for tokens
for token in value:
if "tokens" in message:
if token.lower() in [x.lower() for x in message["tokens"]]:
matched[field] = token
# Break out of the token matching loop
break
# Continue to next field
continue
if field in message and message[field] in value:
# Do exact matches for all other fields
matched[field] = message[field]
@@ -320,7 +335,8 @@ class NotificationRuleData(object):
if not isinstance(matches, list):
matches = [matches]
matches_copy = matches.copy()
match_ts = datetime.utcnow().isoformat()
# match_ts = datetime.utcnow().isoformat()
match_ts = int(datetime.utcnow().timestamp())
batch_id = uuid.uuid4()
# Filter empty fields in meta
@@ -341,8 +357,9 @@ class NotificationRuleData(object):
:param index: the index to store the matches for
:param matches: the matches to store
"""
new_matches = self.reform_matches(index, matches, meta, mode)
await self.db.async_store_matches(new_matches)
# new_matches = self.reform_matches(index, matches, meta, mode)
if self.object.ingest:
await self.db.async_store_matches(matches)
def ingest_matches_sync(self, index, matches, meta, mode):
"""
@@ -350,8 +367,9 @@ class NotificationRuleData(object):
:param index: the index to store the matches for
:param matches: the matches to store
"""
new_matches = self.reform_matches(index, matches, meta, mode)
self.db.store_matches(new_matches)
# new_matches = self.reform_matches(index, matches, meta, mode)
if self.object.ingest:
self.db.store_matches(matches)
async def rule_matched(self, index, message, meta, mode):
"""
@@ -386,8 +404,11 @@ class NotificationRuleData(object):
if aggs_formatted:
meta["matched_aggs"] = aggs_formatted
rule_notify(self.object, index, message, meta)
meta["is_match"] = True
self.store_match(index, message)
message = self.reform_matches(index, message, meta, mode)
rule_notify(self.object, index, message, meta)
await self.ingest_matches(index, message, meta, mode)
def rule_matched_sync(self, index, message, meta, mode):
@@ -423,12 +444,15 @@ class NotificationRuleData(object):
if aggs_formatted:
meta["matched_aggs"] = aggs_formatted
rule_notify(self.object, index, message, meta)
meta["is_match"] = True
self.store_match(index, message)
message = self.reform_matches(index, message, meta, mode)
rule_notify(self.object, index, message, meta)
self.ingest_matches_sync(index, message, meta, mode)
# No async helper for this one as we only need it for schedules
async def rule_no_match(self, index=None, message=None):
async def rule_no_match(self, index=None, message=None, mode=None):
"""
A rule has not matched.
If the previous run did match, send a notification if configured to notify
@@ -455,11 +479,14 @@ class NotificationRuleData(object):
if self.policy in ["always", "change"]:
# Never notify for empty matches on default policy
rule_notify(self.object, index, "no_match", None)
meta = {"msg": message, "is_match": False}
matches = [{"msg": None}]
message = self.reform_matches(index, matches, meta, mode)
rule_notify(self.object, index, matches, meta)
await self.ingest_matches(
index=index,
matches=[{"msg": None}],
meta={"msg": message},
matches=matches,
meta=meta,
mode="schedule",
)
@@ -472,12 +499,16 @@ class NotificationRuleData(object):
response = await self.db.schedule_query_results(self)
if not response:
# No results in the result_map
await self.rule_no_match(message="No response from database")
await self.rule_no_match(
message="No response from database", mode="schedule"
)
return
for index, (meta, results) in response.items():
if not results:
# Falsy results, no matches
await self.rule_no_match(index, message="No results for index")
await self.rule_no_match(
index, message="No results for index", mode="schedule"
)
continue
# Add the match values of all aggregations to a list
@@ -496,7 +527,9 @@ class NotificationRuleData(object):
)
continue
# Default branch, since the happy path has a continue keyword
await self.rule_no_match(index, message="Aggregation did not match")
await self.rule_no_match(
index, message="Aggregation did not match", mode="schedule"
)
def test_schedule(self):
"""


@@ -1,5 +1,6 @@
import msgpack
from django.core.management.base import BaseCommand
from django.conf import settings
from redis import StrictRedis
from core.db.storage import db
@@ -93,7 +94,13 @@ def process_rules(data):
class Command(BaseCommand):
def handle(self, *args, **options):
r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=10) # To match Monolith DB
# r = StrictRedis(
# host=settings.REDIS_HOST,
# port=settings.REDIS_PORT,
# password=settings.REDIS_PASSWORD,
# db=settings.REDIS_DB
# )
p = r.pubsub()
p.psubscribe("messages")
for message in p.listen():
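A sketch of the consumption side of the loop this command sets up, assuming the published payloads are msgpack-encoded (the msgpack import added at the top suggests so):

for message in p.listen():
    if message["type"] != "pmessage":
        continue  # skip the subscription-confirmation event
    data = msgpack.unpackb(message["data"], raw=False)
    process_rules(data)  # helper defined earlier in this command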


@@ -44,8 +44,11 @@ class Command(BaseCommand):
for interval in INTERVALS:
log.debug(f"Scheduling {interval} second job")
scheduler.add_job(job, "interval", seconds=interval, args=[interval])
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
scheduler._eventloop = loop
scheduler.start()
loop = asyncio.get_event_loop()
try:
loop.run_forever()
except (KeyboardInterrupt, SystemExit):
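The reordering matters because APScheduler's AsyncIOScheduler binds to an event loop when start() is called, so the loop has to exist, and be the current loop, before that point; the jobs then run on the same loop that run_forever() drives. Assigning the private _eventloop attribute works, but a sketch of the supported route is to hand the loop over at construction time:

import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
scheduler = AsyncIOScheduler(event_loop=loop)  # configuration option AsyncIOScheduler accepts
# ... scheduler.add_job(...) calls ...
scheduler.start()
loop.run_forever()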


@@ -0,0 +1,28 @@
# Generated by Django 4.1.6 on 2023-02-13 10:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0026_notificationrule_policy_and_more'),
]
operations = [
migrations.AlterField(
model_name='notificationrule',
name='policy',
field=models.CharField(choices=[('default', 'Default: Trigger only when there were no results last time'), ('change', 'Change: Default + trigger when there are no results (if there were before)'), ('always', 'Always: Trigger on every run (not recommended for low intervals)')], default='default', max_length=255),
),
migrations.AlterField(
model_name='notificationrule',
name='topic',
field=models.CharField(blank=True, max_length=2048, null=True),
),
migrations.AlterField(
model_name='notificationsettings',
name='topic',
field=models.CharField(blank=True, max_length=2048, null=True),
),
]


@@ -0,0 +1,33 @@
# Generated by Django 4.1.6 on 2023-02-13 21:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0027_alter_notificationrule_policy_and_more'),
]
operations = [
migrations.RenameField(
model_name='notificationrule',
old_name='send_empty',
new_name='ingest',
),
migrations.AlterField(
model_name='notificationrule',
name='interval',
field=models.IntegerField(choices=[(0, 'On demand'), (5, 'Every 5 seconds'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=60),
),
migrations.AlterField(
model_name='notificationrule',
name='service',
field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='webhook', max_length=255),
),
migrations.AlterField(
model_name='notificationrule',
name='window',
field=models.CharField(blank=True, default='30d', max_length=255, null=True),
),
]


@@ -78,6 +78,7 @@ class User(AbstractUser):
"""
Override the save function to create a Stripe customer.
"""
if settings.BILLING_ENABLED:
if not self.stripe_id: # stripe ID not stored
self.stripe_id = get_or_create(self.email, self.first_name, self.last_name)
@@ -89,11 +90,13 @@ class User(AbstractUser):
if self.last_name != self._original.last_name:
to_update["last_name"] = self.last_name
if settings.BILLING_ENABLED:
update_customer_fields(self.stripe_id, **to_update)
super().save(*args, **kwargs)
def delete(self, *args, **kwargs):
if settings.BILLING_ENABLED:
if self.stripe_id:
stripe.Customer.delete(self.stripe_id)
logger.info(f"Deleted Stripe customer {self.stripe_id}")
@@ -192,16 +195,18 @@ class NotificationRule(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
name = models.CharField(max_length=255)
priority = models.IntegerField(choices=PRIORITY_CHOICES, default=1)
topic = models.CharField(max_length=255, null=True, blank=True)
topic = models.CharField(max_length=2048, null=True, blank=True)
url = models.CharField(max_length=1024, null=True, blank=True)
interval = models.IntegerField(choices=INTERVAL_CHOICES, default=0)
window = models.CharField(max_length=255, null=True, blank=True)
interval = models.IntegerField(choices=INTERVAL_CHOICES, default=60)
window = models.CharField(max_length=255, default="30d", null=True, blank=True)
amount = models.PositiveIntegerField(default=1, null=True, blank=True)
enabled = models.BooleanField(default=True)
data = models.TextField()
match = models.JSONField(null=True, blank=True)
service = models.CharField(choices=SERVICE_CHOICES, max_length=255, default="ntfy")
send_empty = models.BooleanField(default=False)
service = models.CharField(
choices=SERVICE_CHOICES, max_length=255, default="webhook"
)
ingest = models.BooleanField(default=False)
policy = models.CharField(choices=POLICY_CHOICES, max_length=255, default="default")
def __str__(self):
@@ -238,8 +243,6 @@ class NotificationRule(models.Model):
user_settings["url"] = self.url
if self.service is not None:
user_settings["service"] = self.service
if self.send_empty is not None:
user_settings["send_empty"] = self.send_empty
if check:
if user_settings["service"] == "ntfy" and user_settings["topic"] is None:
@@ -251,7 +254,7 @@ class NotificationRule(models.Model):
class NotificationSettings(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
topic = models.CharField(max_length=255, null=True, blank=True)
topic = models.CharField(max_length=2048, null=True, blank=True)
url = models.CharField(max_length=1024, null=True, blank=True)
service = models.CharField(choices=SERVICE_CHOICES, max_length=255, default="ntfy")
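With send_empty gone, what remains is a plain override chain: per-rule values replace the user-level NotificationSettings where set, as in the get_user_settings hunk above. A hedged sketch of that merge (get_settings() is a hypothetical accessor returning a dict):

def effective_settings(rule) -> dict:
    user_settings = get_settings(rule.user)  # hypothetical accessor
    # Rule-level fields override the user-level defaults where present
    if rule.topic is not None:
        user_settings["topic"] = rule.topic
    if rule.url is not None:
        user_settings["url"] = rule.url
    if rule.service is not None:
        user_settings["service"] = rule.service
    return user_settings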

View File

@@ -70,7 +70,7 @@ $(document).ready(function(){
"index": "off",
"meta": "off",
"match_ts": "off",
"batch_id": "off",
"batch_id": "off"
//"lang_name": "off",
// "words_noun": "off",
// "words_adj": "off",

View File

@@ -1,8 +1,10 @@
{% load static %}
{% load has_plan %}
{% load cache %}
<!DOCTYPE html>
<html lang="en-GB">
{% cache 600 head request.path_info %}
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
@@ -235,8 +237,9 @@
<!-- End Piwik Code -->
</head>
{% endcache %}
<body>
{% cache 600 nav request.user.id %}
<nav class="navbar" role="navigation" aria-label="main navigation">
<div class="navbar-brand">
<a class="navbar-item" href="{% url 'home' %}">
@@ -277,7 +280,7 @@
{% if user.is_superuser %}
<div class="navbar-item has-dropdown is-hoverable">
<a class="navbar-link">
Threshold
Manage
</a>
<div class="navbar-dropdown">
@@ -287,6 +290,9 @@
<a class="navbar-item" href="#">
Discord
</a>
<a class="navbar-item" href="{% url 'monolith_stats' %}">
Stats
</a>
</div>
</div>
{% endif %}
@@ -334,6 +340,7 @@
</div>
</div>
</nav>
{% endcache %}
<script>
let deferredPrompt;
const addBtn = document.querySelector('.add-button');

View File

@@ -0,0 +1,15 @@
{% extends "base.html" %}
{% block content %}
<div
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-get="{% url 'monolith_stats_db' type='page' %}"
hx-trigger="load, every 5s"
hx-target="#stats"
hx-swap="innerHTML">
</div>
<div class="box">
<div id="stats">
</div>
</div>
{% endblock %}
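For reference, the two endpoints this template polls would be wired roughly as below. Only the URL names monolith_stats and monolith_stats_db are taken from the code; the path strings and module location are assumptions:

from django.urls import path

from core.views.manage import monolith  # hypothetical module path

urlpatterns = [
    path(
        "manage/monolith/stats/",
        monolith.MonolithStats.as_view(),
        name="monolith_stats",
    ),
    path(
        "manage/monolith/stats/db/<str:type>/",
        monolith.MonolithDBStats.as_view(),
        name="monolith_stats_db",
    ),
]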

View File

@@ -0,0 +1,14 @@
{% extends 'mixins/partials/generic-detail.html' %}
{% block tbody %}
{% for item in object %}
{% if item.data %}
{% for row in item.data %}
<tr>
<th>{{ row.Variable_name }}</th>
<td>{{ row.Value }}</td>
</tr>
{% endfor %}
{% endif %}
{% endfor %}
{% endblock %}

View File

@@ -1,4 +1,4 @@
{% extends 'wm/modal.html' %}
{% extends 'mixins/wm/modal.html' %}
{% load index %}
{% load static %}
@@ -42,7 +42,7 @@
{% endblock %}
{% block modal_content %}
{% include 'partials/notify.html' %}
{% include 'mixins/partials/notify.html' %}
<div class="tabs is-toggle is-fullwidth is-info" id="tabs-{{ unique }}">
<ul>
<li class="is-active" data-tab="1">

View File

@@ -1,4 +1,4 @@
{% extends 'wm/modal.html' %}
{% extends 'mixins/wm/modal.html' %}
{% block modal_content %}
{% include 'window-content/drilldown.html' %}

View File

@@ -1 +0,0 @@
<button class="modal-close is-large" aria-label="close"></button>

View File

@@ -1,3 +0,0 @@
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
onclick='grid.removeWidget("widget-{{ unique }}");'></i>

View File

@@ -1,3 +0,0 @@
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
data-script="on click remove the closest <nav/>"></i>

View File

@@ -1,8 +1,9 @@
{% load static %}
{% load cache %}
{% load cachalot cache %}
{% get_last_invalidation 'core.Plan' as last %}
{% cache 600 objects_plans request.user.id plans last %}
{% for plan in plans %}
<div class="box">
<article class="media">
<div class="media-left">
@@ -44,5 +45,4 @@
</article>
</div>
{% endfor %}
{% endcache %}

View File

@@ -1,4 +1,4 @@
{% extends 'wm/widget.html' %}
{% extends 'mixins/wm/widget.html' %}
{% load static %}
{% block heading %}
@@ -6,8 +6,7 @@
{% endblock %}
{% block panel_content %}
{% include 'partials/notify.html' %}
<script src="{% static 'js/column-shifter.js' %}"></script>
{% include 'mixins/partials/notify.html' %}
{% if cache is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="Cached">
<i class="fa-solid fa-database"></i>

View File

@@ -5,7 +5,9 @@
{% load urlsafe %}
{% load pretty %}
{% load splitstr %}
{% load cache %}
{% cache 3600 results_table_full request.user.id table %}
{% block table-wrapper %}
<script src="{% static 'js/column-shifter.js' %}"></script>
<div id="drilldown-table" class="column-shifter-container" style="position:relative; z-index:1;">
@@ -172,10 +174,11 @@
</td>
{% elif column.name == 'match_ts' %}
<td class="{{ column.name }}">
{% with match_ts=cell|splitstr:'T' %}
<!-- {# with match_ts=cell|splitstr:'T' %}
<p>{{ match_ts.0 }}</p>
<p>{{ match_ts.1 }}</p>
{% endwith %}
{% endwith #} -->
<p>{{ match_ts }}</p>
</td>
{% elif column.name == 'type' or column.name == 'mtype' %}
<td class="{{ column.name }}">
@@ -387,7 +390,7 @@
<td class="{{ column.name }}">
<pre class="small-field" style="cursor: pointer;">{{ cell|pretty }}</pre>
</td>
{% elif 'id' in column.name %}
{% elif 'id' in column.name and column.name != "ident" %}
<td class="{{ column.name }}">
<div class="buttons">
<div class="nowrap-parent">
@@ -531,3 +534,4 @@
{% endblock pagination %}
</div>
{% endblock table-wrapper %}
{% endcache %}

View File

@@ -1,5 +1,8 @@
{% include 'partials/notify.html' %}
{% load cache %}
{% load cachalot cache %}
{% get_last_invalidation 'core.NotificationRule' as last %}
{% include 'mixins/partials/notify.html' %}
{% cache 600 objects_rules request.user.id object_list last %}
<table
class="table is-fullwidth is-hoverable"
hx-target="#{{ context_object_name }}-table"
@@ -16,13 +19,14 @@
<th>priority</th>
<th>topic</th>
<th>enabled</th>
<th>ingest</th>
<th>data length</th>
<th>match</th>
<th>actions</th>
</thead>
{% for item in object_list %}
<tr>
<td><a href="/search/?rule={{ item.id }}&query=*&source=all">{{ item.id }}</a></td>
<td><a href="/?query=*&source=all&rule={{ item.id }}">{{ item.id }}</a></td>
<td>{{ item.user }}</td>
<td>{{ item.name }}</td>
<td>{{ item.interval }}s</td>
@@ -40,6 +44,17 @@
</span>
{% endif %}
</td>
<td>
{% if item.ingest %}
<span class="icon">
<i class="fa-solid fa-check"></i>
</span>
{% else %}
<span class="icon">
<i class="fa-solid fa-xmark"></i>
</span>
{% endif %}
</td>
<td>{{ item.data|length }}</td>
<td>{{ item.matches }}</td>
<td>
@@ -91,3 +106,4 @@
{% endfor %}
</table>
{% endcache %}

View File

@@ -3,7 +3,7 @@
</div>
{% if params.index != 'int' and params.index != 'meta' %}
<div id="sentiment-container" {% if params.show_sentiment is None %} class="is-hidden" {% endif %}>
<div id="sentiment-container" {% if params.graph is None %} class="is-hidden" {% endif %}>
<canvas id="sentiment-chart"></canvas>
</div>
<script src="{% static 'chart.js' %}"></script>

View File

@@ -19,7 +19,7 @@
hx-swap="outerHTML">
</div>
<div id="info">
{% include 'partials/notify.html' %}
{% include 'mixins/partials/notify.html' %}
{% if item is not None %}
<div class="content" style="max-height: 30em; overflow: auto;">
<div class="table-container">

View File

@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
{% block content %}
{% include 'partials/notify.html' %}
{% include 'mixins/partials/notify.html' %}
<script src="{% static 'tabs.js' %}"></script>
<style>
.icon { border-bottom: 0px !important;}

View File

@@ -1,4 +1,4 @@
{% extends 'wm/widget.html' %}
{% extends 'mixins/wm/widget.html' %}
{% block widget_options %}

View File

@@ -1,34 +0,0 @@
{% include 'partials/notify.html' %}
{% if page_title is not None %}
<h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
<h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
{% load crispy_forms_tags %}
{% load crispy_forms_bulma_field %}
<form
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{{ submit_url }}"
hx-target="#modals-here"
hx-swap="innerHTML">
{% csrf_token %}
{{ form|crispy }}
{% if hide_cancel is not True %}
<button
type="button"
class="button is-light modal-close-button">
Cancel
</button>
{% endif %}
<button type="submit" class="button modal-close-button">Submit</button>
</form>

View File

@@ -1,45 +0,0 @@
{% include 'partials/notify.html' %}
{% if page_title is not None %}
<h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
<h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
<div class="buttons">
{% if submit_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-get="{{ submit_url }}"
hx-trigger="click"
hx-target="#modals-here"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-plus"></i>
</span>
<span>{{ title_singular }}</span>
</span>
</button>
{% endif %}
{% if delete_all_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-delete="{{ delete_all_url }}"
hx-trigger="click"
hx-target="#modals-here"
hx-swap="innerHTML"
hx-confirm="Are you sure you wish to delete all {{ context_object_name }}?"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-xmark"></i>
</span>
<span>Delete all {{ context_object_name }} </span>
</span>
</button>
{% endif %}
</div>
{% include detail_template %}

View File

@@ -1,45 +0,0 @@
{% include 'partials/notify.html' %}
{% if page_title is not None %}
<h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
<h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
<div class="buttons">
{% if submit_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-get="{{ submit_url }}"
hx-trigger="click"
hx-target="#modals-here"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-plus"></i>
</span>
<span>{{ title_singular }}</span>
</span>
</button>
{% endif %}
{% if delete_all_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-delete="{{ delete_all_url }}"
hx-trigger="click"
hx-target="#modals-here"
hx-swap="innerHTML"
hx-confirm="Are you sure you wish to delete all {{ context_object_name }}?"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-xmark"></i>
</span>
<span>Delete all {{ context_object_name }} </span>
</span>
</button>
{% endif %}
</div>
{% include list_template %}

View File

@@ -1,6 +1,6 @@
{% load static %}
{% include 'partials/notify.html' %}
{% include 'mixins/partials/notify.html' %}
{% if cache is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="Cached">
<i class="fa-solid fa-database"></i>

View File

@@ -258,7 +258,7 @@
id="sentiment_graph_switch"
type="checkbox"
class="switch is-rounded is-info"
name="show_sentiment"
name="graph"
data-script="on click toggle .is-hidden on #sentiment-container">
<label

View File

@@ -1,4 +1,4 @@
{% extends 'wm/window.html' %}
{% extends 'mixins/wm/window.html' %}
{% block heading %}
Drilldown

View File

@@ -1,20 +0,0 @@
{% load static %}
<script src="{% static 'modal.js' %}"></script>
{% block scripts %}
{% endblock %}
{% block styles %}
{% endblock %}
<div id="modal" class="modal is-active is-clipped">
<div class="modal-background"></div>
<div class="modal-content">
<div class="box">
{% block modal_content %}
{% include window_content %}
{% endblock %}
{% include 'partials/close-modal.html' %}
</div>
</div>
</div>

View File

@@ -1,6 +0,0 @@
{% extends "base.html" %}
{% block content %}
{% include window_content %}
{% endblock %}

View File

@@ -1,17 +0,0 @@
<nav class="panel">
<p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
{% block close_button %}
{% include 'partials/close-window.html' %}
{% endblock %}
{% block heading %}
{% endblock %}
</p>
<article class="panel-block is-active">
<div class="control">
{% block panel_content %}
{% endblock %}
</div>
</article>
</nav>

View File

@@ -1,37 +0,0 @@
<div id="widget">
<div id="widget-{{ unique }}" class="grid-stack-item" {% block widget_options %}gs-w="10" gs-h="1" gs-y="10" gs-x="1"{% endblock %}>
<div class="grid-stack-item-content">
<nav class="panel">
<p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
{% block close_button %}
{% include 'partials/close-widget.html' %}
{% endblock %}
<i
class="fa-solid fa-arrows-minimize has-text-grey-light float-right"
onclick='grid.compact();'></i>
{% block heading %}
{{ title }}
{% endblock %}
</p>
<article class="panel-block is-active">
<div class="control">
{% block panel_content %}
{% include window_content %}
{% endblock %}
</div>
</article>
</nav>
</div>
</div>
</div>
<script>
{% block custom_script %}
{% endblock %}
var widget_event = new Event('load-widget');
document.dispatchEvent(widget_event);
</script>
{% block custom_end %}
{% endblock %}

View File

@@ -1,10 +0,0 @@
<magnet-block attract-distance="10" align-to="outer|center" class="floating-window">
{% extends 'wm/panel.html' %}
{% block heading %}
{{ title }}
{% endblock %}
{% block panel_content %}
{% include window_content %}
{% endblock %}
</magnet-block>

View File

@@ -5,4 +5,6 @@ register = template.Library()
@register.filter
def splitstr(value, arg):
if type(value) == int:
raise Exception(f"Attempt to split {value} with separator {arg}")
return value.split(arg)
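A slightly more defensive variant of the same filter: isinstance also catches int subclasses, and refusing all non-strings avoids an AttributeError further down (a sketch, not what the commit ships):

from django import template

register = template.Library()

@register.filter
def splitstr(value, arg):
    if not isinstance(value, str):
        raise template.TemplateSyntaxError(
            f"splitstr expected a string, got {value!r}"
        )
    return value.split(arg)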

View File

@@ -1,753 +0,0 @@
# import re
# from base64 import b64encode
# from random import randint
# from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
# from cryptography.hazmat.primitives.ciphers.modes import ECB
# from django.conf import settings
# from siphashc import siphash
# from sortedcontainers import SortedSet
import uuid
# from core import r
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import Paginator
from django.db.models import QuerySet
from django.http import Http404, HttpResponse, HttpResponseBadRequest
from django.urls import reverse
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView
from rest_framework.parsers import FormParser
from core.util import logs
log = logs.get_logger(__name__)
class RestrictedViewMixin:
"""
This mixin overrides two helpers in order to pass the user object to the filters.
get_queryset alters the objects returned for list views.
get_form_kwargs passes the request object to the form class. Remaining permissions
checks are in forms.py
"""
allow_empty = True
queryset = None
model = None
paginate_by = None
paginate_orphans = 0
context_object_name = None
paginator_class = Paginator
page_kwarg = "page"
ordering = None
def get_queryset(self, **kwargs):
"""
This function is overridden to filter the objects by the requesting user.
"""
if self.queryset is not None:
queryset = self.queryset
if isinstance(queryset, QuerySet):
# queryset = queryset.all()
queryset = queryset.filter(user=self.request.user)
elif self.model is not None:
queryset = self.model._default_manager.filter(user=self.request.user)
else:
raise ImproperlyConfigured(
"%(cls)s is missing a QuerySet. Define "
"%(cls)s.model, %(cls)s.queryset, or override "
"%(cls)s.get_queryset()." % {"cls": self.__class__.__name__}
)
if hasattr(self, "get_ordering"):
ordering = self.get_ordering()
if ordering:
if isinstance(ordering, str):
ordering = (ordering,)
queryset = queryset.order_by(*ordering)
return queryset
def get_form_kwargs(self):
"""Passes the request object to the form class.
This is necessary to display only objects that belong to the requesting user"""
kwargs = super().get_form_kwargs()
kwargs["request"] = self.request
return kwargs
class ObjectNameMixin(object):
def __init__(self, *args, **kwargs):
if self.model is None:
self.title = self.context_object_name.title()
self.title_singular = self.context_object_name_singular.title()
else:
self.title_singular = self.model._meta.verbose_name.title() # Hook
self.context_object_name_singular = self.title_singular.lower() # hook
self.title = self.model._meta.verbose_name_plural.title() # Hooks
self.context_object_name = self.title.lower() # hooks
self.context_object_name = self.context_object_name.replace(" ", "")
self.context_object_name_singular = (
self.context_object_name_singular.replace(" ", "")
)
super().__init__(*args, **kwargs)
class ObjectList(RestrictedViewMixin, ObjectNameMixin, ListView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/objects.html"
list_template = None
page_title = None
page_subtitle = None
list_url_name = None
# WARNING: TAKEN FROM locals()
list_url_args = ["type"]
submit_url_name = None
delete_all_url_name = None
widget_options = None
# copied from BaseListView
def get(self, request, *args, **kwargs):
type = kwargs.get("type", None)
if not type:
return HttpResponseBadRequest("No type specified")
if type not in self.allowed_types:
return HttpResponseBadRequest("Invalid type specified")
self.request = request
self.object_list = self.get_queryset(**kwargs)
if isinstance(self.object_list, HttpResponse):
return self.object_list
if isinstance(self.object_list, HttpResponseBadRequest):
return self.object_list
allow_empty = self.get_allow_empty()
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
list_url_args = {}
for arg in self.list_url_args:
if arg in locals():
list_url_args[arg] = locals()[arg]
elif arg in kwargs:
list_url_args[arg] = kwargs[arg]
orig_type = type
if type == "page":
type = "modal"
if not allow_empty:
# When pagination is enabled and object_list is a queryset,
# it's better to do a cheap query than to load the unpaginated
# queryset in memory.
if self.get_paginate_by(self.object_list) is not None and hasattr(
self.object_list, "exists"
):
is_empty = not self.object_list.exists()
else:
is_empty = not self.object_list
if is_empty:
raise Http404("Empty list")
context = self.get_context_data()
context["title"] = self.title + f" ({type})"
context["title_singular"] = self.title_singular
context["unique"] = unique
context["window_content"] = self.window_content
context["list_template"] = self.list_template
context["page_title"] = self.page_title
context["page_subtitle"] = self.page_subtitle
context["type"] = type
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
if self.submit_url_name is not None:
context["submit_url"] = reverse(self.submit_url_name, kwargs={"type": type})
if self.list_url_name is not None:
context["list_url"] = reverse(self.list_url_name, kwargs=list_url_args)
if self.delete_all_url_name:
context["delete_all_url"] = reverse(self.delete_all_url_name)
if self.widget_options:
context["widget_options"] = self.widget_options
# Return partials for HTMX
if self.request.htmx:
if request.headers["HX-Target"] == self.context_object_name + "-table":
self.template_name = self.list_template
elif orig_type == "page":
self.template_name = self.list_template
else:
context["window_content"] = self.list_template
return self.render_to_response(context)
class ObjectCreate(RestrictedViewMixin, ObjectNameMixin, CreateView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/object-form.html"
parser_classes = [FormParser]
page_title = None
page_subtitle = None
model = None
submit_url_name = None
submit_url_args = ["type"]
request = None
# Whether to hide the cancel button in the form
hide_cancel = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = "Create " + self.context_object_name_singular
def post_save(self, obj):
pass
def form_valid(self, form):
obj = form.save(commit=False)
if self.request is None:
raise Exception("Request is None")
obj.user = self.request.user
obj.save()
form.save_m2m()
self.post_save(obj)
context = {"message": "Object created", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def form_invalid(self, form):
"""If the form is invalid, render the invalid form."""
return self.get(self.request, **self.kwargs, form=form)
def get(self, request, *args, **kwargs):
type = kwargs.get("type", None)
if not type:
return HttpResponseBadRequest("No type specified")
if type not in self.allowed_types:
return HttpResponseBadRequest("Invalid type specified")
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
self.request = request
self.kwargs = kwargs
if type == "widget":
self.hide_cancel = True
if type == "page":
type = "modal"
self.object = None
submit_url_args = {}
for arg in self.submit_url_args:
if arg in locals():
submit_url_args[arg] = locals()[arg]
elif arg in kwargs:
submit_url_args[arg] = kwargs[arg]
submit_url = reverse(self.submit_url_name, kwargs=submit_url_args)
context = self.get_context_data()
form = kwargs.get("form", None)
if form:
context["form"] = form
context["unique"] = unique
context["window_content"] = self.window_content
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
context["submit_url"] = submit_url
context["type"] = type
context["hide_cancel"] = self.hide_cancel
if self.page_title:
context["page_title"] = self.page_title
if self.page_subtitle:
context["page_subtitle"] = self.page_subtitle
response = self.render_to_response(context)
# response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def post(self, request, *args, **kwargs):
self.request = request
self.template_name = "partials/notify.html"
return super().post(request, *args, **kwargs)
class ObjectRead(RestrictedViewMixin, ObjectNameMixin, DetailView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/object.html"
detail_template = "partials/generic-detail.html"
page_title = None
page_subtitle = None
model = None
# submit_url_name = None
detail_url_name = None
# WARNING: TAKEN FROM locals()
detail_url_args = ["type"]
request = None
def get(self, request, *args, **kwargs):
type = kwargs.get("type", None)
if not type:
return HttpResponseBadRequest("No type specified")
if type not in self.allowed_types:
return HttpResponseBadRequest()
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
detail_url_args = {}
for arg in self.detail_url_args:
if arg in locals():
detail_url_args[arg] = locals()[arg]
elif arg in kwargs:
detail_url_args[arg] = kwargs[arg]
self.request = request
self.object = self.get_object(**kwargs)
if isinstance(self.object, HttpResponse):
return self.object
orig_type = type
if type == "page":
type = "modal"
context = self.get_context_data()
context["title"] = self.title + f" ({type})"
context["title_singular"] = self.title_singular
context["unique"] = unique
context["window_content"] = self.window_content
context["detail_template"] = self.detail_template
if self.page_title:
context["page_title"] = self.page_title
if self.page_subtitle:
context["page_subtitle"] = self.page_subtitle
context["type"] = type
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
if self.detail_url_name is not None:
context["detail_url"] = reverse(
self.detail_url_name, kwargs=detail_url_args
)
# Return partials for HTMX
if self.request.htmx:
if request.headers["HX-Target"] == self.context_object_name + "-info":
self.template_name = self.detail_template
elif orig_type == "page":
self.template_name = self.detail_template
else:
context["window_content"] = self.detail_template
return self.render_to_response(context)
class ObjectUpdate(RestrictedViewMixin, ObjectNameMixin, UpdateView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/object-form.html"
parser_classes = [FormParser]
page_title = None
page_subtitle = None
model = None
submit_url_name = None
submit_url_args = ["type", "pk"]
request = None
# Whether pk is required in the get request
pk_required = True
# Whether to hide the cancel button in the form
hide_cancel = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = "Update " + self.context_object_name_singular
def post_save(self, obj):
pass
def form_valid(self, form):
obj = form.save(commit=False)
if self.request is None:
raise Exception("Request is None")
obj.save()
form.save_m2m()
self.post_save(obj)
context = {"message": "Object updated", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def form_invalid(self, form):
"""If the form is invalid, render the invalid form."""
return self.get(self.request, **self.kwargs, form=form)
def get(self, request, *args, **kwargs):
self.request = request
type = kwargs.get("type", None)
pk = kwargs.get("pk", None)
if not type:
return HttpResponseBadRequest("No type specified")
if not pk:
if self.pk_required:
return HttpResponseBadRequest("No pk specified")
if type not in self.allowed_types:
return HttpResponseBadRequest("Invalid type specified")
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
if type == "widget":
self.hide_cancel = True
if type == "page":
type = "modal"
self.object = self.get_object()
submit_url_args = {}
for arg in self.submit_url_args:
if arg in locals():
submit_url_args[arg] = locals()[arg]
elif arg in kwargs:
submit_url_args[arg] = kwargs[arg]
submit_url = reverse(self.submit_url_name, kwargs=submit_url_args)
context = self.get_context_data()
form = kwargs.get("form", None)
if form:
context["form"] = form
context["title"] = self.title + f" ({type})"
context["title_singular"] = self.title_singular
context["unique"] = unique
context["window_content"] = self.window_content
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
context["submit_url"] = submit_url
context["type"] = type
context["hide_cancel"] = self.hide_cancel
if self.page_title:
context["page_title"] = self.page_title
if self.page_subtitle:
context["page_subtitle"] = self.page_subtitle
response = self.render_to_response(context)
# response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def post(self, request, *args, **kwargs):
self.request = request
self.template_name = "partials/notify.html"
return super().post(request, *args, **kwargs)
class ObjectDelete(RestrictedViewMixin, ObjectNameMixin, DeleteView):
model = None
template_name = "partials/notify.html"
# Overridden to prevent success URL from being used
def delete(self, request, *args, **kwargs):
"""
Call the delete() method on the fetched object and then redirect to the
success URL.
"""
self.object = self.get_object()
# success_url = self.get_success_url()
self.object.delete()
context = {"message": "Object deleted", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
# Newer Django versions call form_valid() instead of delete(); until then we get a warning
def form_valid(self, form):
"""
Call the delete() method on the fetched object.
"""
self.object = self.get_object()
self.object.delete()
context = {"message": "Object deleted", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
# from random import randint
# from timeit import timeit
# entries = 10000
# a = [
# {'ts': "sss", 'msg': randint(1, 2), str(randint(1, 2)): \
# randint(1, 2)} for x in range(entries)
# ]
# kk = ["msg", "nick"]
# call = lambda: dedup_list(a, kk)
# #print(timeit(call, number=10))
# print(dedup_list(a, kk))
# # sh-5.1$ python helpers.py
# # 1.0805372429895215
# def base36encode(number, alphabet="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
# """Converts an integer to a base36 string."""
# if not isinstance(number, (int)):
# raise TypeError("number must be an integer")
# base36 = ""
# sign = ""
# if number < 0:
# sign = "-"
# number = -number
# if 0 <= number < len(alphabet):
# return sign + alphabet[number]
# while number != 0:
# number, i = divmod(number, len(alphabet))
# base36 = alphabet[i] + base36
# return sign + base36
# def base36decode(number):
# return int(number, 36)
# def randomise_list(user, data):
# """
# Randomise data in a list of dictionaries.
# """
# if user.has_perm("core.bypass_randomisation"):
# return
# if isinstance(data, list):
# for index, item in enumerate(data):
# for key, value in item.items():
# if key in settings.RANDOMISE_FIELDS:
# if isinstance(value, int):
# min_val = value - (value * settings.RANDOMISE_RATIO)
# max_val = value + (value * settings.RANDOMISE_RATIO)
# new_val = randint(int(min_val), int(max_val))
# data[index][key] = new_val
# elif isinstance(data, dict):
# for key, value in data.items():
# # if key in settings.RANDOMISE_FIELDS:
# if isinstance(value, int):
# min_val = value - (value * settings.RANDOMISE_RATIO)
# max_val = value + (value * settings.RANDOMISE_RATIO)
# new_val = randint(int(min_val), int(max_val))
# data[key] = new_val
# def obfuscate_list(user, data):
# """
# Obfuscate data in a list of dictionaries.
# """
# if user.has_perm("core.bypass_obfuscation"):
# return
# for index, item in enumerate(data):
# for key, value in item.items():
# # Obfuscate a ratio of the field
# if key in settings.OBFUSCATE_FIELDS:
# length = len(value) - 1
# split = int(length * settings.OBFUSCATE_KEEP_RATIO)
# first_part = value[:split]
# second_part = value[split:]
# second_len = len(second_part)
# second_part = "*" * second_len
# data[index][key] = first_part + second_part
# # Obfuscate value based on fields
# # Example: 2022-02-02 -> 2022-02-**
# # 14:11:12 -> 14:11:**
# elif key in settings.OBFUSCATE_FIELDS_SEP:
# if "-" in value:
# sep = "-"
# value_spl = value.split("-")
# hide_num = settings.OBFUSCATE_DASH_NUM
# elif ":" in value:
# sep = ":"
# value_spl = value.split(":")
# hide_num = settings.OBFUSCATE_COLON_NUM
# first_part = value_spl[:hide_num]
# second_part = value_spl[hide_num:]
# for index_x, x in enumerate(second_part):
# x_len = len(x)
# second_part[index_x] = "*" * x_len
# result = sep.join([*first_part, *second_part])
# data[index][key] = result
# for key in settings.COMBINE_FIELDS:
# for index, item in enumerate(data):
# if key in item:
# k1, k2 = settings.COMBINE_FIELDS[key]
# if k1 in item and k2 in item:
# data[index][key] = item[k1] + item[k2]
# def hash_list(user, data, hash_keys=False):
# """
# Hash a list of dicts or a list with SipHash42.
# """
# if user.has_perm("core.bypass_hashing"):
# return
# cache = "cache.hash"
# hash_table = {}
# if isinstance(data, dict):
# data_copy = [{x: data[x]} for x in data]
# else:
# data_copy = type(data)((data))
# for index, item in enumerate(data_copy):
# if "src" in item:
# if item["src"] in settings.SAFE_SOURCES:
# continue
# if isinstance(item, dict):
# for key, value in list(item.items()):
# if (
# key not in settings.WHITELIST_FIELDS
# and key not in settings.NO_OBFUSCATE_PARAMS
# ):
# if isinstance(value, int):
# value = str(value)
# if isinstance(value, bool):
# continue
# if value is None:
# continue
# if hash_keys:
# hashed = siphash(settings.HASHING_KEY, key)
# else:
# hashed = siphash(settings.HASHING_KEY, value)
# encoded = base36encode(hashed)
# if encoded not in hash_table:
# if hash_keys:
# hash_table[encoded] = key
# else:
# hash_table[encoded] = value
# if hash_keys:
# # Rename the dict key
# data[encoded] = data.pop(key)
# else:
# data[index][key] = encoded
# elif isinstance(item, str):
# hashed = siphash(settings.HASHING_KEY, item)
# encoded = base36encode(hashed)
# if encoded not in hash_table:
# hash_table[encoded] = item
# data[index] = encoded
# if hash_table:
# r.hmset(cache, hash_table)
# def hash_lookup(user, data_dict, supplementary_data=None):
# cache = "cache.hash"
# hash_list = SortedSet()
# denied = []
# for key, value in list(data_dict.items()):
# if "source" in data_dict:
# if data_dict["source"] in settings.SAFE_SOURCES:
# continue
# if "src" in data_dict:
# if data_dict["src"] in settings.SAFE_SOURCES:
# continue
# if supplementary_data:
# if "source" in supplementary_data:
# if supplementary_data["source"] in settings.SAFE_SOURCES:
# continue
# if key in settings.SEARCH_FIELDS_DENY:
# if not user.has_perm("core.bypass_hashing"):
# data_dict[key] = SearchDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# if (
# key not in settings.WHITELIST_FIELDS
# and key not in settings.NO_OBFUSCATE_PARAMS
# ):
# if not value:
# continue
# # hashes = re.findall("\|([^\|]*)\|", value) # noqa
# if isinstance(value, str):
# hashes = re.findall("[A-Z0-9]{12,13}", value)
# elif isinstance(value, dict):
# hashes = []
# for key, value in value.items():
# if not value:
# continue
# hashes_iter = re.findall("[A-Z0-9]{12,13}", value)
# for h in hashes_iter:
# hashes.append(h)
# if not hashes:
# # Otherwise the user could inject plaintext search queries
# if not user.has_perm("core.bypass_hashing"):
# data_dict[key] = SearchDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# continue
# else:
# # There are hashes here but there shouldn't be!
# if key in settings.TAG_SEARCH_DENY:
# data_dict[key] = LookupDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# continue
# for hash in hashes:
# hash_list.add(hash)
# if hash_list:
# values = r.hmget(cache, *hash_list)
# if not values:
# return
# for index, val in enumerate(values):
# if val is None:
# values[index] = b"ERR"
# values = [x.decode() for x in values]
# total = dict(zip(hash_list, values))
# for key in data_dict.keys():
# for hash in total:
# if data_dict[key]:
# if isinstance(data_dict[key], str):
# if hash in data_dict[key]:
# data_dict[key] = data_dict[key].replace(
# f"{hash}", total[hash]
# )
# elif isinstance(data_dict[key], dict):
# for k2, v2 in data_dict[key].items():
# if hash in v2:
# data_dict[key][k2] = v2.replace(f"{hash}", total[hash])
# return denied
# def encrypt_list(user, data, secret):
# if user.has_perm("core.bypass_encryption"):
# return
# cipher = Cipher(algorithms.AES(secret), ECB())
# for index, item in enumerate(data):
# for key, value in item.items():
# if key not in settings.WHITELIST_FIELDS:
# encryptor = cipher.encryptor()
# if isinstance(value, int):
# value = str(value)
# if isinstance(value, bool):
# continue
# if value is None:
# continue
# decoded = value.encode("utf8", "replace")
# length = 16 - (len(decoded) % 16)
# decoded += bytes([length]) * length
# ct = encryptor.update(decoded) + encryptor.finalize()
# final_str = b64encode(ct)
# data[index][key] = final_str.decode("utf-8", "replace")

View File


@@ -0,0 +1,36 @@
from django.shortcuts import render
from django.views import View
from core.db.storage import db
from mixins.views import ObjectRead
from core.views.manage.permissions import SuperUserRequiredMixin
class MonolithStats(SuperUserRequiredMixin, View):
template_name = "manage/monolith/stats/index.html"
def get(self, request):
return render(request, self.template_name)
class MonolithDBStats(SuperUserRequiredMixin, ObjectRead):
detail_template = "manage/monolith/stats/overview.html"
context_object_name_singular = "Status"
context_object_name = "Status"
detail_url_name = "monolith_stats_db"
detail_url_args = ["type"]
def get_object(self, **kwargs):
search_query = "SHOW TABLE main STATUS"
stats = db.run_query(
self.request.user,
search_query=search_query,
path="sql?mode=raw",
raw=True,
#method="get",
)
return stats
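Under the hood this runs Manticore's SHOW TABLE ... STATUS through the HTTP /sql endpoint in raw mode. A standalone sketch of the equivalent call (the URL is an assumption; db.run_query wraps the app-specific plumbing):

import requests

MANTICORE_URL = "http://localhost:9308"  # assumption: default HTTP port

def show_table_status(table: str) -> list:
    # mode=raw accepts arbitrary SQL and returns plain result sets
    resp = requests.post(
        f"{MANTICORE_URL}/sql?mode=raw",
        data={"query": f"SHOW TABLE {table} STATUS"},
        timeout=10,
    )
    resp.raise_for_status()
    # Each result set carries a "data" list of
    # {"Variable_name": ..., "Value": ...} rows, which is exactly what
    # the overview template iterates over
    return resp.json()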

View File

@@ -1,12 +1,12 @@
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.shortcuts import render
from mixins.views import ObjectCreate, ObjectDelete, ObjectList, ObjectUpdate
from rest_framework.views import APIView
from core.db.storage import db
from core.forms import NotificationRuleForm, NotificationSettingsForm
from core.lib.rules import NotificationRuleData
from core.models import NotificationRule, NotificationSettings
from core.views.helpers import ObjectCreate, ObjectDelete, ObjectList, ObjectUpdate
# Notifications - we create a new notification settings object if there isn't one
@@ -70,7 +70,7 @@ class RuleClear(LoginRequiredMixin, PermissionRequiredMixin, APIView):
permission_required = "use_rules"
def post(self, request, type, pk):
template_name = "partials/notify.html"
template_name = "mixins/partials/notify.html"
rule = NotificationRule.objects.get(pk=pk, user=request.user)
if isinstance(rule.match, dict):
for index in rule.match:

View File

@@ -81,15 +81,21 @@ def make_graph(results):
graph = []
for index, item in enumerate(results):
date = str(index)
sentiment = None
if "meta" in item:
if "aggs" in item["meta"]:
if "avg_sentiment" in item["meta"]["aggs"]:
sentiment = item["meta"]["aggs"]["avg_sentiment"]["value"]
else:
if "sentiment" in item:
sentiment = item["sentiment"]
graph.append(
{
"text": item.get("words_noun", None)
or item.get("msg", None)
or item.get("id"),
"text": item.get("msg", None) or item.get("id"),
"nick": item.get("nick", None),
"channel": item.get("channel", None),
"net": item.get("net", None),
"value": item.get("sentiment", None) or None,
"value": sentiment,
"date": date,
}
)
@@ -98,10 +104,11 @@ def make_graph(results):
class DrilldownTableView(SingleTableView):
table_class = DrilldownTable
template_name = "wm/widget.html"
template_name = "mixins/wm/widget.html"
window_content = "window-content/results.html"
# htmx_partial = "partials/"
paginate_by = settings.DRILLDOWN_RESULTS_PER_PAGE
widget_options = 'gs-w="10" gs-h="1" gs-y="10" gs-x="1"'
def common_request(self, request, **kwargs):
extra_params = {}
@@ -112,16 +119,20 @@ class DrilldownTableView(SingleTableView):
sizes = settings.MAIN_SIZES
if request.GET:
print("GET")
self.template_name = "index.html"
# GET arguments in URL like ?query=xyz
query_params = request.GET.dict()
print("QUERY_PARAMS GET", query_params)
if request.htmx:
if request.resolver_match.url_name == "search_partial":
self.template_name = "partials/results_table.html"
elif request.POST:
print("POST")
query_params = request.POST.dict()
else:
self.template_name = "index.html"
print("FRESH")
# No query, this is a fresh page load
# Don't try to search, since there's clearly nothing to do
params_with_defaults = {}
@@ -130,6 +141,7 @@ class DrilldownTableView(SingleTableView):
"sizes": sizes,
"params": params_with_defaults,
"unique": "results",
"widget_options": self.widget_options,
"window_content": self.window_content,
"title": "Results",
}
@@ -187,6 +199,7 @@ class DrilldownTableView(SingleTableView):
# We don't want a random one since we only want one results pane.
context["unique"] = "results"
context["window_content"] = self.window_content
context["widget_options"] = self.widget_options
context["title"] = "Results"
# Valid sizes
@@ -209,9 +222,9 @@ class DrilldownTableView(SingleTableView):
# Still push the URL so they can share it to get assistance
if request.GET:
if request.htmx:
response["HX-Push"] = reverse("home") + "?" + url_params
response["HX-Replace-Url"] = reverse("home") + "?" + url_params
elif request.POST:
response["HX-Push"] = reverse("home") + "?" + url_params
response["HX-Replace-Url"] = reverse("home") + "?" + url_params
return response
# Create data for chart.js sentiment graph
@@ -265,7 +278,7 @@ class DrilldownTableView(SingleTableView):
response = self.render_to_response(context)
# if not request.method == "GET":
if "client_uri" in context:
response["HX-Push"] = reverse("home") + "?" + context["client_uri"]
response["HX-Replace-Url"] = reverse("home") + "?" + context["client_uri"]
return response
def post(self, request, *args, **kwargs):
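The practical difference: HX-Push adds a browser history entry per search, while HX-Replace-Url rewrites the current one, so refining a query no longer pollutes the back button. In a bare Django view the pattern is just a response header, sketched here:

from django.shortcuts import render
from django.urls import reverse

def results(request):
    response = render(request, "partials/results_table.html")
    # Replace the visible URL instead of pushing a new history entry
    response["HX-Replace-Url"] = reverse("home") + "?" + request.GET.urlencode()
    return response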

579
docker-compose.prod.yml Normal file
View File

@@ -0,0 +1,579 @@
version: "2.2"
services:
app:
image: xf/neptune:latest
container_name: neptune
build:
context: .
args:
OPERATION: ${OPERATION}
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
# env_file:
# - stack.env
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
# networks:
# - default
# - xf
# - db
network_mode: host
processing:
image: xf/neptune:latest
container_name: processing_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py processing'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
# networks:
# - default
# - xf
# - db
network_mode: host
scheduling:
image: xf/neptune:latest
container_name: scheduling_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
# networks:
# - default
# - xf
# - db
network_mode: host
migration:
image: xf/neptune:latest
container_name: migration_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
# networks:
# - default
# - xf
# - db
network_mode: host
collectstatic:
image: xf/neptune:latest
container_name: collectstatic_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
# volumes_from:
# - tmp
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
depends_on:
redis:
condition: service_healthy
# networks:
# - default
# - xf
# - db
network_mode: host
# nginx:
# image: nginx:latest
# container_name: nginx_neptune
# ports:
# - ${APP_PORT}:9999
# ulimits:
# nproc: 65535
# nofile:
# soft: 65535
# hard: 65535
# volumes:
# - ${PORTAINER_GIT_DIR}:/code
# - ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
# - neptune_static:${STATIC_ROOT}
# # volumes_from:
# # - tmp
# networks:
# - default
# - xf
# depends_on:
# app:
# condition: service_started
# tmp:
# image: busybox
# container_name: tmp_neptune
# command: chmod -R 777 /var/run/socks
# volumes:
# - /var/run/socks
redis:
image: redis
container_name: redis_neptune
command: redis-server /etc/redis.conf
# ulimits:
# nproc: 65535
# nofile:
# soft: 65535
# hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
- neptune_redis_data:/data
- type: bind
source: /code/run
target: /var/run
# volumes_from:
# - tmp
healthcheck:
test: "redis-cli ping"
interval: 2s
timeout: 2s
retries: 15
# networks:
# - default
# - xf
# networks:
# default:
# driver: bridge
# xf:
# external: true
# db:
# external: true
volumes:
# neptune_static: {}
neptune_redis_data: {}
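
With network_mode: host on every service and the sockets handed around through the /code/run bind mount, the healthcheck above is the only place the stack verifies Redis is reachable. A quick manual check from inside the app container can use redis-py (already in the requirements); this is a sketch, with the socket path and database number taken from docker/redis.conf and stack.env:

# check_redis.py -- a minimal sketch; run it inside the app container, e.g.
#   podman exec neptune sh -c ". /venv/bin/activate && python check_redis.py"
import redis

# Socket path from docker/redis.conf; db 1 mirrors REDIS_DB in stack.env
r = redis.Redis(unix_socket_path="/var/run/neptune-redis.sock", db=1)
print(r.ping())  # prints True when the socket handoff works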

View File

@@ -1,194 +0,0 @@
version: "2.2"
services:
app:
image: pathogen/neptune:latest
container_name: neptune
build:
context: .
args:
OPERATION: ${OPERATION}
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
processing:
image: pathogen/neptune:latest
container_name: processing_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py processing'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
scheduling:
image: pathogen/neptune:latest
container_name: scheduling_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
migration:
image: pathogen/neptune:latest
container_name: migration_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
collectstatic:
image: pathogen/neptune:latest
container_name: collectstatic_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
env_file:
- stack.env
depends_on:
redis:
condition: service_healthy
nginx:
image: nginx:latest
container_name: nginx_neptune
ports:
- ${APP_PORT}:9999
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
networks:
- default
- pathogen
depends_on:
app:
condition: service_started
tmp:
image: busybox
container_name: tmp_neptune
command: chmod -R 777 /var/run/socks
volumes:
- /var/run/socks
redis:
image: redis
container_name: redis_neptune
command: redis-server /etc/redis.conf
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
volumes_from:
- tmp
healthcheck:
test: "redis-cli -s /var/run/socks/redis.sock ping"
interval: 2s
timeout: 2s
retries: 15
networks:
- default
- pathogen
networks:
default:
driver: bridge
pathogen:
external: true
elastic:
external: true
volumes:
neptune_static: {}

View File

@@ -1,5 +1,5 @@
unixsocket /var/run/socks/redis.sock
unixsocket /var/run/neptune-redis.sock
unixsocketperm 777
# For Monolith PubSub
port 6379
port 0
# port 6379
# requirepass changeme
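
The rewritten redis.conf drops TCP entirely (port 0) and serves only the unix socket, so every client has to be pointed at the socket path rather than a host:port pair. On the Django side that plausibly looks like the following settings sketch; django_redis is in the requirements, the socket path matches the unixsocket line above, and the db number reuses REDIS_DB_CACHE from stack.env, but the real app/settings.py may differ:

# app/settings.py (sketch) -- cache wired to Redis over the unix socket
import os

REDIS_SOCKET = "/var/run/neptune-redis.sock"  # unixsocket path from docker/redis.conf
REDIS_DB_CACHE = int(os.getenv("REDIS_DB_CACHE", "10"))

CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        # django_redis accepts unix:// URLs; REDIS_SOCKET already starts with "/"
        "LOCATION": f"unix://{REDIS_SOCKET}?db={REDIS_DB_CACHE}",
        "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
    }
}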

View File

@@ -4,9 +4,19 @@ module=app.wsgi:application
env=DJANGO_SETTINGS_MODULE=app.settings
master=1
pidfile=/tmp/project-master.pid
socket=0.0.0.0:8000
#socket=0.0.0.0:8000
socket=/var/run/uwsgi-neptune.sock
# Open up the socket permissions so the web server container can connect
chmod-socket=777
harakiri=20
max-requests=100000
#max-requests=100000
# Set a lower value for max-requests to prevent memory leaks from building up over time
max-requests=1000
# Recycle workers once their address space exceeds 512 MB or their RSS exceeds 256 MB
reload-on-as=512
reload-on-rss=256
vacuum=1
home=/venv
processes=12
processes=4
threads=2
log-level=debug

View File

@@ -21,3 +21,10 @@ orjson
msgpack
apscheduler
django-prettyjson
git+https://git.zm.is/XF/django-crud-mixins
# For caching
redis
hiredis
django-cachalot
django_redis
httpx
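
The new caching block pulls in the whole stack at once: redis as the client, hiredis for its faster C protocol parser, django_redis as the Django cache backend, and django-cachalot to cache ORM queries and invalidate entries when the underlying tables change. Wiring cachalot up is mostly settings; a sketch under the assumption that the default cache is the Redis one, with values mirroring stack.env:

# app/settings.py (sketch) -- ORM caching with django-cachalot
import os

INSTALLED_APPS = [
    # ... the project's existing apps ...
    "cachalot",  # must be listed so its cache-invalidation signals register
]

CACHALOT_ENABLED = True  # cache all ORM reads by default
# Short timeout mirroring CACHE_TIMEOUT=2 from stack.env; None would cache forever
CACHALOT_TIMEOUT = int(os.getenv("CACHE_TIMEOUT", "2"))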

View File

@@ -1,6 +1,86 @@
# General application settings
APP_PORT=5000
PORTAINER_GIT_DIR=.
APP_LOCAL_SETTINGS=./app/local_settings.py
APP_DATABASE_FILE=./db.sqlite3
STATIC_ROOT=/conf/static
OPERATION=dev
STATIC_ROOT=/code/static
OPERATION=uwsgi
# Elasticsearch settings
ELASTICSEARCH_URL=10.1.0.1
ELASTICSEARCH_PORT=9200
ELASTICSEARCH_TLS=True
ELASTICSEARCH_USERNAME=admin
ELASTICSEARCH_PASSWORD=secret
# Manticore settings
MANTICORE_URL=http://127.0.0.1:9308
# Database settings
DB_BACKEND=MANTICORE
INDEX_MAIN=main
INDEX_RESTRICTED=restricted
INDEX_META=meta
INDEX_INT=internal
INDEX_RULE_STORAGE=rule_storage
MAIN_SIZES=1,5,15,30,50,100,250,500,1000
MAIN_SIZES_ANON=1,5,15,30,50,100
MAIN_SOURCES=dis,4ch,all
SOURCES_RESTRICTED=irc
CACHE=True
CACHE_TIMEOUT=2
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE=15
DRILLDOWN_DEFAULT_SIZE=15
DRILLDOWN_DEFAULT_INDEX=main
DRILLDOWN_DEFAULT_SORTING=desc
DRILLDOWN_DEFAULT_SOURCE=all
# URLs
DOMAIN=spy.zm.is
URL=https://spy.zm.is
# Access control
ALLOWED_HOSTS=spy.zm.is
# CSRF
CSRF_TRUSTED_ORIGINS=https://spy.zm.is
# Stripe settings
BILLING_ENABLED=False
STRIPE_TEST=True
STRIPE_API_KEY_TEST=
STRIPE_PUBLIC_API_KEY_TEST=
STRIPE_API_KEY_PROD=
STRIPE_PUBLIC_API_KEY_PROD=
STRIPE_ENDPOINT_SECRET=
STRIPE_ADMIN_COUPON=
# Threshold settings
THRESHOLD_ENDPOINT=http://threshold:13869
THRESHOLD_API_KEY=api_1
THRESHOLD_API_TOKEN=
THRESHOLD_API_COUNTER=
# NickTrace settings
NICKTRACE_MAX_ITERATIONS=4
NICKTRACE_MAX_CHUNK_SIZE=500
NICKTRACE_QUERY_SIZE=10000
# Meta settings
META_MAX_ITERATIONS=4
META_MAX_CHUNK_SIZE=500
META_QUERY_SIZE=10000
# Debugging and profiling
DEBUG=n
PROFILER=False
# Redis settings
REDIS_HOST=redis_neptune
REDIS_PASSWORD=changeme
REDIS_DB=1
REDIS_DB_CACHE=10
REDIS_PORT=6379
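
The example stack.env mixes styles for its values: booleans appear both as True/False (ELASTICSEARCH_TLS, CACHE) and as y/n (DEBUG=n), and several settings are comma-separated lists (MAIN_SIZES, MAIN_SOURCES, ALLOWED_HOSTS). The settings module presumably normalises these when it reads the environment; a hypothetical sketch of that parsing, with helper names that are illustrative only:

# app/settings.py (sketch) -- normalising stack.env values; helper names are hypothetical
import os

def env_bool(name: str, default: str = "False") -> bool:
    # Accepts True/False as well as y/n style values such as DEBUG=n
    return os.getenv(name, default).strip().lower() in ("1", "true", "yes", "y")

def env_list(name: str, default: str = "") -> list[str]:
    # MAIN_SOURCES=dis,4ch,all -> ["dis", "4ch", "all"]
    return [part for part in os.getenv(name, default).split(",") if part]

DEBUG = env_bool("DEBUG")
CACHE = env_bool("CACHE")
MAIN_SIZES = [int(size) for size in env_list("MAIN_SIZES", "15")]
ALLOWED_HOSTS = env_list("ALLOWED_HOSTS")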