Compare commits

...

56 Commits

SHA1 Message Date
b61eda52e6 Update URLs 2025-05-09 21:01:58 +00:00
a4c3834b62 Port Manticore and debug 2025-01-23 11:35:39 +00:00
fe087eb591 Update more Redis URLs 2025-01-23 11:34:24 +00:00
6ff75890b9 Update Redis path to socket 2025-01-23 11:33:37 +00:00
c199d3d078 Update admin URL 2025-01-23 11:33:17 +00:00
66f4e4c264 Add static directory generated by collectstatic to ignore 2025-01-23 11:32:54 +00:00
b16db665e5 Update to work with Podman 2025-01-23 11:32:13 +00:00
86fb2ac593 Update to run with Podman 2024-12-29 17:37:23 +00:00
a519a4ce5e Change Redis parser class 2023-09-30 10:45:47 +00:00
f62f0881a1 Show ingest status 2023-02-14 07:20:28 +00:00
fd47a3ddc8 Use the sentiment aggregation value if present 2023-02-14 07:20:28 +00:00
d8cb3a263b Add dot 2023-02-14 07:20:27 +00:00
27fea06198 Allow disabling ingesting 2023-02-13 21:03:33 +00:00
0e12b0d185 Properly search tokens and annotate in matched field 2023-02-13 18:14:25 +00:00
6fe31d99a9 Re-add matches field 2023-02-13 17:23:30 +00:00
1ab7a95ebd Remove debug statements 2023-02-13 17:23:27 +00:00
d581d787de Increase topic length 2023-02-13 07:20:28 +00:00
4ead6ff7c1 Use cachalot to invalidate caches 2023-02-11 17:24:13 +00:00
9fcf5041f0 Use Hiredis 2023-02-11 16:01:42 +00:00
2fc476b830 Vary cache on URL 2023-02-11 15:48:46 +00:00
11d4542412 Cache the table and remove CRUD tools included in mixins 2023-02-11 15:44:20 +00:00
5d6f96bbf3 Cache more object lists 2023-02-11 14:58:36 +00:00
40a710f41e Add caching 2023-02-11 14:03:50 +00:00
87c232d3f9 Fix notification delivery 2023-02-10 22:52:59 +00:00
df273a6009 Switch database location and use mixins for CRUD 2023-02-10 20:57:17 +00:00
115c6dd1ad Add mixins and adjust database path 2023-02-10 20:53:11 +00:00
330cc6c401 Fix showing the debug toolbar 2023-02-10 07:20:12 +00:00
2050e6cb47 Add more comments about source parsing 2023-02-10 07:20:36 +00:00
7d0ebf87bd Fix source parsing and set default to all 2023-02-10 07:20:22 +00:00
c5856ce20b Use HX-Replace-Url properly and don't include column shifter twice on load 2023-02-10 07:20:22 +00:00
0518c9fe1c Remove comma after last entry in column shifter 2023-02-10 07:20:11 +00:00
29e57628e4 HX-Replace URLs instead of pushing 2023-02-09 23:38:12 +00:00
cb9500a36d Rename match and aggs fields 2023-02-09 23:34:29 +00:00
e993f0f20e Fix index handling for rule search context 2023-02-09 23:32:06 +00:00
56b268bd77 Fix sending aggs and matched fields 2023-02-09 23:18:16 +00:00
4042d60c57 Fix matched formatting 2023-02-09 22:59:00 +00:00
090fae013d Remove debug statements 2023-02-09 22:55:18 +00:00
2356c6bcd7 Fix formatting matched arguments 2023-02-09 22:54:38 +00:00
f4273e4453 Properly handle matched and meta fields, always return sentiment aggregations 2023-02-09 21:41:00 +00:00
c67d89c978 Implement deleting database matches 2023-02-09 21:17:50 +00:00
9a8bb9027f Make notifications available to all users and clear database matches on reset 2023-02-09 20:50:05 +00:00
9519c1ac9f Fix policies not triggering properly 2023-02-09 20:28:34 +00:00
7b6da7b704 Add batch_id to tables 2023-02-09 19:11:46 +00:00
0d564788b6 Implement policy parsing and add batch_id to rules 2023-02-09 19:11:38 +00:00
fd10a4ba8e Make things lowercase, improve search queries for rules 2023-02-09 19:10:15 +00:00
455da73b95 Improve results rendering 2023-02-09 19:09:32 +00:00
d8005fa15d Strip leading and trailing brackets from prettified JSON 2023-02-09 19:08:27 +00:00
6a01aea5e1 Add prettyjson 2023-02-09 19:07:49 +00:00
a1a5535079 Bump versions in pre-commit config 2023-02-09 07:20:35 +00:00
3f666e8251 Update pre-commit versions 2023-02-09 07:20:35 +00:00
66232c8260 Ingest no matches 2023-02-09 07:20:07 +00:00
2c12854a55 Set URL label properly 2023-02-09 07:20:07 +00:00
af5c212450 Ingest when there are no matches, add extra validation for send empty 2023-02-09 07:20:07 +00:00
2a034a16e7 Allow disabling notifications 2023-02-09 07:20:07 +00:00
c356f58d8a Add the time taken even where there are no hits 2023-02-09 07:20:28 +00:00
6a890723d9 Add index rule directive to example settings 2023-02-09 07:20:28 +00:00
70 changed files with 3137 additions and 2503 deletions

.gitignore

@@ -58,7 +58,6 @@ cover/
 # Django stuff:
 *.log
-local_settings.py
 db.sqlite3
 db.sqlite3-journal
@@ -156,3 +155,6 @@ cython_debug/
 .bash_history
 .python_history
 .vscode/
+stack.env
+static/


@@ -1,22 +1,22 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 22.6.0
+    rev: 23.1.0
    hooks:
      - id: black
        exclude: ^core/migrations
   - repo: https://github.com/PyCQA/isort
-    rev: 5.10.1
+    rev: 5.11.5
    hooks:
      - id: isort
        args: ["--profile", "black"]
   - repo: https://github.com/PyCQA/flake8
-    rev: 4.0.1
+    rev: 6.0.0
    hooks:
      - id: flake8
        args: [--max-line-length=88]
        exclude: ^core/migrations
   - repo: https://github.com/rtts/djhtml
-    rev: 'v1.5.2'  # replace with the latest tag on GitHub
+    rev: v2.0.0
    hooks:
      - id: djhtml
        args: [-t 2]


@@ -2,17 +2,17 @@
 FROM python:3
 ARG OPERATION
-RUN useradd -d /code pathogen
+RUN useradd -d /code xf
 RUN mkdir -p /code
-RUN chown -R pathogen:pathogen /code
+RUN chown -R xf:xf /code
 RUN mkdir -p /conf/static
-RUN chown -R pathogen:pathogen /conf
+RUN chown -R xf:xf /conf
 RUN mkdir /venv
-RUN chown pathogen:pathogen /venv
+RUN chown xf:xf /venv
-USER pathogen
+USER xf
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
 WORKDIR /code


@@ -1,20 +1,20 @@
 run:
-	docker-compose --env-file=stack.env up -d
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env up -d
 build:
-	docker-compose --env-file=stack.env build
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env build
 stop:
-	docker-compose --env-file=stack.env down
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env down
 log:
-	docker-compose --env-file=stack.env logs -f
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env logs -f --names
 migrate:
-	docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
 makemigrations:
-	docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
 auth:
-	docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"


@@ -1,3 +1,5 @@
+from os import getenv
+
 # Elasticsearch settings
 ELASTICSEARCH_URL = "10.1.0.1"
 ELASTICSEARCH_PORT = 9200
@@ -15,6 +17,7 @@ INDEX_MAIN = "main"
 INDEX_RESTRICTED = "restricted"
 INDEX_META = "meta"
 INDEX_INT = "internal"
+INDEX_RULE_STORAGE = "rule_storage"
 MAIN_SIZES = ["1", "5", "15", "30", "50", "100", "250", "500", "1000"]
 MAIN_SIZES_ANON = ["1", "5", "15", "30", "50", "100"]
@@ -28,7 +31,7 @@ DRILLDOWN_DEFAULT_PARAMS = {
     "size": "15",
     "index": "main",
     "sorting": "desc",
-    "source": "4ch",
+    "source": "all",
 }
@@ -103,3 +106,8 @@ META_QUERY_SIZE = 10000
 DEBUG = True
 PROFILER = False
+
+REDIS_HOST = getenv("REDIS_HOST", "redis_fisk_dev")
+REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
+REDIS_DB = int(getenv("REDIS_DB", "10"))
+REDIS_PORT = int(getenv("REDIS_PORT", "6379"))

app/local_settings.py (new file, 87 lines)

@@ -0,0 +1,87 @@
from os import getenv
trues = ("t", "true", "yes", "y", "1")
# Elasticsearch settings
ELASTICSEARCH_URL = getenv("ELASTICSEARCH_URL", "10.1.0.1")
ELASTICSEARCH_PORT = int(getenv("ELASTICSEARCH_PORT", "9200"))
ELASTICSEARCH_TLS = getenv("ELASTICSEARCH_TLS", "True").lower() in trues
ELASTICSEARCH_USERNAME = getenv("ELASTICSEARCH_USERNAME", "admin")
ELASTICSEARCH_PASSWORD = getenv("ELASTICSEARCH_PASSWORD", "secret")
# Manticore settings
MANTICORE_URL = getenv("MANTICORE_URL", "http://example-db-1:9308")
DB_BACKEND = getenv("DB_BACKEND", "MANTICORE")
# Common DB settings
INDEX_MAIN = getenv("INDEX_MAIN", "main")
INDEX_RESTRICTED = getenv("INDEX_RESTRICTED", "restricted")
INDEX_META = getenv("INDEX_META", "meta")
INDEX_INT = getenv("INDEX_INT", "internal")
INDEX_RULE_STORAGE = getenv("INDEX_RULE_STORAGE", "rule_storage")
MAIN_SIZES = getenv("MAIN_SIZES", "1,5,15,30,50,100,250,500,1000").split(",")
MAIN_SIZES_ANON = getenv("MAIN_SIZES_ANON", "1,5,15,30,50,100").split(",")
MAIN_SOURCES = getenv("MAIN_SOURCES", "dis,4ch,all").split(",")
SOURCES_RESTRICTED = getenv("SOURCES_RESTRICTED", "irc").split(",")
CACHE = getenv("CACHE", "False").lower() in trues
CACHE_TIMEOUT = int(getenv("CACHE_TIMEOUT", "2"))
DRILLDOWN_RESULTS_PER_PAGE = int(getenv("DRILLDOWN_RESULTS_PER_PAGE", "15"))
DRILLDOWN_DEFAULT_PARAMS = {
"size": getenv("DRILLDOWN_DEFAULT_SIZE", "15"),
"index": getenv("DRILLDOWN_DEFAULT_INDEX", "main"),
"sorting": getenv("DRILLDOWN_DEFAULT_SORTING", "desc"),
"source": getenv("DRILLDOWN_DEFAULT_SOURCE", "all"),
}
# URLs
DOMAIN = getenv("DOMAIN", "example.com")
URL = getenv("URL", f"https://{DOMAIN}")
# Access control
ALLOWED_HOSTS = getenv("ALLOWED_HOSTS", f"127.0.0.1,{DOMAIN}").split(",")
# CSRF
CSRF_TRUSTED_ORIGINS = getenv("CSRF_TRUSTED_ORIGINS", URL).split(",")
# Stripe
BILLING_ENABLED = getenv("BILLING_ENABLED", "false").lower() in trues
STRIPE_TEST = getenv("STRIPE_TEST", "True").lower() in trues
STRIPE_API_KEY_TEST = getenv("STRIPE_API_KEY_TEST", "")
STRIPE_PUBLIC_API_KEY_TEST = getenv("STRIPE_PUBLIC_API_KEY_TEST", "")
STRIPE_API_KEY_PROD = getenv("STRIPE_API_KEY_PROD", "")
STRIPE_PUBLIC_API_KEY_PROD = getenv("STRIPE_PUBLIC_API_KEY_PROD", "")
STRIPE_ENDPOINT_SECRET = getenv("STRIPE_ENDPOINT_SECRET", "")
STATIC_ROOT = getenv("STATIC_ROOT", "")
SECRET_KEY = getenv("SECRET_KEY", "a")
STRIPE_ADMIN_COUPON = getenv("STRIPE_ADMIN_COUPON", "")
# Threshold
THRESHOLD_ENDPOINT = getenv("THRESHOLD_ENDPOINT", "http://threshold:13869")
THRESHOLD_API_KEY = getenv("THRESHOLD_API_KEY", "api_1")
THRESHOLD_API_TOKEN = getenv("THRESHOLD_API_TOKEN", "")
THRESHOLD_API_COUNTER = getenv("THRESHOLD_API_COUNTER", "")
# NickTrace
NICKTRACE_MAX_ITERATIONS = int(getenv("NICKTRACE_MAX_ITERATIONS", "4"))
NICKTRACE_MAX_CHUNK_SIZE = int(getenv("NICKTRACE_MAX_CHUNK_SIZE", "500"))
NICKTRACE_QUERY_SIZE = int(getenv("NICKTRACE_QUERY_SIZE", "10000"))
# Meta
META_MAX_ITERATIONS = int(getenv("META_MAX_ITERATIONS", "4"))
META_MAX_CHUNK_SIZE = int(getenv("META_MAX_CHUNK_SIZE", "500"))
META_QUERY_SIZE = int(getenv("META_QUERY_SIZE", "10000"))
DEBUG = getenv("DEBUG", "True").lower() in trues
PROFILER = getenv("PROFILER", "False").lower() in trues
REDIS_HOST = getenv("REDIS_HOST", "redis_neptune_dev")
REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
REDIS_DB = int(getenv("REDIS_DB", "1"))
REDIS_DB_CACHE = int(getenv("REDIS_DB_CACHE", "10"))
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
# Elasticsearch blacklist
ELASTICSEARCH_BLACKLISTED = {}
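
Every setting in the new local_settings.py is derived from the environment, with string values coerced to the right type. A minimal standalone sketch of the same pattern (names and defaults taken from the file above):

from os import getenv

# String values treated as "true" for boolean settings
trues = ("t", "true", "yes", "y", "1")

# str -> bool: "True", "yes", "1" all enable the flag
DEBUG = getenv("DEBUG", "True").lower() in trues
# str -> int, with a safe default
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
# comma-separated str -> list
MAIN_SOURCES = getenv("MAIN_SOURCES", "dis,4ch,all").split(",")

print(DEBUG, REDIS_PORT, MAIN_SOURCES)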


@@ -42,7 +42,11 @@ INSTALLED_APPS = [
     "crispy_bulma",
     "django_tables2",
     "django_tables2_bulma_template",
+    "prettyjson",
+    "mixins",
+    "cachalot",
 ]
 CRISPY_TEMPLATE_PACK = "bulma"
 CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",)
 DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html"
@@ -51,7 +55,9 @@ MIDDLEWARE = [
     "debug_toolbar.middleware.DebugToolbarMiddleware",
     "django.middleware.security.SecurityMiddleware",
     "django.contrib.sessions.middleware.SessionMiddleware",
+    # 'django.middleware.cache.UpdateCacheMiddleware',
     "django.middleware.common.CommonMiddleware",
+    # 'django.middleware.cache.FetchFromCacheMiddleware',
     "django.middleware.csrf.CsrfViewMiddleware",
     "django.contrib.auth.middleware.AuthenticationMiddleware",
     "django.contrib.messages.middleware.MessageMiddleware",
@@ -86,7 +92,7 @@ WSGI_APPLICATION = "app.wsgi.application"
 DATABASES = {
     "default": {
         "ENGINE": "django.db.backends.sqlite3",
-        "NAME": BASE_DIR / "db.sqlite3",
+        "NAME": "/conf/db.sqlite3",
     }
 }
@@ -144,7 +150,7 @@ REST_FRAMEWORK = {
 INTERNAL_IPS = [
     "127.0.0.1",
-    "10.1.10.11",
+    # "10.1.10.11",
 ]
 
 DEBUG_TOOLBAR_PANELS = [
@@ -163,10 +169,27 @@ DEBUG_TOOLBAR_PANELS = [
     "debug_toolbar.panels.logging.LoggingPanel",
     "debug_toolbar.panels.redirects.RedirectsPanel",
     "debug_toolbar.panels.profiling.ProfilingPanel",
+    "cachalot.panels.CachalotPanel",
 ]
 
 from app.local_settings import *  # noqa
 
+# Performance optimisations
+CACHES = {
+    "default": {
+        "BACKEND": "django_redis.cache.RedisCache",
+        # "LOCATION": "unix:///var/run/socks/redis.sock",
+        # "LOCATION": f"redis://{REDIS_HOST}:{REDIS_PORT}",
+        "LOCATION": "unix:///var/run/neptune-redis.sock",
+        "OPTIONS": {
+            "db": REDIS_DB,
+            # "parser_class": "django_redis.cache.RedisCache",
+            # "PASSWORD": REDIS_PASSWORD,
+            "pool_class": "redis.BlockingConnectionPool",
+        },
+    }
+}
+
 if PROFILER:  # noqa - trust me its there
     import pyroscope
@@ -178,3 +201,12 @@ if PROFILER:  # noqa - trust me its there
         #     "region": f'{os.getenv("REGION")}',
         # }
     )
+
+
+def show_toolbar(request):
+    return DEBUG  # noqa: from local imports
+
+
+DEBUG_TOOLBAR_CONFIG = {
+    "SHOW_TOOLBAR_CALLBACK": show_toolbar,
+}
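
The CACHES block above points django-redis at a Unix domain socket rather than TCP. A minimal sketch of checking that same socket with plain redis-py, assuming the socket path and the REDIS_DB default from this changeset:

from redis import StrictRedis

# Same socket the CACHES block uses; adjust the path for your deployment.
r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=1)
print(r.ping())  # True when the socket is mounted and Redis is reachable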


@@ -58,6 +58,9 @@ from core.views.manage.threshold.threshold import (
     ThresholdIRCOverview,
 )
 
+# Stats
+from core.views.manage.monolith import stats
+
 # Main tool pages
 from core.views.ui.drilldown import (  # DrilldownTableView,; Drilldown,
     DrilldownContextModal,
@@ -92,7 +95,7 @@ urlpatterns = [
     ),
     path("cancel/", TemplateView.as_view(template_name="cancel.html"), name="cancel"),
     path("portal", Portal.as_view(), name="portal"),
-    path("admin/", admin.site.urls),
+    path("sapp/", admin.site.urls),
     path("accounts/", include("django.contrib.auth.urls")),
     path("accounts/signup/", Signup.as_view(), name="signup"),
     ##
@@ -311,4 +314,14 @@ urlpatterns = [
         notifications.RuleClear.as_view(),
         name="rule_clear",
     ),
+    path(
+        "manage/monolith/stats/",
+        stats.MonolithStats.as_view(),
+        name="monolith_stats",
+    ),
+    path(
+        "manage/monolith/stats_db/<str:type>/",
+        stats.MonolithDBStats.as_view(),
+        name="monolith_stats_db",
+    )
 ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)


@@ -6,8 +6,15 @@ from redis import StrictRedis
 
 os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
 
-r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
+# /var/run/neptune-redis.sock
+# use the socket
+r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=settings.REDIS_DB)
+# r = StrictRedis(
+#     host=settings.REDIS_HOST,
+#     port=settings.REDIS_PORT,
+#     password=settings.REDIS_PASSWORD,
+#     db=settings.REDIS_DB
+# )
 
 if settings.STRIPE_TEST:
     stripe.api_key = settings.STRIPE_API_KEY_TEST


@@ -168,6 +168,71 @@ class StorageBackend(ABC):
         # Actually get rid of all the things we set to None
         response["hits"]["hits"] = [hit for hit in response["hits"]["hits"] if hit]
 
+    def add_bool(self, search_query, add_bool):
+        """
+        Add the specified boolean matches to search query.
+        """
+        if not add_bool:
+            return
+        for item in add_bool:
+            search_query["query"]["bool"]["must"].append({"match_phrase": item})
+
+    def add_top(self, search_query, add_top, negative=False):
+        """
+        Merge add_top with the base of the search_query.
+        """
+        if not add_top:
+            return
+        if negative:
+            for item in add_top:
+                if "must_not" in search_query["query"]["bool"]:
+                    search_query["query"]["bool"]["must_not"].append(item)
+                else:
+                    search_query["query"]["bool"]["must_not"] = [item]
+        else:
+            for item in add_top:
+                if "query" not in search_query:
+                    search_query["query"] = {"bool": {"must": []}}
+                search_query["query"]["bool"]["must"].append(item)
+
+    def schedule_check_aggregations(self, rule_object, result_map):
+        """
+        Check the results of a scheduled query for aggregations.
+        """
+        if rule_object.aggs is None:
+            return result_map
+        for index, (meta, result) in result_map.items():
+            # Default to true, if no aggs are found, we still want to match
+            match = True
+            for agg_name, (operator, number) in rule_object.aggs.items():
+                if agg_name in meta["aggs"]:
+                    agg_value = meta["aggs"][agg_name]["value"]
+                    # TODO: simplify this, match is default to True
+                    if operator == ">":
+                        if agg_value > number:
+                            match = True
+                        else:
+                            match = False
+                    elif operator == "<":
+                        if agg_value < number:
+                            match = True
+                        else:
+                            match = False
+                    elif operator == "=":
+                        if agg_value == number:
+                            match = True
+                        else:
+                            match = False
+                    else:
+                        match = False
+                else:
+                    # No aggregation found, but it is required
+                    match = False
+                result_map[index][0]["aggs"][agg_name]["match"] = match
+        return result_map
+
     def query(self, user, search_query, **kwargs):
         # For time tracking
         start = time.process_time()
@@ -195,13 +260,38 @@ class StorageBackend(ABC):
             message = f"Error: {response.info['error']['root_cause'][0]['type']}"
             message_class = "danger"
             return {"message": message, "class": message_class}
-        if len(response["hits"]["hits"]) == 0:
-            message = "No results."
-            message_class = "danger"
-            return {"message": message, "class": message_class}
+        if "took" in response:
+            if response["took"] is None:
+                return None
+            if "error" in response:
+                message = f"Error: {response['error']}"
+                message_class = "danger"
+                time_took = (time.process_time() - start) * 1000
+                # Round to 3 significant figures
+                time_took_rounded = round(
+                    time_took, 3 - int(floor(log10(abs(time_took)))) - 1
+                )
+                return {
+                    "message": message,
+                    "class": message_class,
+                    "took": time_took_rounded,
+                }
+            elif len(response["hits"]["hits"]) == 0:
+                message = "No results."
+                message_class = "danger"
+                time_took = (time.process_time() - start) * 1000
+                # Round to 3 significant figures
+                time_took_rounded = round(
+                    time_took, 3 - int(floor(log10(abs(time_took)))) - 1
+                )
+                return {
+                    "message": message,
+                    "class": message_class,
+                    "took": time_took_rounded,
+                }
         # For Druid
-        if "error" in response:
+        elif "error" in response:
             if "errorMessage" in response:
                 context = {
                     "message": response["errorMessage"],
@@ -210,9 +300,6 @@ class StorageBackend(ABC):
                 return context
             else:
                 return response
-        if "took" in response:
-            if response["took"] is None:
-                return None
 
         # Removed for now, no point given we have restricted indexes
         # self.filter_blacklisted(user, response)
@@ -231,6 +318,106 @@ class StorageBackend(ABC):
         time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1)
         return {"object_list": response_parsed, "took": time_took_rounded}
 
+    def construct_context_query(
+        self, index, net, channel, src, num, size, type=None, nicks=None
+    ):
+        # Get the initial query
+        query = self.construct_query(None, size, blank=True)
+
+        extra_must = []
+        extra_should = []
+        extra_should2 = []
+        if num:
+            extra_must.append({"match_phrase": {"num": num}})
+        if net:
+            extra_must.append({"match_phrase": {"net": net}})
+        if channel:
+            extra_must.append({"match": {"channel": channel}})
+        if nicks:
+            for nick in nicks:
+                extra_should2.append({"match": {"nick": nick}})
+
+        types = ["msg", "notice", "action", "kick", "topic", "mode"]
+        fields = [
+            "nick",
+            "ident",
+            "host",
+            "channel",
+            "ts",
+            "msg",
+            "type",
+            "net",
+            "src",
+            "tokens",
+        ]
+        query["fields"] = fields
+
+        if index == "internal":
+            fields.append("mtype")
+            if channel == "*status" or type == "znc":
+                if {"match": {"channel": channel}} in extra_must:
+                    extra_must.remove({"match": {"channel": channel}})
+                extra_should2 = []
+                # Type is one of msg or notice
+                # extra_should.append({"match": {"mtype": "msg"}})
+                # extra_should.append({"match": {"mtype": "notice"}})
+                extra_should.append({"match": {"type": "znc"}})
+                extra_should.append({"match": {"type": "self"}})
+
+                extra_should2.append({"match": {"type": "znc"}})
+                extra_should2.append({"match": {"nick": channel}})
+            elif type == "auth":
+                if {"match": {"channel": channel}} in extra_must:
+                    extra_must.remove({"match": {"channel": channel}})
+                extra_should2 = []
+                extra_should2.append({"match": {"nick": channel}})
+                # extra_should2.append({"match": {"mtype": "msg"}})
+                # extra_should2.append({"match": {"mtype": "notice"}})
+
+                extra_should.append({"match": {"type": "query"}})
+                extra_should2.append({"match": {"type": "self"}})
+                extra_should.append({"match": {"nick": channel}})
+            else:
+                for ctype in types:
+                    extra_should.append({"match": {"mtype": ctype}})
+        else:
+            for ctype in types:
+                extra_should.append({"match": {"type": ctype}})
+        # query = {
+        #     "index": index,
+        #     "limit": size,
+        #     "query": {
+        #         "bool": {
+        #             "must": [
+        #                 # {"equals": {"src": src}},
+        #                 # {
+        #                 #     "bool": {
+        #                 #         "should": [*extra_should],
+        #                 #     }
+        #                 # },
+        #                 # {
+        #                 #     "bool": {
+        #                 #         "should": [*extra_should2],
+        #                 #     }
+        #                 # },
+        #                 *extra_must,
+        #             ]
+        #         }
+        #     },
+        #     "fields": fields,
+        #     # "_source": False,
+        # }
+        if extra_must:
+            for x in extra_must:
+                query["query"]["bool"]["must"].append(x)
+        if extra_should:
+            query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
+        if extra_should2:
+            query["query"]["bool"]["must"].append(
+                {"bool": {"should": [*extra_should2]}}
+            )
+        return query
+
     @abstractmethod
     def query_results(self, **kwargs):
         pass
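
The new add_bool and add_top helpers mutate the query dict in place: add_bool wraps each item in a match_phrase clause, while add_top appends clauses verbatim (or into must_not when negative=True). A standalone sketch of the resulting structure, assuming the blank-query shape produced by construct_query:

# Blank query shape assumed from construct_query(..., blank=True)
search_query = {"limit": 5, "query": {"bool": {"must": []}}}

# add_bool: each item becomes a match_phrase clause
for item in [{"nick": "xf"}]:
    search_query["query"]["bool"]["must"].append({"match_phrase": item})

# add_top: items are appended verbatim
for item in [{"range": {"ts": {"gte": "now-1d", "lte": "now"}}}]:
    search_query["query"]["bool"]["must"].append(item)

print(search_query)
# {'limit': 5, 'query': {'bool': {'must': [
#     {'match_phrase': {'nick': 'xf'}},
#     {'range': {'ts': {'gte': 'now-1d', 'lte': 'now'}}}]}}}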


@@ -20,7 +20,7 @@ logger = logging.getLogger(__name__)
 
 class DruidBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Druid")
+        super().__init__("druid")
 
     def initialise(self, **kwargs):
         # self.client = PyDruid("http://broker:8082", "druid/v2")


@@ -33,7 +33,7 @@ mapping = {
             "ts": {"type": "date", "format": "epoch_second"},
             "match_ts": {"type": "date", "format": "iso8601"},
             "file_tim": {"type": "date", "format": "epoch_millis"},
-            "rule_uuid": {"type": "keyword"},
+            "rule_id": {"type": "keyword"},
         }
     }
 }
@@ -43,7 +43,7 @@ for field in keyword_fields:
 
 class ElasticsearchBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Elasticsearch")
+        super().__init__("elasticsearch")
         self.client = None
         self.async_client = None
@@ -79,6 +79,21 @@ class ElasticsearchBackend(StorageBackend):
             index=settings.INDEX_RULE_STORAGE, mappings=mapping["mappings"]
         )
 
+    def delete_rule_entries(self, rule_id):
+        """
+        Delete all entries for a given rule.
+        :param rule_id: The rule ID to delete.
+        """
+        if self.client is None:
+            self.initialise()
+        search_query = self.construct_query(None, None, blank=True)
+        search_query["query"]["bool"]["must"].append(
+            {"match_phrase": {"rule_id": rule_id}}
+        )
+        return self.client.delete_by_query(
+            index=settings.INDEX_RULE_STORAGE, body=search_query
+        )
+
     def construct_context_query(
         self, index, net, channel, src, num, size, type=None, nicks=None
     ):
@@ -294,11 +309,10 @@ class ElasticsearchBackend(StorageBackend):
                 self.log.error(f"Indexing failed: {result}")
         self.log.debug(f"Indexed {len(matches)} messages in ES")
 
-    async def schedule_query_results(self, rule_object):
+    def prepare_schedule_query(self, rule_object):
         """
-        Helper to run a scheduled query with reduced functionality and async.
+        Helper to run a scheduled query with reduced functionality.
         """
-
         data = rule_object.parsed
 
         if "tags" in data:
@@ -310,8 +324,6 @@ class ElasticsearchBackend(StorageBackend):
             query = data["query"][0]
             data["query"] = query
 
-        result_map = {}
-
         add_bool = []
         add_top = []
         if "source" in data:
@@ -326,8 +338,15 @@ class ElasticsearchBackend(StorageBackend):
                     {"match_phrase": {"src": source_iter}}
                 )
             add_top.append(add_top_tmp)
+        if "tokens" in data:
+            add_top_tmp = {"bool": {"should": []}}
+            for token in data["tokens"]:
+                add_top_tmp["bool"]["should"].append(
+                    {"match_phrase": {"tokens": token}}
+                )
+            add_top.append(add_top_tmp)
         for field, values in data.items():
-            if field not in ["source", "index", "tags", "query", "sentiment"]:
+            if field not in ["source", "index", "tags", "query", "sentiment", "tokens"]:
                 for value in values:
                     add_top.append({"match": {field: value}})
         # Bypass the check for query and tags membership since we can search by msg, etc
@@ -338,22 +357,58 @@ class ElasticsearchBackend(StorageBackend):
         range_query = {
             "range": {
                 "ts": {
-                    "gte": f"now-{rule_object.window}/d",
-                    "lte": "now/d",
+                    "gte": f"now-{rule_object.window}",
+                    "lte": "now",
                 }
             }
         }
         add_top.append(range_query)
         self.add_bool(search_query, add_bool)
         self.add_top(search_query, add_top)
-        if "sentiment" in data:
-            search_query["aggs"] = {
-                "avg_sentiment": {
-                    "avg": {"field": "sentiment"},
-                }
+        # if "sentiment" in data:
+        search_query["aggs"] = {
+            "avg_sentiment": {
+                "avg": {"field": "sentiment"},
             }
-        for index in data["index"]:
+        }
+        return search_query
+
+    def schedule_query_results_test_sync(self, rule_object):
+        """
+        Helper to run a scheduled query test with reduced functionality.
+        Sync version for running from Django forms.
+        Does not return results.
+        """
+        data = rule_object.parsed
+        search_query = self.prepare_schedule_query(rule_object)
+        for index in data["index"]:
+            if "message" in search_query:
+                self.log.error(f"Error parsing test query: {search_query['message']}")
+                continue
+            response = self.run_query(
+                rule_object.user,
+                search_query,
+                index=index,
+            )
+            self.log.debug(f"Running scheduled test query on {index}: {search_query}")
+            # self.log.debug(f"Response from scheduled query: {response}")
+            if isinstance(response, Exception):
+                error = response.info["error"]["root_cause"][0]["reason"]
+                self.log.error(f"Error running test scheduled search: {error}")
+                raise QueryError(error)
+
+    async def schedule_query_results(self, rule_object):
+        """
+        Helper to run a scheduled query with reduced functionality and async.
+        """
+        result_map = {}
+        data = rule_object.parsed
+        search_query = self.prepare_schedule_query(rule_object)
+        for index in data["index"]:
             if "message" in search_query:
                 self.log.error(f"Error parsing query: {search_query['message']}")
                 continue
@@ -370,6 +425,7 @@ class ElasticsearchBackend(StorageBackend):
                 raise QueryError(error)
             if len(response["hits"]["hits"]) == 0:
                 # No results, skip
+                result_map[index] = ({}, [])
                 continue
             meta, response = self.parse(response, meta=True)
             # print("Parsed response", response)
@@ -380,35 +436,7 @@ class ElasticsearchBackend(StorageBackend):
         # Average aggregation check
         # Could probably do this in elasticsearch
-        for index, (meta, result) in result_map.items():
-            # Default to true, if no aggs are found, we still want to match
-            match = True
-            for agg_name, (operator, number) in rule_object.aggs.items():
-                if agg_name in meta:
-                    agg_value = meta["aggs"][agg_name]["value"]
-                    # TODO: simplify this, match is default to True
-                    if operator == ">":
-                        if agg_value > number:
-                            match = True
-                        else:
-                            match = False
-                    elif operator == "<":
-                        if agg_value < number:
-                            match = True
-                        else:
-                            match = False
-                    elif operator == "=":
-                        if agg_value == number:
-                            match = True
-                        else:
-                            match = False
-                    else:
-                        match = False
-                else:
-                    # No aggregation found, but it is required
-                    match = False
-                result_map[index][0]["aggs"][agg_name]["match"] = match
+        result_map = self.schedule_check_aggregations(rule_object, result_map)
 
         return result_map
@@ -424,7 +452,6 @@ class ElasticsearchBackend(StorageBackend):
         dedup_fields=None,
         tags=None,
     ):
-
         add_bool = []
         add_top = []
         add_top_negative = []
@@ -460,7 +487,7 @@ class ElasticsearchBackend(StorageBackend):
 
         if rule_object is not None:
             index = settings.INDEX_RULE_STORAGE
-            add_bool.append({"rule_uuid": str(rule_object.id)})
+            add_bool.append({"rule_id": str(rule_object.id)})
         else:
             # I - Index
             index = parse_index(request.user, query_params)
@@ -480,14 +507,22 @@ class ElasticsearchBackend(StorageBackend):
         if isinstance(sources, dict):
             return sources
         total_count = len(sources)
-        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
+        # Total is -1 due to the "all" source
+        total_sources = (
+            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
+        )
+        # If the sources the user has access to are equal to all
+        # possible sources, then we don't need to add the source
+        # filter to the query.
         if total_count != total_sources:
             add_top_tmp = {"bool": {"should": []}}
             for source_iter in sources:
                 add_top_tmp["bool"]["should"].append(
                     {"match_phrase": {"src": source_iter}}
                 )
-            add_top.append(add_top_tmp)
+            if query_params["source"] != "all":
+                add_top.append(add_top_tmp)
 
         # R - Ranges
         # date_query = False
@@ -507,12 +542,17 @@ class ElasticsearchBackend(StorageBackend):
         sort = parse_sort(query_params)
         if isinstance(sort, dict):
             return sort
+        if rule_object is not None:
+            field = "match_ts"
+        else:
+            field = "ts"
         if sort:
             # For Druid compatibility
             sort_map = {"ascending": "asc", "descending": "desc"}
             sorting = [
                 {
-                    "ts": {
+                    field: {
                         "order": sort_map[sort],
                     }
                 }
@@ -524,24 +564,28 @@ class ElasticsearchBackend(StorageBackend):
         if isinstance(sentiment_r, dict):
             return sentiment_r
         if sentiment_r:
+            if rule_object is not None:
+                sentiment_index = "meta.aggs.avg_sentiment.value"
+            else:
+                sentiment_index = "sentiment"
             sentiment_method, sentiment = sentiment_r
-            range_query_compare = {"range": {"sentiment": {}}}
+            range_query_compare = {"range": {sentiment_index: {}}}
             range_query_precise = {
                 "match": {
-                    "sentiment": None,
+                    sentiment_index: None,
                 }
             }
             if sentiment_method == "below":
-                range_query_compare["range"]["sentiment"]["lt"] = sentiment
+                range_query_compare["range"][sentiment_index]["lt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "above":
-                range_query_compare["range"]["sentiment"]["gt"] = sentiment
+                range_query_compare["range"][sentiment_index]["gt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "exact":
-                range_query_precise["match"]["sentiment"] = sentiment
+                range_query_precise["match"][sentiment_index] = sentiment
                 add_top.append(range_query_precise)
             elif sentiment_method == "nonzero":
-                range_query_precise["match"]["sentiment"] = 0
+                range_query_precise["match"][sentiment_index] = 0
                 add_top_negative.append(range_query_precise)
 
         # Add in the additional information we already populated
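
For reference, the sentiment handling above reduces to a small pure function; this standalone sketch shows the clause each method produces (nonzero is expressed as a negated exact match):

def sentiment_clauses(sentiment_method, sentiment, sentiment_index="sentiment"):
    add_top, add_top_negative = [], []
    range_query_compare = {"range": {sentiment_index: {}}}
    range_query_precise = {"match": {sentiment_index: None}}
    if sentiment_method == "below":
        range_query_compare["range"][sentiment_index]["lt"] = sentiment
        add_top.append(range_query_compare)
    elif sentiment_method == "above":
        range_query_compare["range"][sentiment_index]["gt"] = sentiment
        add_top.append(range_query_compare)
    elif sentiment_method == "exact":
        range_query_precise["match"][sentiment_index] = sentiment
        add_top.append(range_query_precise)
    elif sentiment_method == "nonzero":
        range_query_precise["match"][sentiment_index] = 0
        add_top_negative.append(range_query_precise)
    return add_top, add_top_negative

print(sentiment_clauses("above", 0.5))
# ([{'range': {'sentiment': {'gt': 0.5}}}], [])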


@@ -1,19 +1,31 @@
 import logging
 from datetime import datetime
 from pprint import pprint
 
+import httpx
+import orjson
 import requests
 from django.conf import settings
 
 from core.db import StorageBackend, add_defaults, dedup_list
-from core.db.processing import annotate_results, parse_results
+from core.db.processing import parse_results
+from core.lib.parsing import (
+    QueryError,
+    parse_date_time,
+    parse_index,
+    parse_rule,
+    parse_sentiment,
+    parse_size,
+    parse_sort,
+    parse_source,
+)
 
 logger = logging.getLogger(__name__)
 
 
 class ManticoreBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Manticore")
+        super().__init__("manticore")
 
     def initialise(self, **kwargs):
         """
@@ -21,14 +33,27 @@ class ManticoreBackend(StorageBackend):
         """
         pass  # we use requests
 
-    def construct_query(self, query, size, index, blank=False):
+    async def async_initialise(self, **kwargs):
+        """
+        Initialise the Manticore client in async mode
+        """
+        pass  # we use requests
+
+    def delete_rule_entries(self, rule_id):
+        """
+        Delete all entries for a given rule.
+        :param rule_id: The rule ID to delete.
+        """
+        # TODO
+
+    def construct_query(self, query, size=None, blank=False, **kwargs):
         """
         Accept some query parameters and construct an OpenSearch query.
         """
         if not size:
             size = 5
         query_base = {
-            "index": index,
+            "index": kwargs.get("index"),
             "limit": size,
             "query": {"bool": {"must": []}},
         }
@@ -39,11 +64,79 @@ class ManticoreBackend(StorageBackend):
             query_base["query"]["bool"]["must"].append(query_string)
         return query_base
 
-    def run_query(self, client, user, search_query):
-        response = requests.post(
-            f"{settings.MANTICORE_URL}/json/search", json=search_query
-        )
-        return response
+    def parse(self, response, **kwargs):
+        parsed = parse_results(response, **kwargs)
+        return parsed
+
+    def run_query(self, user, search_query, **kwargs):
+        """
+        Low level helper to run Manticore query.
+        """
+        index = kwargs.get("index")
+        raw = kwargs.get("raw")
+        if search_query and not raw:
+            search_query["index"] = index
+        path = kwargs.get("path", "json/search")
+        if raw:
+            response = requests.post(
+                f"{settings.MANTICORE_URL}/{path}", search_query
+            )
+        else:
+            response = requests.post(
+                f"{settings.MANTICORE_URL}/{path}", json=search_query
+            )
+        return orjson.loads(response.text)
+
+    async def async_run_query(self, user, search_query, **kwargs):
+        """
+        Low level helper to run Manticore query asynchronously.
+        """
+        index = kwargs.get("index")
+        search_query["index"] = index
+        async with httpx.AsyncClient() as client:
+            response = await client.post(
+                f"{settings.MANTICORE_URL}/json/search", json=search_query
+            )
+        return orjson.loads(response.text)
+
+    async def async_store_matches(self, matches):
+        """
+        Store a list of matches in Manticore.
+        :param index: The index to store the matches in.
+        :param matches: A list of matches to store.
+        """
+        # TODO
+
+    def store_matches(self, matches):
+        """
+        Store a list of matches in Manticore.
+        :param index: The index to store the matches in.
+        :param matches: A list of matches to store.
+        """
+        # TODO
+
+    def prepare_schedule_query(self, rule_object):
+        """
+        Helper to run a scheduled query with reduced functionality.
+        """
+        # TODO
+
+    def schedule_query_results_test_sync(self, rule_object):
+        """
+        Helper to run a scheduled query test with reduced functionality.
+        Sync version for running from Django forms.
+        Does not return results.
+        """
+        # TODO
+
+    async def schedule_query_results(self, rule_object):
+        """
+        Helper to run a scheduled query with reduced functionality and async.
+        """
+        # TODO
 
     def query_results(
         self,
@@ -67,117 +160,77 @@ class ManticoreBackend(StorageBackend):
         query_created = False
         source = None
         add_defaults(query_params)
 
-        # Check size
+        # Now, run the helpers for SIQTSRSS/ADR
+        # S - Size
+        # I - Index
+        # Q - Query
+        # T - Tags
+        # S - Source
+        # R - Ranges
+        # S - Sort
+        # S - Sentiment
+        # A - Annotate
+        # D - Dedup
+        # R - Reverse
         if request.user.is_anonymous:
-            sizes = settings.MANTICORE_MAIN_SIZES_ANON
+            sizes = settings.MAIN_SIZES_ANON
         else:
-            sizes = settings.MANTICORE_MAIN_SIZES
+            sizes = settings.MAIN_SIZES
         if not size:
-            if "size" in query_params:
-                size = query_params["size"]
-                if size not in sizes:
-                    message = "Size is not permitted"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-                size = int(size)
-            else:
-                size = 20
+            size = parse_size(query_params, sizes)
+            if isinstance(size, dict):
+                return size
 
-        # Check index
-        if "index" in query_params:
-            index = query_params["index"]
-            if index == "main":
-                index = settings.MANTICORE_INDEX_MAIN
-            else:
-                if not request.user.has_perm(f"core.index_{index}"):
-                    message = "Not permitted to search by this index"
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-                if index == "meta":
-                    index = settings.MANTICORE_INDEX_META
-                elif index == "internal":
-                    index = settings.MANTICORE_INDEX_INT
-                else:
-                    message = "Index is not valid."
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-        else:
-            index = settings.MANTICORE_INDEX_MAIN
+        rule_object = parse_rule(request.user, query_params)
+        if isinstance(rule_object, dict):
+            return rule_object
 
-        # Create the search query
-        if "query" in query_params:
-            query = query_params["query"]
-            search_query = self.construct_query(query, size, index)
-            query_created = True
-        else:
-            if custom_query:
-                search_query = custom_query
-
-        if tags:
-            # Get a blank search query
-            if not query_created:
-                search_query = self.construct_query(None, size, index, blank=True)
-                query_created = True
-            for tagname, tagvalue in tags.items():
-                add_bool.append({tagname: tagvalue})
-
-        required_any = ["query_full", "query", "tags"]
-        if not any([field in query_params.keys() for field in required_any]):
-            if not custom_query:
-                message = "Empty query!"
-                message_class = "warning"
-                return {"message": message, "class": message_class}
+        if rule_object is not None:
+            index = settings.INDEX_RULE_STORAGE
+            add_bool.append({"rule_id": str(rule_object.id)})
+        else:
+            # I - Index
+            index = parse_index(request.user, query_params)
+            if isinstance(index, dict):
+                return index
 
-        # Check for a source
-        if "source" in query_params:
-            source = query_params["source"]
-
-            if source in settings.SOURCES_RESTRICTED:
-                if not request.user.has_perm("core.restricted_sources"):
-                    message = "Access denied"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            elif source not in settings.MAIN_SOURCES:
-                message = "Invalid source"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-
-            if source == "all":
-                source = None  # the next block will populate it
-
-        if source:
-            sources = [source]
-        else:
-            sources = list(settings.MAIN_SOURCES)
-            if request.user.has_perm("core.restricted_sources"):
-                for source_iter in settings.SOURCES_RESTRICTED:
-                    sources.append(source_iter)
-
-        add_top_tmp = {"bool": {"should": []}}
-        total_count = 0
-        for source_iter in sources:
-            add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
-            total_count += 1
-        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
-        if not total_count == total_sources:
-            add_top.append(add_top_tmp)
+        # Q/T - Query/Tags
+        search_query = self.parse_query(
+            query_params, tags, size, custom_query, add_bool
+        )
+        # Query should be a dict, so check if it contains message here
+        if "message" in search_query:
+            return search_query
 
-        # Date/time range
-        if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
-            query_params.keys()
-        ):
-            from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
-            to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
-            from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
-            to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
-            from_ts = int(from_ts.timestamp())
-            to_ts = int(to_ts.timestamp())
+        # S - Sources
+        sources = parse_source(request.user, query_params)
+        if isinstance(sources, dict):
+            return sources
+        total_count = len(sources)
+        # Total is -1 due to the "all" source
+        total_sources = (
+            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
+        )
+        # If the sources the user has access to are equal to all
+        # possible sources, then we don't need to add the source
+        # filter to the query.
+        if total_count != total_sources:
+            add_top_tmp = {"bool": {"should": []}}
+            for source_iter in sources:
+                add_top_tmp["bool"]["should"].append(
+                    {"match_phrase": {"src": source_iter}}
+                )
+            if query_params["source"] != "all":
+                add_top.append(add_top_tmp)
 
+        # R - Ranges
+        # date_query = False
+        from_ts, to_ts = parse_date_time(query_params)
+        if from_ts:
             range_query = {
                 "range": {
                     "ts": {
@@ -188,115 +241,87 @@ class ManticoreBackend(StorageBackend):
                     }
                 }
             }
             add_top.append(range_query)
 
-        # Sorting
-        if "sorting" in query_params:
-            sorting = query_params["sorting"]
-            if sorting not in ("asc", "desc", "none"):
-                message = "Invalid sort"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if sorting in ("asc", "desc"):
-                sort = [
-                    {
-                        "ts": {
-                            "order": sorting,
-                        }
-                    }
-                ]
+        # S - Sort
+        sort = parse_sort(query_params)
+        if isinstance(sort, dict):
+            return sort
+
+        if rule_object is not None:
+            field = "match_ts"
+        else:
+            field = "ts"
+        if sort:
+            # For Druid compatibility
+            sort_map = {"ascending": "asc", "descending": "desc"}
+            sorting = [
+                {
+                    field: {
+                        "order": sort_map[sort],
+                    }
+                }
+            ]
+            search_query["sort"] = sorting
 
-        # Sentiment handling
-        if "check_sentiment" in query_params:
-            if "sentiment_method" not in query_params:
-                message = "No sentiment method"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if "sentiment" in query_params:
-                sentiment = query_params["sentiment"]
-                try:
-                    sentiment = float(sentiment)
-                except ValueError:
-                    message = "Sentiment is not a float"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            sentiment_method = query_params["sentiment_method"]
-            range_query_compare = {"range": {"sentiment": {}}}
+        # S - Sentiment
+        sentiment_r = parse_sentiment(query_params)
+        if isinstance(sentiment_r, dict):
+            return sentiment_r
+        if sentiment_r:
+            if rule_object is not None:
+                sentiment_index = "meta.aggs.avg_sentiment.value"
+            else:
+                sentiment_index = "sentiment"
+            sentiment_method, sentiment = sentiment_r
+            range_query_compare = {"range": {sentiment_index: {}}}
             range_query_precise = {
                 "match": {
-                    "sentiment": None,
+                    sentiment_index: None,
                 }
             }
             if sentiment_method == "below":
-                range_query_compare["range"]["sentiment"]["lt"] = sentiment
+                range_query_compare["range"][sentiment_index]["lt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "above":
-                range_query_compare["range"]["sentiment"]["gt"] = sentiment
+                range_query_compare["range"][sentiment_index]["gt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "exact":
-                range_query_precise["match"]["sentiment"] = sentiment
+                range_query_precise["match"][sentiment_index] = sentiment
                 add_top.append(range_query_precise)
             elif sentiment_method == "nonzero":
-                range_query_precise["match"]["sentiment"] = 0
+                range_query_precise["match"][sentiment_index] = 0
                 add_top_negative.append(range_query_precise)
 
-        if add_bool:
-            # if "bool" not in search_query["query"]:
-            #     search_query["query"]["bool"] = {}
-            # if "must" not in search_query["query"]["bool"]:
-            #     search_query["query"]["bool"] = {"must": []}
-            for item in add_bool:
-                search_query["query"]["bool"]["must"].append({"match": item})
-        if add_top:
-            for item in add_top:
-                search_query["query"]["bool"]["must"].append(item)
-        if add_top_negative:
-            for item in add_top_negative:
-                if "must_not" in search_query["query"]["bool"]:
-                    search_query["query"]["bool"]["must_not"].append(item)
-                else:
-                    search_query["query"]["bool"]["must_not"] = [item]
-        if sort:
-            search_query["sort"] = sort
+        # Add in the additional information we already populated
+        self.add_bool(search_query, add_bool)
+        self.add_top(search_query, add_top)
+        self.add_top(search_query, add_top_negative, negative=True)
 
-        pprint(search_query)
-        results = self.run_query(
-            self.client,
-            request.user,  # passed through run_main_query to filter_blacklisted
+        response = self.query(
+            request.user,
             search_query,
+            index=index,
         )
-        if not results:
+        if not response:
             message = "Error running query"
             message_class = "danger"
             return {"message": message, "class": message_class}
         # results = results.to_dict()
-        if "error" in results:
-            message = results["error"]
+        if "error" in response:
+            message = response["error"]
             message_class = "danger"
             return {"message": message, "class": message_class}
-        results_parsed = parse_results(results)
-        if annotate:
-            annotate_results(results_parsed)
-        if "dedup" in query_params:
-            if query_params["dedup"] == "on":
-                dedup = True
-            else:
-                dedup = False
-        else:
-            dedup = False
-        if reverse:
-            results_parsed = results_parsed[::-1]
+        if "message" in response:
+            return response
 
-        if dedup:
-            if not dedup_fields:
-                dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
-            results_parsed = dedup_list(results_parsed, dedup_fields)
+        # A/D/R - Annotate/Dedup/Reverse
+        response["object_list"] = self.process_results(
+            response["object_list"],
+            annotate=annotate,
+            dedup=dedup,
+            dedup_fields=dedup_fields,
+            reverse=reverse,
+        )
 
-        context = {
-            "object_list": results_parsed,
-            "card": results["hits"]["total"],
-            "took": results["took"],
-        }
-        if "cache" in results:
-            context["cache"] = results["cache"]
+        context = response
         return context
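
The reworked run_query above has two modes: JSON search bodies go to json/search, while raw payloads are posted as the request body to an arbitrary path. A standalone sketch of the two call shapes (the endpoint default comes from local_settings.py; the raw-mode path and payload are illustrative only):

import orjson
import requests

MANTICORE_URL = "http://example-db-1:9308"  # default from local_settings.py

# JSON mode: a structured query posted to json/search
search_query = {"index": "main", "limit": 5, "query": {"bool": {"must": []}}}
response = requests.post(f"{MANTICORE_URL}/json/search", json=search_query)
print(orjson.loads(response.text))

# Raw mode: the payload becomes the request body (requests' second
# positional argument is `data`); the path and payload here are illustrative.
response = requests.post(f"{MANTICORE_URL}/cli", "SHOW TABLES")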

core/db/manticore_orig.py (new file, 302 lines)

@@ -0,0 +1,302 @@
import logging
from datetime import datetime
from pprint import pprint
import requests
from django.conf import settings
from core.db import StorageBackend, add_defaults, dedup_list
from core.db.processing import annotate_results, parse_results
logger = logging.getLogger(__name__)
class ManticoreBackend(StorageBackend):
def __init__(self):
super().__init__("manticore")
def initialise(self, **kwargs):
"""
Initialise the Manticore client
"""
pass # we use requests
def construct_query(self, query, size, index, blank=False):
"""
Accept some query parameters and construct an OpenSearch query.
"""
if not size:
size = 5
query_base = {
"index": index,
"limit": size,
"query": {"bool": {"must": []}},
}
query_string = {
"query_string": query,
}
if not blank:
query_base["query"]["bool"]["must"].append(query_string)
return query_base
def run_query(self, client, user, search_query):
response = requests.post(
f"{settings.MANTICORE_URL}/json/search", json=search_query
)
return response
def query_results(
self,
request,
query_params,
size=None,
annotate=True,
custom_query=False,
reverse=False,
dedup=False,
dedup_fields=None,
tags=None,
):
query = None
message = None
message_class = None
add_bool = []
add_top = []
add_top_negative = []
sort = None
query_created = False
source = None
add_defaults(query_params)
# Check size
if request.user.is_anonymous:
sizes = settings.MANTICORE_MAIN_SIZES_ANON
else:
sizes = settings.MANTICORE_MAIN_SIZES
if not size:
if "size" in query_params:
size = query_params["size"]
if size not in sizes:
message = "Size is not permitted"
message_class = "danger"
return {"message": message, "class": message_class}
size = int(size)
else:
size = 20
# Check index
if "index" in query_params:
index = query_params["index"]
if index == "main":
index = settings.MANTICORE_INDEX_MAIN
else:
if not request.user.has_perm(f"core.index_{index}"):
message = "Not permitted to search by this index"
message_class = "danger"
return {
"message": message,
"class": message_class,
}
if index == "meta":
index = settings.MANTICORE_INDEX_META
elif index == "internal":
index = settings.MANTICORE_INDEX_INT
else:
message = "Index is not valid."
message_class = "danger"
return {
"message": message,
"class": message_class,
}
else:
index = settings.MANTICORE_INDEX_MAIN
# Create the search query
if "query" in query_params:
query = query_params["query"]
search_query = self.construct_query(query, size, index)
query_created = True
else:
if custom_query:
search_query = custom_query
if tags:
# Get a blank search query
if not query_created:
search_query = self.construct_query(None, size, index, blank=True)
query_created = True
for tagname, tagvalue in tags.items():
add_bool.append({tagname: tagvalue})
required_any = ["query_full", "query", "tags"]
if not any([field in query_params.keys() for field in required_any]):
if not custom_query:
message = "Empty query!"
message_class = "warning"
return {"message": message, "class": message_class}
# Check for a source
if "source" in query_params:
source = query_params["source"]
if source in settings.SOURCES_RESTRICTED:
if not request.user.has_perm("core.restricted_sources"):
message = "Access denied"
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.MAIN_SOURCES:
message = "Invalid source"
message_class = "danger"
return {"message": message, "class": message_class}
if source == "all":
source = None # the next block will populate it
if source:
sources = [source]
else:
sources = list(settings.MAIN_SOURCES)
if request.user.has_perm("core.restricted_sources"):
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
add_top_tmp = {"bool": {"should": []}}
total_count = 0
for source_iter in sources:
add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
total_count += 1
total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
if total_count != total_sources:
    add_top.append(add_top_tmp)

# Date/time range
if {"from_date", "to_date", "from_time", "to_time"}.issubset(query_params.keys()):
    from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
    to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
    from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
    to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
    from_ts = int(from_ts.timestamp())
    to_ts = int(to_ts.timestamp())
    range_query = {
        "range": {
            "ts": {
                "gt": from_ts,
                "lt": to_ts,
            }
        }
    }
    add_top.append(range_query)

# Sorting
if "sorting" in query_params:
    sorting = query_params["sorting"]
    if sorting not in ("asc", "desc", "none"):
        message = "Invalid sort"
        message_class = "danger"
        return {"message": message, "class": message_class}
    if sorting in ("asc", "desc"):
        sort = [
            {
                "ts": {
                    "order": sorting,
                }
            }
        ]

# Sentiment handling
if "check_sentiment" in query_params:
    if "sentiment_method" not in query_params:
        message = "No sentiment method"
        message_class = "danger"
        return {"message": message, "class": message_class}
    if "sentiment" in query_params:
        sentiment = query_params["sentiment"]
        try:
            sentiment = float(sentiment)
        except ValueError:
            message = "Sentiment is not a float"
            message_class = "danger"
            return {"message": message, "class": message_class}
    sentiment_method = query_params["sentiment_method"]
    range_query_compare = {"range": {"sentiment": {}}}
    range_query_precise = {
        "match": {
            "sentiment": None,
        }
    }
    if sentiment_method == "below":
        range_query_compare["range"]["sentiment"]["lt"] = sentiment
        add_top.append(range_query_compare)
    elif sentiment_method == "above":
        range_query_compare["range"]["sentiment"]["gt"] = sentiment
        add_top.append(range_query_compare)
    elif sentiment_method == "exact":
        range_query_precise["match"]["sentiment"] = sentiment
        add_top.append(range_query_precise)
    elif sentiment_method == "nonzero":
        range_query_precise["match"]["sentiment"] = 0
        add_top_negative.append(range_query_precise)

if add_bool:
    for item in add_bool:
        search_query["query"]["bool"]["must"].append({"match": item})
if add_top:
    for item in add_top:
        search_query["query"]["bool"]["must"].append(item)
if add_top_negative:
    for item in add_top_negative:
        if "must_not" in search_query["query"]["bool"]:
            search_query["query"]["bool"]["must_not"].append(item)
        else:
            search_query["query"]["bool"]["must_not"] = [item]
if sort:
    search_query["sort"] = sort

results = self.run_query(
    self.client,
    request.user,  # passed through run_main_query to filter_blacklisted
    search_query,
)
if not results:
    message = "Error running query"
    message_class = "danger"
    return {"message": message, "class": message_class}
if "error" in results:
    message = results["error"]
    message_class = "danger"
    return {"message": message, "class": message_class}

results_parsed = parse_results(results)
if annotate:
    annotate_results(results_parsed)

# Deduplication is opt-in via the query parameters
dedup = query_params.get("dedup") == "on"

if reverse:
    results_parsed = results_parsed[::-1]
if dedup:
    if not dedup_fields:
        dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
    results_parsed = dedup_list(results_parsed, dedup_fields)

context = {
    "object_list": results_parsed,
    "card": results["hits"]["total"],
    "took": results["took"],
}
if "cache" in results:
    context["cache"] = results["cache"]
return context
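The `dedup_list` call above collapses rows that repeat the same field values. The helper itself is not shown in this diff; a minimal sketch of the assumed behaviour (first occurrence wins, key built from the listed fields):

def dedup_list(results, dedup_fields):
    """Drop results that duplicate an earlier row on all dedup_fields."""
    seen = set()
    deduped = []
    for item in results:
        # Build a hashable key from the dedup fields; absent fields count as None
        key = tuple(item.get(field) for field in dedup_fields)
        if key not in seen:
            seen.add(key)
            deduped.append(item)
    return deduped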

View File

@@ -1,5 +1,5 @@
 from datetime import datetime
+import ast

 from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online
@@ -92,6 +92,11 @@ def parse_results(results, meta=None):
         for field in list(element.keys()):
             if element[field] == "":
                 del element[field]
+        # Unfold the tokens
+        if "tokens" in element:
+            if element["tokens"].startswith('["') or element["tokens"].startswith("['"):
+                tokens_parsed = ast.literal_eval(element["tokens"])
+                element["tokens"] = tokens_parsed
         # Split the timestamp into date and time
         if "ts" not in element:

View File

@@ -2,6 +2,7 @@ from django import forms
 from django.contrib.auth.forms import UserCreationForm
 from django.core.exceptions import FieldDoesNotExist
 from django.forms import ModelForm
+from mixins.restrictions import RestrictedFormMixin

 from core.db.storage import db
 from core.lib.parsing import QueryError
@@ -12,36 +13,6 @@ from .models import NotificationRule, NotificationSettings, User
 # flake8: noqa: E501

-class RestrictedFormMixin:
-    """
-    This mixin is used to restrict the queryset of a form to the current user.
-    The request object is passed from the view.
-    Fieldargs is used to pass additional arguments to the queryset filter.
-    """
-
-    fieldargs = {}
-
-    def __init__(self, *args, **kwargs):
-        # self.fieldargs = {}
-        self.request = kwargs.pop("request")
-        super().__init__(*args, **kwargs)
-        for field in self.fields:
-            # Check it's not something like a CharField which has no queryset
-            if not hasattr(self.fields[field], "queryset"):
-                continue
-            model = self.fields[field].queryset.model
-            # Check if the model has a user field
-            try:
-                model._meta.get_field("user")
-                # Add the user to the queryset filters
-                self.fields[field].queryset = model.objects.filter(
-                    user=self.request.user, **self.fieldargs.get(field, {})
-                )
-            except FieldDoesNotExist:
-                pass
-
 class NewUserForm(UserCreationForm):
     email = forms.EmailField(required=True)
@@ -71,6 +42,10 @@ class CustomUserCreationForm(UserCreationForm):
 class NotificationSettingsForm(RestrictedFormMixin, ModelForm):
+    def __init__(self, *args, **kwargs):
+        super(NotificationSettingsForm, self).__init__(*args, **kwargs)
+        self.fields["url"].label = "URL"
+
     class Meta:
         model = NotificationSettings
         fields = (
@@ -96,6 +71,10 @@ class NotificationSettingsForm(RestrictedFormMixin, ModelForm):
 class NotificationRuleForm(RestrictedFormMixin, ModelForm):
+    def __init__(self, *args, **kwargs):
+        super(NotificationRuleForm, self).__init__(*args, **kwargs)
+        self.fields["url"].label = "URL"
+
     class Meta:
         model = NotificationRule
         fields = (
@@ -108,7 +87,8 @@ class NotificationRuleForm(RestrictedFormMixin, ModelForm):
             "topic",
             "url",
             "service",
-            "send_empty",
+            "policy",
+            "ingest",
             "enabled",
         )
         help_texts = {
@@ -122,11 +102,14 @@ class NotificationRuleForm(RestrictedFormMixin, ModelForm):
             "interval": "How often to run the search. On demand evaluates messages as they are received, without running a scheduled search. The remaining options schedule a search of the database with the window below.",
             "window": "Time window to search: 1d, 1h, 1m, 1s, etc.",
             "amount": "Amount of matches to be returned for scheduled queries. Cannot be used with on-demand queries.",
-            "send_empty": "Send a notification if no matches are found.",
+            "policy": "When to trigger this policy.",
+            "ingest": "Whether to ingest matches.",
         }

     def clean(self):
         cleaned_data = super(NotificationRuleForm, self).clean()
+        # TODO: should this be in rules.py?
         if "service" in cleaned_data:
             if cleaned_data["service"] == "webhook":
                 if not cleaned_data.get("url"):

View File

@@ -4,7 +4,7 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
     extra_should = []
     extra_should2 = []
     if num:
-        extra_must.append({"match_phrase": {"num": num}})
+        extra_must.append({"equals": {"num": num}})
     if net:
         extra_must.append({"match_phrase": {"net": net}})
     if channel:
@@ -52,7 +52,7 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
             extra_should.append({"match": {"nick": channel}})
         else:
             for ctype in types:
-                extra_should.append({"match": {"mtype": ctype}})
+                extra_should.append({"equals": {"mtype": ctype}})
     else:
         for ctype in types:
             extra_should.append({"match": {"type": ctype}})
@@ -84,4 +84,5 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
         query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
     if extra_should2:
         query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should2]}})
     return query
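This hunk swaps `match_phrase` for `equals` on exact-valued fields, in line with the Manticore port mentioned in the commit log: `match_phrase` goes through text analysis, while `equals` compares attribute values directly. A rough sketch of the query body this now produces (field values are illustrative):

# Rough shape of the query construct_query() builds after this change
query = {
    "query": {
        "bool": {
            "must": [
                {"equals": {"num": 1234}},            # exact attribute comparison
                {"match_phrase": {"net": "libera"}},  # still a phrase match for text
            ]
        }
    }
}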

View File

@@ -56,9 +56,11 @@ def webhook_sendmsg(**kwargs):
     msg = kwargs.get("msg", None)
     notification_settings = kwargs.get("notification_settings")
     url = notification_settings.get("url")
+    headers = {"Content-type": "application/json"}
     try:
         requests.post(
             f"{url}",
+            headers=headers,
             data=msg,
         )
     except requests.exceptions.ConnectionError as e:
@@ -95,6 +97,9 @@ def sendmsg(**kwargs):
         return

     service = notification_settings.get("service")
+    if service == "none":
+        # Don't send anything
+        return
     if service == "ntfy":
         ntfy_sendmsg(**kwargs)
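With the added header, webhook payloads are now declared as JSON rather than form-encoded data. Roughly, the delivery behaves like this (URL and body are examples, not values from the source):

import requests

requests.post(
    "https://example.com/hook",                    # notification_settings["url"]
    headers={"Content-type": "application/json"},  # added by this hunk
    data='{"data": [], "meta": {}}',               # msg is already-serialised JSON
)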

View File

@@ -90,6 +90,7 @@ def parse_index(user, query_params, raise_error=False):
         }
     else:
         index = settings.INDEX_MAIN
     return index
@@ -98,6 +99,7 @@ def parse_source(user, query_params, raise_error=False):
     if "source" in query_params:
         source = query_params["source"]
+        # Validate permissions for restricted sources
         if source in settings.SOURCES_RESTRICTED:
             if not user.has_perm("core.restricted_sources"):
                 message = f"Access denied: {source}"
@@ -105,6 +107,8 @@ def parse_source(user, query_params, raise_error=False):
                     raise QueryError(message)
                 message_class = "danger"
                 return {"message": message, "class": message_class}
+        # Check validity of source
         elif source not in settings.MAIN_SOURCES:
             message = f"Invalid source: {source}"
             if raise_error:
@@ -118,11 +122,17 @@ def parse_source(user, query_params, raise_error=False):
     if source:
         sources = [source]
     else:
+        # Here we need to populate what "all" means for the user.
+        # They may only have access to a subset of the sources.
+        # We build a custom source list with ones they have access
+        # to, and then remove "all" from the list.
         sources = list(settings.MAIN_SOURCES)
         if user.has_perm("core.restricted_sources"):
+            # If the user can use restricted sources, add them in.
            for source_iter in settings.SOURCES_RESTRICTED:
                sources.append(source_iter)
+    # Get rid of "all", it's just a meta-source
     if "all" in sources:
         sources.remove("all")
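The comments added here document how the "all" meta-source is expanded. In effect (example settings values, for a user without the restricted-sources permission):

MAIN_SOURCES = ["all", "irc", "dis"]   # example values, not from the source
SOURCES_RESTRICTED = ["internal"]      # example value, not from the source

sources = list(MAIN_SOURCES)           # user asked for "all"
# restricted sources are appended only with core.restricted_sources
if "all" in sources:
    sources.remove("all")              # "all" is just a meta-source
assert sources == ["irc", "dis"]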

View File

@@ -8,10 +8,11 @@ try:
 except ImportError:
     from yaml import Loader, Dumper

+import uuid
+from copy import deepcopy
 from datetime import datetime

 import orjson
-from asgiref.sync import async_to_sync
 from siphashc import siphash

 from core.lib.notify import sendmsg
@@ -44,14 +45,18 @@ def format_ntfy(**kwargs):
         rule: The rule object, must be specified
         index: The index the rule matched on, can be None
         message: The message to send, can be None
-        matched: The matched fields, can be None
-        total_hits: The total number of matches, optional
+        meta:
+            matched: The matched fields, can be None
+            total_hits: The total number of matches, optional
     """
     rule = kwargs.get("rule")
     index = kwargs.get("index")
     message = kwargs.get("message")
-    matched = kwargs.get("matched")
-    total_hits = kwargs.get("total_hits", 0)
+    meta = kwargs.get("meta", {})
+    total_hits = meta.get("total_hits", 0)
+    matched = meta.get("matched")
     if message:
         # Dump the message in YAML for readability
         messages_formatted = ""
@@ -88,25 +93,22 @@ def format_webhook(**kwargs):
         rule: The rule object, must be specified
         index: The index the rule matched on, can be None
         message: The message to send, can be None, but will be sent as None
-        matched: The matched fields, can be None, but will be sent as None
-        total_hits: The total number of matches, optional
+        meta:
+            matched: The matched fields, can be None, but will be sent as None
+            total_hits: The total number of matches, optional
         notification_settings: The notification settings, must be specified
         priority: The priority of the message, optional
         topic: The topic of the message, optional
     """
-    rule = kwargs.get("rule")
-    index = kwargs.get("index")
+    # rule = kwargs.get("rule")
+    # index = kwargs.get("index")
     message = kwargs.get("message")
-    matched = kwargs.get("matched")
-    total_hits = kwargs.get("total_hits", 0)
+    meta = kwargs.get("meta")
     notification_settings = kwargs.get("notification_settings")
     notify_message = {
-        "rule_id": rule.id,
-        "rule_name": rule.name,
-        "match": matched,
-        "total_hits": total_hits,
-        "index": index,
         "data": message,
+        "meta": meta,
     }
     if "priority" in notification_settings:
         notify_message["priority"] = notification_settings["priority"]
@@ -140,6 +142,15 @@ def rule_notify(rule, index, message, meta=None):
     if not notification_settings:
         # No/invalid notification settings, don't send anything
         return
+    if notification_settings.get("service") == "none":
+        # Don't send anything
+        return
+
+    # double sigh
+    message_copy = deepcopy(message)
+    for index, _ in enumerate(message_copy):
+        if "meta" in message_copy[index]:
+            del message_copy[index]["meta"]

     # Create a cast we can reuse for the formatting helpers and sendmsg
     cast = {
@@ -147,14 +158,11 @@ def rule_notify(rule, index, message, meta=None):
         "user": rule.user,
         "rule": rule,
         "index": index,
-        "message": message,
+        "message": message_copy,
         "notification_settings": notification_settings,
     }
     if meta:
-        if "matched" in meta:
-            cast["matched"] = meta["matched"]
-        if "total_hits" in meta:
-            cast["total_hits"] = meta["total_hits"]
+        cast["meta"] = meta

     if rule.service == "ntfy":
         cast["msg"] = format_ntfy(**cast)
@@ -179,6 +187,7 @@ class NotificationRuleData(object):
         self.db = db
         self.data = self.cleaned_data.get("data")
         self.window = self.cleaned_data.get("window")
+        self.policy = self.cleaned_data.get("policy")
         self.parsed = None

         self.aggs = {}
@@ -192,6 +201,13 @@ class NotificationRuleData(object):
         if self.object is not None:
             self.populate_matched()

+    def clear_database_matches(self):
+        """
+        Delete all matches for this rule.
+        """
+        rule_id = str(self.object.id)
+        self.db.delete_rule_entries(rule_id)
+
     def populate_matched(self):
         """
         On first creation, the match field is None. We need to populate it with
@@ -204,6 +220,35 @@ class NotificationRuleData(object):
                 self.object.match[index] = False
             self.object.save()

+    def format_matched(self, messages):
+        matched = {}
+        for message in messages:
+            for field, value in self.parsed.items():
+                if field == "msg":
+                    # Allow partial matches for msg
+                    for msg in value:
+                        if "msg" in message:
+                            if msg.lower() in message["msg"].lower():
+                                matched[field] = msg
+                                # Break out of the msg matching loop
+                                break
+                    # Continue to next field
+                    continue
+                if field == "tokens":
+                    # Allow partial matches for tokens
+                    for token in value:
+                        if "tokens" in message:
+                            if token.lower() in [x.lower() for x in message["tokens"]]:
+                                matched[field] = token
+                                # Break out of the token matching loop
+                                break
+                    # Continue to next field
+                    continue
+                if field in message and message[field] in value:
+                    # Do exact matches for all other fields
+                    matched[field] = message[field]
+        return matched
+
     def store_match(self, index, match):
         """
         Store a match result.
@@ -244,7 +289,10 @@ class NotificationRuleData(object):
         if index is None:
             # Check if we have any matches on all indices
-            return any(self.object.match.values())
+            values = self.object.match.values()
+            if not values:
+                return None
+            return any(values)

         # Check if it's the same hash
         if match is not None:
@@ -253,7 +301,15 @@ class NotificationRuleData(object):
             hash_matches = self.object.match.get(index) == match
             return hash_matches

-        return self.object.match.get(index)
+        returned_match = self.object.match.get(index, None)
+        if type(returned_match) == int:
+            # We are getting a hash from the database,
+            # but we have nothing to check it against.
+            # In this instance, we are checking if we got a match
+            # at all last time. We can confidently say that since
+            # we have a hash, we did.
+            returned_match = True
+        return returned_match

     def format_aggs(self, aggs):
         """
@@ -269,23 +325,30 @@ class NotificationRuleData(object):
         """
         new_aggs = {}
         for agg_name, agg in aggs.items():
-            # Already checked membership below
-            op, value = self.aggs[agg_name]
-            new_aggs[agg_name] = f"{agg['value']}{op}{value}"
-        return
+            if agg_name in self.aggs:
+                op, value = self.aggs[agg_name]
+                new_aggs[agg_name] = f"{agg['value']}{op}{value}"
+        return new_aggs

     def reform_matches(self, index, matches, meta, mode):
         if not isinstance(matches, list):
             matches = [matches]
         matches_copy = matches.copy()
-        match_ts = datetime.utcnow().isoformat()
+        # match_ts = datetime.utcnow().isoformat()
+        match_ts = int(datetime.utcnow().timestamp())
+        batch_id = uuid.uuid4()
+
+        # Filter empty fields in meta
+        meta = {k: v for k, v in meta.items() if v}
         for match_index, _ in enumerate(matches_copy):
             matches_copy[match_index]["index"] = index
-            matches_copy[match_index]["rule_uuid"] = self.object.id
+            matches_copy[match_index]["rule_id"] = str(self.object.id)
             matches_copy[match_index]["meta"] = meta
             matches_copy[match_index]["match_ts"] = match_ts
             matches_copy[match_index]["mode"] = mode
+            matches_copy[match_index]["batch_id"] = str(batch_id)
         return matches_copy

     async def ingest_matches(self, index, matches, meta, mode):
@@ -294,8 +357,9 @@ class NotificationRuleData(object):
         :param index: the index to store the matches for
         :param matches: the matches to store
         """
-        new_matches = self.reform_matches(index, matches, meta, mode)
-        await self.db.async_store_matches(new_matches)
+        # new_matches = self.reform_matches(index, matches, meta, mode)
+        if self.object.ingest:
+            await self.db.async_store_matches(matches)

     def ingest_matches_sync(self, index, matches, meta, mode):
         """
@@ -303,8 +367,9 @@ class NotificationRuleData(object):
         :param index: the index to store the matches for
         :param matches: the matches to store
         """
-        new_matches = self.reform_matches(index, matches, meta, mode)
-        self.db.store_matches(new_matches)
+        # new_matches = self.reform_matches(index, matches, meta, mode)
+        if self.object.ingest:
+            self.db.store_matches(matches)

     async def rule_matched(self, index, message, meta, mode):
         """
@@ -317,13 +382,34 @@ class NotificationRuleData(object):
         """
         current_match = self.get_match(index, message)
         log.debug(f"Rule matched: {index} - current match: {current_match}")
-        if current_match is False:
-            # Matched now, but not before
-            if "matched" not in meta:
-                meta["matched"] = self.format_aggs(meta["aggs"])
-            rule_notify(self.object, index, message, meta)
-            self.store_match(index, message)
-            await self.ingest_matches(index, message, meta, mode)
+
+        last_run_had_matches = current_match is True
+
+        if self.policy in ["change", "default"]:
+            # Change or Default policy, notifying only on new results
+            if last_run_had_matches:
+                # Last run had matches, and this one did too
+                # We don't need to notify
+                return
+
+        elif self.policy == "always":
+            # Only here for completeness, we notify below by default
+            pass
+
+        # We hit the return above if we don't need to notify
+        if "matched" not in meta:
+            meta["matched"] = self.format_matched(message)
+        if "aggs" in meta:
+            aggs_formatted = self.format_aggs(meta["aggs"])
+            if aggs_formatted:
+                meta["matched_aggs"] = aggs_formatted
+
+        meta["is_match"] = True
+
+        self.store_match(index, message)
+        message = self.reform_matches(index, message, meta, mode)
+        rule_notify(self.object, index, message, meta)
+        await self.ingest_matches(index, message, meta, mode)

     def rule_matched_sync(self, index, message, meta, mode):
         """
@@ -336,15 +422,37 @@ class NotificationRuleData(object):
         """
         current_match = self.get_match(index, message)
         log.debug(f"Rule matched: {index} - current match: {current_match}")
-        if current_match is False:
-            # Matched now, but not before
-            if "matched" not in meta:
-                meta["matched"] = self.format_aggs(meta["aggs"])
-            rule_notify(self.object, index, message, meta)
-            self.store_match(index, message)
-            self.ingest_matches_sync(index, message, meta, mode)
+
+        last_run_had_matches = current_match is True
+
+        if self.policy in ["change", "default"]:
+            # Change or Default policy, notifying only on new results
+            if last_run_had_matches:
+                # Last run had matches, and this one did too
+                # We don't need to notify
+                return
+
+        elif self.policy == "always":
+            # Only here for completeness, we notify below by default
+            pass
+
+        # We hit the return above if we don't need to notify
+        if "matched" not in meta:
+            meta["matched"] = self.format_matched(message)
+        if "aggs" in meta:
+            aggs_formatted = self.format_aggs(meta["aggs"])
+            if aggs_formatted:
+                meta["matched_aggs"] = aggs_formatted
+
+        meta["is_match"] = True
+
+        self.store_match(index, message)
+        message = self.reform_matches(index, message, meta, mode)
+        rule_notify(self.object, index, message, meta)
+        self.ingest_matches_sync(index, message, meta, mode)

-    async def rule_no_match(self, index=None):
+    # No async helper for this one as we only need it for schedules
+    async def rule_no_match(self, index=None, message=None, mode=None):
         """
         A rule has not matched.
         If the previous run did match, send a notification if configured to notify
@@ -353,12 +461,34 @@ class NotificationRuleData(object):
         """
         current_match = self.get_match(index)
-        log.debug(f"Rule not matched: {index} - current match: {current_match}")
-        if current_match is True:
-            # Matched before, but not now
-            if self.object.send_empty:
-                rule_notify(self.object, index, "no_match", None)
-            self.store_match(index, False)
+        log.debug(
+            f"Rule not matched: {index} - current match: {current_match}: {message}"
+        )
+
+        last_run_had_matches = current_match is True
+        initial = current_match is None
+        self.store_match(index, False)
+
+        if self.policy != "always":
+            # We hit the return above if we don't need to notify
+            if self.policy in ["change", "default"]:
+                if not last_run_had_matches and not initial:
+                    # We don't need to notify if the last run didn't have matches
+                    return
+
+        if self.policy in ["always", "change"]:
+            # Never notify for empty matches on default policy
+            meta = {"msg": message, "is_match": False}
+            matches = [{"msg": None}]
+            message = self.reform_matches(index, matches, meta, mode)
+            rule_notify(self.object, index, matches, meta)
+            await self.ingest_matches(
+                index=index,
+                matches=matches,
+                meta=meta,
+                mode="schedule",
+            )

     async def run_schedule(self):
         """
@@ -369,11 +499,17 @@ class NotificationRuleData(object):
         response = await self.db.schedule_query_results(self)
         if not response:
             # No results in the result_map
-            await self.rule_no_match()
+            await self.rule_no_match(
+                message="No response from database", mode="schedule"
+            )
+            return
         for index, (meta, results) in response.items():
             if not results:
                 # Falsy results, no matches
-                await self.rule_no_match(index)
+                await self.rule_no_match(
+                    index, message="No results for index", mode="schedule"
+                )
+                continue

             # Add the match values of all aggregations to a list
             aggs_for_index = []
@@ -391,18 +527,17 @@ class NotificationRuleData(object):
                 )
                 continue

             # Default branch, since the happy path has a continue keyword
-            await self.rule_no_match(index)
+            await self.rule_no_match(
+                index, message="Aggregation did not match", mode="schedule"
+            )

     def test_schedule(self):
         """
         Test the schedule query to ensure it is valid.
-        Run the query with the async_to_sync helper so we can call it from
-        a form.
         Raises an exception if the query is invalid.
         """
         if self.db:
-            sync_schedule = async_to_sync(self.db.schedule_query_results)
-            sync_schedule(self)
+            self.db.schedule_query_results_test_sync(self)

     def validate_schedule_fields(self):
         """
@@ -465,6 +600,14 @@ class NotificationRuleData(object):
             raise RuleParseError(
                 "Field tags cannot be used with on-demand rules", "data"
             )
+        if self.policy != "default":
+            raise RuleParseError(
+                (
+                    f"Cannot use {self.cleaned_data['policy']} policy with "
+                    "on-demand rules"
+                ),
+                "policy",
+            )

     @property
     def is_schedule(self):

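Taken together, these changes replace the boolean send_empty with a three-way policy. A condensed restatement of the trigger logic as it reads after this diff (paraphrased for illustration, not the project's actual code, and omitting the special case for a rule's very first run):

def should_notify(policy, matched_now, matched_last_run):
    if matched_now:
        # "default" and "change" notify only on new results;
        # "always" notifies on every run
        return policy == "always" or not matched_last_run
    # No results this run: "default" stays silent, "change" notifies
    # on the transition from matches to none, "always" always fires
    if policy == "change":
        return matched_last_run
    return policy == "always"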
View File

@@ -1,5 +1,6 @@
 import msgpack
 from django.core.management.base import BaseCommand
+from django.conf import settings
 from redis import StrictRedis

 from core.db.storage import db
@@ -93,7 +94,13 @@ def process_rules(data):
 class Command(BaseCommand):
     def handle(self, *args, **options):
-        r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
+        r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=10)  # To match Monolith DB
+        # r = StrictRedis(
+        #     host=settings.REDIS_HOST,
+        #     port=settings.REDIS_PORT,
+        #     password=settings.REDIS_PASSWORD,
+        #     db=settings.REDIS_DB
+        # )
         p = r.pubsub()
         p.psubscribe("messages")
         for message in p.listen():
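For context, this command consumes msgpack-encoded events published on the "messages" pattern. The loop body sits outside this hunk, but given the imports and the `process_rules` handler named in the hunk header, it presumably resembles:

for message in p.listen():
    if message["type"] != "pmessage":
        continue  # skip subscribe confirmations from redis-py
    data = msgpack.unpackb(message["data"], raw=False)
    process_rules(data)  # handler defined earlier in this file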

View File

@@ -44,8 +44,11 @@ class Command(BaseCommand):
         for interval in INTERVALS:
             log.debug(f"Scheduling {interval} second job")
             scheduler.add_job(job, "interval", seconds=interval, args=[interval])
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        scheduler._eventloop = loop
         scheduler.start()
-        loop = asyncio.get_event_loop()
         try:
             loop.run_forever()
         except (KeyboardInterrupt, SystemExit):
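The reordering creates and registers a fresh event loop before the scheduler starts instead of fetching one afterwards; `asyncio.get_event_loop()` outside a running loop is deprecated and can hand back a loop other than the one the scheduler bound to. The same pattern in isolation (assuming APScheduler's AsyncIOScheduler, and noting that `_eventloop` is a private attribute, used here exactly as the diff uses it):

import asyncio
from apscheduler.schedulers.asyncio import AsyncIOScheduler

scheduler = AsyncIOScheduler()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
scheduler._eventloop = loop  # private API, mirrored from the diff
scheduler.start()
try:
    loop.run_forever()
except (KeyboardInterrupt, SystemExit):
    pass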

View File

@@ -0,0 +1,28 @@
# Generated by Django 4.1.5 on 2023-02-09 14:38

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0025_alter_notificationrule_id'),
    ]

    operations = [
        migrations.AddField(
            model_name='notificationrule',
            name='policy',
            field=models.CharField(choices=[('default', 'Only trigger for matched events'), ('change', 'Trigger only if no results found when they were last run'), ('always', 'Always trigger regardless of whether results are found')], default='default', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='ntfy', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationsettings',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='ntfy', max_length=255),
        ),
    ]

View File

@@ -0,0 +1,28 @@
# Generated by Django 4.1.6 on 2023-02-13 10:18

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0026_notificationrule_policy_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationrule',
            name='policy',
            field=models.CharField(choices=[('default', 'Default: Trigger only when there were no results last time'), ('change', 'Change: Default + trigger when there are no results (if there were before)'), ('always', 'Always: Trigger on every run (not recommended for low intervals)')], default='default', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='topic',
            field=models.CharField(blank=True, max_length=2048, null=True),
        ),
        migrations.AlterField(
            model_name='notificationsettings',
            name='topic',
            field=models.CharField(blank=True, max_length=2048, null=True),
        ),
    ]

View File

@@ -0,0 +1,33 @@
# Generated by Django 4.1.6 on 2023-02-13 21:03

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0027_alter_notificationrule_policy_and_more'),
    ]

    operations = [
        migrations.RenameField(
            model_name='notificationrule',
            old_name='send_empty',
            new_name='ingest',
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='interval',
            field=models.IntegerField(choices=[(0, 'On demand'), (5, 'Every 5 seconds'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=60),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='webhook', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='window',
            field=models.CharField(blank=True, default='30d', max_length=255, null=True),
        ),
    ]

View File

@@ -39,6 +39,16 @@ INTERVAL_CHOICES = (
 SERVICE_CHOICES = (
     ("ntfy", "NTFY"),
     ("webhook", "Custom webhook"),
+    ("none", "Disabled"),
+)
+
+POLICY_CHOICES = (
+    ("default", "Default: Trigger only when there were no results last time"),
+    (
+        "change",
+        "Change: Default + trigger when there are no results (if there were before)",
+    ),
+    ("always", "Always: Trigger on every run (not recommended for low intervals)"),
 )
@@ -68,8 +78,9 @@ class User(AbstractUser):
         """
         Override the save function to create a Stripe customer.
         """
-        if not self.stripe_id:  # stripe ID not stored
-            self.stripe_id = get_or_create(self.email, self.first_name, self.last_name)
+        if settings.BILLING_ENABLED:
+            if not self.stripe_id:  # stripe ID not stored
+                self.stripe_id = get_or_create(self.email, self.first_name, self.last_name)

         to_update = {}
         if self.email != self._original.email:
@@ -79,14 +90,16 @@ class User(AbstractUser):
         if self.last_name != self._original.last_name:
             to_update["last_name"] = self.last_name

-        update_customer_fields(self.stripe_id, **to_update)
+        if settings.BILLING_ENABLED:
+            update_customer_fields(self.stripe_id, **to_update)

         super().save(*args, **kwargs)

     def delete(self, *args, **kwargs):
-        if self.stripe_id:
-            stripe.Customer.delete(self.stripe_id)
-            logger.info(f"Deleted Stripe customer {self.stripe_id}")
+        if settings.BILLING_ENABLED:
+            if self.stripe_id:
+                stripe.Customer.delete(self.stripe_id)
+                logger.info(f"Deleted Stripe customer {self.stripe_id}")

         super().delete(*args, **kwargs)

     def has_plan(self, plan):
@@ -182,16 +195,19 @@ class NotificationRule(models.Model):
     user = models.ForeignKey(User, on_delete=models.CASCADE)
     name = models.CharField(max_length=255)
     priority = models.IntegerField(choices=PRIORITY_CHOICES, default=1)
-    topic = models.CharField(max_length=255, null=True, blank=True)
+    topic = models.CharField(max_length=2048, null=True, blank=True)
     url = models.CharField(max_length=1024, null=True, blank=True)
-    interval = models.IntegerField(choices=INTERVAL_CHOICES, default=0)
+    interval = models.IntegerField(choices=INTERVAL_CHOICES, default=60)
-    window = models.CharField(max_length=255, null=True, blank=True)
+    window = models.CharField(max_length=255, default="30d", null=True, blank=True)
     amount = models.PositiveIntegerField(default=1, null=True, blank=True)
     enabled = models.BooleanField(default=True)
     data = models.TextField()
     match = models.JSONField(null=True, blank=True)
-    service = models.CharField(choices=SERVICE_CHOICES, max_length=255, default="ntfy")
-    send_empty = models.BooleanField(default=False)
+    service = models.CharField(
+        choices=SERVICE_CHOICES, max_length=255, default="webhook"
+    )
+    ingest = models.BooleanField(default=False)
+    policy = models.CharField(choices=POLICY_CHOICES, max_length=255, default="default")

     def __str__(self):
         return f"{self.user} - {self.name}"
@@ -227,8 +243,6 @@ class NotificationRule(models.Model):
             user_settings["url"] = self.url
         if self.service is not None:
             user_settings["service"] = self.service
-        if self.send_empty is not None:
-            user_settings["send_empty"] = self.send_empty

         if check:
             if user_settings["service"] == "ntfy" and user_settings["topic"] is None:
@@ -240,7 +254,7 @@ class NotificationRule(models.Model):
 class NotificationSettings(models.Model):
     user = models.OneToOneField(User, on_delete=models.CASCADE)
-    topic = models.CharField(max_length=255, null=True, blank=True)
+    topic = models.CharField(max_length=2048, null=True, blank=True)
     url = models.CharField(max_length=1024, null=True, blank=True)
     service = models.CharField(choices=SERVICE_CHOICES, max_length=255, default="ntfy")

View File

@@ -66,10 +66,11 @@ $(document).ready(function(){
         "file_size": "off",
         "lang_code": "off",
         "tokens": "off",
-        "rule_uuid": "off",
+        "rule_id": "off",
         "index": "off",
         "meta": "off",
         "match_ts": "off",
+        "batch_id": "off"
         //"lang_name": "off",
         // "words_noun": "off",
         // "words_adj": "off",

View File

@@ -1,333 +1,346 @@
{% load static %}
{% load has_plan %}
{% load cache %}
<!DOCTYPE html>
<html lang="en-GB">
    {% cache 600 head request.path_info %}
    <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <title>Pathogen - {{ request.path_info }}</title>
        <link rel="shortcut icon" href="{% static 'favicon.ico' %}">
        <link rel="manifest" href="{% static 'manifest.webmanifest' %}">
        <link rel="stylesheet" href="{% static 'css/bulma.min.css' %}">
        <link rel="stylesheet" href="{% static 'css/bulma-tooltip.min.css' %}">
        <link rel="stylesheet" href="https://site-assets.fontawesome.com/releases/v6.1.1/css/all.css">
        <link rel="stylesheet" href="{% static 'css/bulma-slider.min.css' %}">
        <link rel="stylesheet" href="{% static 'css/bulma-calendar.min.css' %}">
        <link rel="stylesheet" href="{% static 'css/bulma-tagsinput.min.css' %}">
        <link rel="stylesheet" href="{% static 'css/bulma-switch.min.css' %}">
        <link rel="stylesheet" href="{% static 'css/gridstack.min.css' %}">
        <script src="{% static 'js/bulma-calendar.min.js' %}" integrity="sha384-DThNif0xGXbopX7+PE+UabkuClfI/zELNhaVqoGLutaWB76dyMw0vIQBGmUxSfVQ" crossorigin="anonymous"></script>
        <script src="{% static 'js/bulma-slider.min.js' %}" integrity="sha384-wbyps8iLG8QzJE02viYc/27BtT5HSa11+b5V7QPR1/huVuA8f4LRTNGc82qAIeIZ" crossorigin="anonymous"></script>
        <script src="{% static 'js/htmx.min.js' %}" integrity="sha384-cZuAZ+ZbwkNRnrKi05G/fjBX+azI9DNOkNYysZ0I/X5ZFgsmMiBXgDZof30F5ofc" crossorigin="anonymous"></script>
        <script defer src="{% static 'js/remove-me.js' %}" integrity="sha384-6fHcFNoQ8QEI3ZDgw9Z/A6Brk64gF7AnFbLgdrumo8/kBbsKQ/wo7wPegj5WkzuG" crossorigin="anonymous"></script>
        <script defer src="{% static 'js/hyperscript.min.js' %}" integrity="sha384-6GYN8BDHOJkkru6zcpGOUa//1mn+5iZ/MyT6mq34WFIpuOeLF52kSi721q0SsYF9" crossorigin="anonymous"></script>
        <script src="{% static 'js/bulma-tagsinput.min.js' %}"></script>
        <script src="{% static 'js/jquery.min.js' %}"></script>
        <script src="{% static 'js/gridstack-all.js' %}"></script>
        <script defer src="{% static 'js/magnet.min.js' %}"></script>
        <script>
            document.addEventListener("restore-scroll", function(event) {
                var scrollpos = localStorage.getItem('scrollpos');
                if (scrollpos) {
                    window.scrollTo(0, scrollpos)
                };
            });
            document.addEventListener("htmx:beforeSwap", function(event) {
                localStorage.setItem('scrollpos', window.scrollY);
            });
        </script>
        <script>
            document.addEventListener('DOMContentLoaded', () => {
                // Get all "navbar-burger" elements
                const $navbarBurgers = Array.prototype.slice.call(document.querySelectorAll('.navbar-burger'), 0);
                // Add a click event on each of them
                $navbarBurgers.forEach( el => {
                    el.addEventListener('click', () => {
                        // Get the target from the "data-target" attribute
                        const target = el.dataset.target;
                        const $target = document.getElementById(target);
                        // Toggle the "is-active" class on both the "navbar-burger" and the "navbar-menu"
                        el.classList.toggle('is-active');
                        $target.classList.toggle('is-active');
                    });
                });
            });
        </script>
        <style>
            .icon { border-bottom: 0px !important;}
            .wrap {
                /* white-space: pre-wrap;
                white-space: -moz-pre-wrap;
                white-space: -pre-wrap;
                white-space: -o-pre-wrap; */
                word-wrap: break-word;
            }
            .nowrap-parent {
                white-space: nowrap;
            }
            .nowrap-child {
                display: inline-block;
            }
            .slider-output {
                width: 4rem !important;
            }
            .htmx-indicator{
                opacity:0;
                transition: opacity 500ms ease-in;
            }
            .htmx-request .htmx-indicator{
                opacity:1
            }
            .htmx-request.htmx-indicator{
                opacity:1
            }
            .dropdown-content {
                height: 20em;
                overflow: auto;
            }
            table.relays-table tr:nth-of-type(2n) td {
                border-bottom: 3px solid grey;
            }
            .tooltiptext {
                visibility: hidden;
                background-color: black;
                color: #fff;
                text-align: center;
                padding: 5px 0;
                border-radius: 6px;
                position: absolute;
                z-index: 1;
            }
            .rounded-tooltip:hover .tooltiptext {
                visibility: visible;
            }
            #sentiment-container {
                position: fixed;
                top: 0;
                right: 0;
                bottom: 0;
                left: 0;
                height: 100vh;
                width: 100vw;
                z-index: -2;
            }
            .table {
                background: transparent !important;
            }
            tr {
                transition: all 0.2s ease-in-out;
            }
            tr:hover {
                cursor:pointer;
                background-color:rgba(221, 224, 255, 0.3) !important;
            }
            a.panel-block {
                transition: all 0.2s ease-in-out;
            }
            a.panel-block:hover {
                cursor:pointer;
                background-color:rgba(221, 224, 255, 0.3) !important;
            }
            .panel, .box, .modal {
                background-color:rgba(250, 250, 250, 0.5) !important;
            }
            .modal, .modal.box{
                background-color:rgba(210, 210, 210, 0.9) !important;
            }
            .modal-background{
                background-color:rgba(255, 255, 255, 0.3) !important;
            }
            .has-background-grey-lighter{
                background-color:rgba(219, 219, 219, 0.5) !important;
            }
            .navbar {
                background-color:rgba(0, 0, 0, 0.03) !important;
            }
            .grid-stack-item-content {
                display: flex !important;
                flex-direction: column !important;
                overflow-x: hidden !important;
                overflow-y: hidden !important;
            }
            .panel {
                display: flex !important;
                flex-direction: column !important;
                overflow: hidden;
            }
            .panel-block {
                overflow-y:auto;
                overflow-x:auto;
                min-height: 90%;
                display: block;
            }
            .floating-window {
                /* background-color:rgba(210, 210, 210, 0.6) !important; */
                display: flex !important;
                flex-direction: column !important;
                overflow-x: hidden !important;
                overflow-y: hidden !important;
                max-height: 300px;
                z-index: 9000;
                position: absolute;
                top: 50px;
                left: 50px;
            }
            .floating-window .panel {
                background-color:rgba(250, 250, 250, 0.8) !important;
            }
            .float-right {
                float: right;
                padding-right: 5px;
                padding-left: 5px;
            }
            .grid-stack-item:hover .ui-resizable-handle {
                display: block !important;
            }
            .ui-resizable-handle {
                z-index: 39 !important;
            }
            .small-field {
                overflow: hidden;
                text-overflow: ellipsis;
                overflow-y: hidden;
            }
        </style>
        <!-- Piwik --> {# Yes it's in the source, fight me #}
        <script type="text/javascript">
            var _paq = _paq || [];
            _paq.push(['trackPageView']);
            _paq.push(['enableLinkTracking']);
            (function() {
                _paq.push(['setTrackerUrl', 'https://api-a6fe73d3464641fe99ba77e5fdafa19c.s.zm.is']);
                _paq.push(['setSiteId', 4]);
                _paq.push(['setApiToken', 'je4TjsrunIM9uD4jrr_DGXJP4_b_Kq6ABhulOLo_Old']);
                var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
                g.type='text/javascript'; g.async=true; g.defer=true; g.src='https://c87zpt9a74m181wto33r.s.zm.is/embed.js'; s.parentNode.insertBefore(g,s);
            })();
        </script>
        <!-- End Piwik Code -->
    </head>
    {% endcache %}
    <body>
        {% cache 600 nav request.user.id %}
        <nav class="navbar" role="navigation" aria-label="main navigation">
            <div class="navbar-brand">
                <a class="navbar-item" href="{% url 'home' %}">
                    <img src="{% static 'logo.svg' %}" width="112" height="28" alt="logo">
                </a>
                <a role="button" class="navbar-burger" aria-label="menu" aria-expanded="false" data-target="bar">
                    <span aria-hidden="true"></span>
                    <span aria-hidden="true"></span>
                    <span aria-hidden="true"></span>
                </a>
            </div>
            <div id="bar" class="navbar-menu">
                <div class="navbar-start">
                    <a class="navbar-item" href="{% url 'home' %}">
                        Search
                    </a>
                    <a class="navbar-item" href="{% url 'rules' type='page' %}">
                        Rules
                    </a>
                    {% if user.is_authenticated %}
                        <div class="navbar-item has-dropdown is-hoverable">
                            <a class="navbar-link">
                                Account
                            </a>
                            <div class="navbar-dropdown">
                                <a class="navbar-item" href="{% url 'billing' %}">
                                    Billing
                                </a>
                                <a class="navbar-item" href="{% url 'notifications_update' type='page' %}">
                                    Notifications
                                </a>
                            </div>
                        </div>
                    {% endif %}
                    {% if user.is_superuser %}
                        <div class="navbar-item has-dropdown is-hoverable">
                            <a class="navbar-link">
                                Manage
                            </a>
                            <div class="navbar-dropdown">
                                <a class="navbar-item" href="{% url 'threshold_irc_overview' %}">
                                    IRC
                                </a>
                                <a class="navbar-item" href="#">
                                    Discord
                                </a>
                                <a class="navbar-item" href="{% url 'monolith_stats' %}">
                                    Stats
                                </a>
                            </div>
                        </div>
                    {% endif %}
                    {% if perms.core.use_insights %}
                        <div class="navbar-item has-dropdown is-hoverable">
                            <a class="navbar-link">
                                Insights
                            </a>
                            <div class="navbar-dropdown">
                                {% for index in user.allowed_indices %}
                                    {% if index != "meta" %}
                                        <a class="navbar-item" href="{% url 'insights' index=index %}">
                                            {{ index }}
                                        </a>
                                    {% endif %}
                                {% endfor %}
                            </div>
                        </div>
                    {% endif %}
                    <a class="navbar-item add-button">
                        Install
                    </a>
                </div>
                <div class="navbar-end">
                    <div class="navbar-item">
                        <div class="buttons">
                            {% if not user.is_authenticated %}
                                <a class="button is-info" href="{% url 'signup' %}">
                                    Sign up
                                </a>
                                <a class="button" href="{% url 'login' %}">
                                    Log in
                                </a>
                            {% endif %}
                            {% if user.is_authenticated %}
                                <a class="button" href="{% url 'logout' %}">Logout</a>
                            {% endif %}
                        </div>
                    </div>
                </div>
            </div>
        </nav>
        {% endcache %}
        <script>
            let deferredPrompt;
            const addBtn = document.querySelector('.add-button');

View File

@@ -0,0 +1,15 @@
{% extends "base.html" %}
{% block content %}
<div
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-get="{% url 'monolith_stats_db' type='page' %}"
hx-trigger="load, every 5s"
hx-target="#stats"
hx-swap="innerHTML">
</div>
<div class="box">
<div id="stats">
</div>
</div>
{% endblock %}

View File

@@ -0,0 +1,14 @@
{% extends 'mixins/partials/generic-detail.html' %}
{% block tbody %}
{% for item in object %}
{% if item.data %}
{% for row in item.data %}
<tr>
<th>{{ row.Variable_name }}</th>
<td>{{ row.Value }}</td>
</tr>
{% endfor %}
{% endif %}
{% endfor %}
{% endblock %}

View File

@@ -1,4 +1,4 @@
-{% extends 'wm/modal.html' %}
+{% extends 'mixins/wm/modal.html' %}
 {% load index %}
 {% load static %}
@@ -42,7 +42,7 @@
 {% endblock %}
 {% block modal_content %}
-    {% include 'partials/notify.html' %}
+    {% include 'mixins/partials/notify.html' %}
     <div class="tabs is-toggle is-fullwidth is-info" id="tabs-{{ unique }}">
         <ul>
             <li class="is-active" data-tab="1">

View File

@@ -1,4 +1,4 @@
-{% extends 'wm/modal.html' %}
+{% extends 'mixins/wm/modal.html' %}
 {% block modal_content %}
     {% include 'window-content/drilldown.html' %}

View File

@@ -1 +0,0 @@
<button class="modal-close is-large" aria-label="close"></button>

View File

@@ -1,3 +0,0 @@
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
onclick='grid.removeWidget("widget-{{ unique }}");'></i>

View File

@@ -1,3 +0,0 @@
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
data-script="on click remove the closest <nav/>"></i>

View File

@@ -1,48 +1,48 @@
{% load static %}
{% load cache %}
{% load cachalot cache %}
{% get_last_invalidation 'core.Plan' as last %}
{% cache 600 objects_plans request.user.id plans last %}
    {% for plan in plans %}
        <div class="box">
            <article class="media">
                <div class="media-left">
                    <figure class="image is-64x64">
                        <img src="{% static plan.image %}" alt="Image">
                    </figure>
                </div>
                <div class="media-content">
                    <div class="content">
                        <p>
                            <strong>{{ plan.name }}</strong> <small>£{{ plan.cost }}</small>
                            {% if plan in user_plans %}
                                <i class="fas fa-check" aria-hidden="true"></i>
                            {% endif %}
                            <br>
                            {{ plan.description }}
                        </p>
                    </div>
                    <nav class="level is-mobile">
                        <div class="level-left">
                            {% if plan not in user_plans %}
                                <a class="level-item" href="/order/{{ plan.name }}">
                                    <span class="icon is-small has-text-success">
                                        <i class="fas fa-plus" aria-hidden="true"></i>
                                    </span>
                                </a>
                            {% endif %}
                            {% if plan in user_plans %}
                                <a class="level-item" href="/cancel_subscription/{{ plan.name }}">
                                    <span class="icon is-small has-text-info">
                                        <i class="fas fa-cancel" aria-hidden="true"></i>
                                    </span>
                                </a>
                            {% endif %}
                        </div>
                    </nav>
                </div>
            </article>
        </div>
    {% endfor %}
{% endcache %}

View File

@@ -1,4 +1,4 @@
{% extends 'wm/widget.html' %} {% extends 'mixins/wm/widget.html' %}
{% load static %} {% load static %}
{% block heading %} {% block heading %}
@@ -6,8 +6,7 @@
{% endblock %}
{% block panel_content %}
-{% include 'partials/notify.html' %}
-<script src="{% static 'js/column-shifter.js' %}"></script>
+{% include 'mixins/partials/notify.html' %}
{% if cache is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="Cached">
  <i class="fa-solid fa-database"></i>

File diff suppressed because it is too large

View File

@@ -1,93 +1,109 @@
-{% include 'partials/notify.html' %}
+{% load cache %}
+{% load cachalot cache %}
+{% get_last_invalidation 'core.NotificationRule' as last %}
+{% include 'mixins/partials/notify.html' %}
+{% cache 600 objects_rules request.user.id object_list last %}
<table
  class="table is-fullwidth is-hoverable"
  hx-target="#{{ context_object_name }}-table"
  id="{{ context_object_name }}-table"
  hx-swap="outerHTML"
  hx-trigger="{{ context_object_name_singular }}Event from:body"
  hx-get="{{ list_url }}">
  <thead>
    <th>id</th>
    <th>user</th>
    <th>name</th>
    <th>interval</th>
    <th>window</th>
    <th>priority</th>
    <th>topic</th>
    <th>enabled</th>
+   <th>ingest</th>
    <th>data length</th>
    <th>match</th>
    <th>actions</th>
  </thead>
  {% for item in object_list %}
    <tr>
-     <td><a href="/search/?rule={{ item.id }}&query=*&source=all">{{ item.id }}</a></td>
+     <td><a href="/?query=*&source=all&rule={{ item.id }}">{{ item.id }}</a></td>
      <td>{{ item.user }}</td>
      <td>{{ item.name }}</td>
      <td>{{ item.interval }}s</td>
      <td>{{ item.window|default_if_none:"—" }}</td>
      <td>{{ item.priority }}</td>
      <td>{{ item.topic|default_if_none:"—" }}</td>
      <td>
        {% if item.enabled %}
          <span class="icon">
            <i class="fa-solid fa-check"></i>
          </span>
        {% else %}
          <span class="icon">
            <i class="fa-solid fa-xmark"></i>
          </span>
        {% endif %}
      </td>
+     <td>
+       {% if item.ingest %}
+         <span class="icon">
+           <i class="fa-solid fa-check"></i>
+         </span>
+       {% else %}
+         <span class="icon">
+           <i class="fa-solid fa-xmark"></i>
+         </span>
+       {% endif %}
+     </td>
      <td>{{ item.data|length }}</td>
      <td>{{ item.matches }}</td>
      <td>
        <div class="buttons">
          <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-get="{% url 'rule_update' type=type pk=item.id %}"
            hx-trigger="click"
            hx-target="#{{ type }}s-here"
            hx-swap="innerHTML"
            class="button">
            <span class="icon-text">
              <span class="icon">
                <i class="fa-solid fa-pencil"></i>
              </span>
            </span>
          </button>
          <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-delete="{% url 'rule_delete' type=type pk=item.id %}"
            hx-trigger="click"
            hx-target="#modals-here"
            hx-swap="innerHTML"
            hx-confirm="Are you sure you wish to delete {{ item.name }}?"
            class="button">
            <span class="icon-text">
              <span class="icon">
                <i class="fa-solid fa-xmark"></i>
              </span>
            </span>
          </button>
          <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-post="{% url 'rule_clear' type=type pk=item.id %}"
            hx-trigger="click"
            hx-target="#modals-here"
            hx-swap="innerHTML"
            hx-confirm="Are you sure you wish to clear matches for {{ item.name }}?"
            class="button">
            <span class="icon-text">
              <span class="icon">
                <i class="fa-solid fa-arrow-rotate-right"></i>
              </span>
            </span>
          </button>
        </div>
      </td>
    </tr>
  {% endfor %}
</table>
+{% endcache %}

View File

@@ -3,7 +3,7 @@
</div>
{% if params.index != 'int' and params.index != 'meta' %}
-<div id="sentiment-container" {% if params.show_sentiment is None %} class="is-hidden" {% endif %}>
+<div id="sentiment-container" {% if params.graph is None %} class="is-hidden" {% endif %}>
  <canvas id="sentiment-chart"></canvas>
</div>
<script src="{% static 'chart.js' %}"></script>

View File

@@ -19,7 +19,7 @@
hx-swap="outerHTML"> hx-swap="outerHTML">
</div> </div>
<div id="info"> <div id="info">
{% include 'partials/notify.html' %} {% include 'mixins/partials/notify.html' %}
{% if item is not None %} {% if item is not None %}
<div class="content" style="max-height: 30em; overflow: auto;"> <div class="content" style="max-height: 30em; overflow: auto;">
<div class="table-container"> <div class="table-container">

View File

@@ -1,7 +1,7 @@
{% extends "base.html" %} {% extends "base.html" %}
{% load static %} {% load static %}
{% block content %} {% block content %}
{% include 'partials/notify.html' %} {% include 'mixins/partials/notify.html' %}
<script src="{% static 'tabs.js' %}"></script> <script src="{% static 'tabs.js' %}"></script>
<style> <style>
.icon { border-bottom: 0px !important;} .icon { border-bottom: 0px !important;}

View File

@@ -1,4 +1,4 @@
-{% extends 'wm/widget.html' %}
+{% extends 'mixins/wm/widget.html' %}
{% block widget_options %}

View File

@@ -1,34 +0,0 @@
{% include 'partials/notify.html' %}
{% if page_title is not None %}
<h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
<h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
{% load crispy_forms_tags %}
{% load crispy_forms_bulma_field %}
<form
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{{ submit_url }}"
hx-target="#modals-here"
hx-swap="innerHTML">
{% csrf_token %}
{{ form|crispy }}
{% if hide_cancel is not True %}
<button
type="button"
class="button is-light modal-close-button">
Cancel
</button>
{% endif %}
<button type="submit" class="button modal-close-button">Submit</button>
</form>

View File

@@ -1,45 +0,0 @@
{% include 'partials/notify.html' %}
{% if page_title is not None %}
<h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
<h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
<div class="buttons">
{% if submit_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-get="{{ submit_url }}"
hx-trigger="click"
hx-target="#modals-here"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-plus"></i>
</span>
<span>{{ title_singular }}</span>
</span>
</button>
{% endif %}
{% if delete_all_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-delete="{{ delete_all_url }}"
hx-trigger="click"
hx-target="#modals-here"
hx-swap="innerHTML"
hx-confirm="Are you sure you wish to delete all {{ context_object_name }}?"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-xmark"></i>
</span>
<span>Delete all {{ context_object_name }} </span>
</span>
</button>
{% endif %}
</div>
{% include detail_template %}

View File

@@ -1,45 +0,0 @@
{% include 'partials/notify.html' %}
{% if page_title is not None %}
<h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
<h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
<div class="buttons">
{% if submit_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-get="{{ submit_url }}"
hx-trigger="click"
hx-target="#modals-here"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-plus"></i>
</span>
<span>{{ title_singular }}</span>
</span>
</button>
{% endif %}
{% if delete_all_url is not None %}
<button
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-delete="{{ delete_all_url }}"
hx-trigger="click"
hx-target="#modals-here"
hx-swap="innerHTML"
hx-confirm="Are you sure you wish to delete all {{ context_object_name }}?"
class="button">
<span class="icon-text">
<span class="icon">
<i class="fa-solid fa-xmark"></i>
</span>
<span>Delete all {{ context_object_name }} </span>
</span>
</button>
{% endif %}
</div>
{% include list_template %}

View File

@@ -1,6 +1,6 @@
{% load static %}
-{% include 'partials/notify.html' %}
+{% include 'mixins/partials/notify.html' %}
{% if cache is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="Cached">
  <i class="fa-solid fa-database"></i>

View File

@@ -258,7 +258,7 @@
id="sentiment_graph_switch" id="sentiment_graph_switch"
type="checkbox" type="checkbox"
class="switch is-rounded is-info" class="switch is-rounded is-info"
name="show_sentiment" name="graph"
data-script="on click toggle .is-hidden on #sentiment-container"> data-script="on click toggle .is-hidden on #sentiment-container">
<label <label

View File

@@ -1,4 +1,4 @@
-{% extends 'wm/window.html' %}
+{% extends 'mixins/wm/window.html' %}
{% block heading %}
Drilldown

View File

@@ -1,20 +0,0 @@
{% load static %}
<script src="{% static 'modal.js' %}"></script>
{% block scripts %}
{% endblock %}
{% block styles %}
{% endblock %}
<div id="modal" class="modal is-active is-clipped">
<div class="modal-background"></div>
<div class="modal-content">
<div class="box">
{% block modal_content %}
{% include window_content %}
{% endblock %}
{% include 'partials/close-modal.html' %}
</div>
</div>
</div>

View File

@@ -1,6 +0,0 @@
{% extends "base.html" %}
{% block content %}
{% include window_content %}
{% endblock %}

View File

@@ -1,17 +0,0 @@
<nav class="panel">
<p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
{% block close_button %}
{% include 'partials/close-window.html' %}
{% endblock %}
{% block heading %}
{% endblock %}
</p>
<article class="panel-block is-active">
<div class="control">
{% block panel_content %}
{% endblock %}
</div>
</article>
</nav>

View File

@@ -1,37 +0,0 @@
<div id="widget">
<div id="widget-{{ unique }}" class="grid-stack-item" {% block widget_options %}gs-w="10" gs-h="1" gs-y="10" gs-x="1"{% endblock %}>
<div class="grid-stack-item-content">
<nav class="panel">
<p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
{% block close_button %}
{% include 'partials/close-widget.html' %}
{% endblock %}
<i
class="fa-solid fa-arrows-minimize has-text-grey-light float-right"
onclick='grid.compact();'></i>
{% block heading %}
{{ title }}
{% endblock %}
</p>
<article class="panel-block is-active">
<div class="control">
{% block panel_content %}
{% include window_content %}
{% endblock %}
</div>
</article>
</nav>
</div>
</div>
</div>
<script>
{% block custom_script %}
{% endblock %}
var widget_event = new Event('load-widget');
document.dispatchEvent(widget_event);
</script>
{% block custom_end %}
{% endblock %}

View File

@@ -1,10 +0,0 @@
<magnet-block attract-distance="10" align-to="outer|center" class="floating-window">
{% extends 'wm/panel.html' %}
{% block heading %}
{{ title }}
{% endblock %}
{% block panel_content %}
{% include window_content %}
{% endblock %}
</magnet-block>

View File

@@ -6,4 +6,10 @@ register = template.Library()
@register.filter
def pretty(data):
-    return orjson.dumps(data, option=orjson.OPT_INDENT_2).decode("utf-8")
+    prettified = orjson.dumps(data, option=orjson.OPT_INDENT_2).decode("utf-8")
+    if prettified.startswith("{"):
+        prettified = prettified[1:]
+    if prettified.endswith("}"):
+        prettified = prettified[:-1]
+    return prettified
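A quick illustration of the new behaviour with hypothetical data: orjson emits a complete object literal, and the filter now trims the enclosing braces so the output reads as bare key/value lines.

import orjson

data = {"nick": "sam", "net": "libera"}
prettified = orjson.dumps(data, option=orjson.OPT_INDENT_2).decode("utf-8")
# prettified == '{\n  "nick": "sam",\n  "net": "libera"\n}'
print(prettified[1:-1])  # what the updated filter returns
#   "nick": "sam",
#   "net": "libera"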

View File

@@ -0,0 +1,10 @@
from django import template

register = template.Library()


@register.filter
def splitstr(value, arg):
    if type(value) == int:
        raise Exception(f"Attempt to split {value} with separator {arg}")
    return value.split(arg)
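Usage is the normal filter syntax; a hypothetical template fragment and its Python equivalent:

# In a template (after loading the tag library this file registers):
#   {% for tag in item.tags|splitstr:"," %}{{ tag }} {% endfor %}
# Equivalent behaviour in Python:
assert splitstr("a,b,c", ",") == ["a", "b", "c"]

The int guard exists because template values arrive untyped; a numeric value would otherwise surface as a less helpful AttributeError from .split.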

View File

@@ -43,7 +43,6 @@ class ColoredFormatter(logging.Formatter):
def get_logger(name):
    # Define the logging format
    FORMAT = "%(asctime)s %(levelname)18s $BOLD%(name)13s$RESET - %(message)s"
    COLOR_FORMAT = formatter_message(FORMAT, True)

View File

@@ -1,753 +0,0 @@
# import re
# from base64 import b64encode
# from random import randint
# from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
# from cryptography.hazmat.primitives.ciphers.modes import ECB
# from django.conf import settings
# from siphashc import siphash
# from sortedcontainers import SortedSet
import uuid
# from core import r
from django.core.exceptions import ImproperlyConfigured
from django.core.paginator import Paginator
from django.db.models import QuerySet
from django.http import Http404, HttpResponse, HttpResponseBadRequest
from django.urls import reverse
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView
from rest_framework.parsers import FormParser
from core.util import logs
log = logs.get_logger(__name__)
class RestrictedViewMixin:
"""
This mixin overrides two helpers in order to pass the user object to the filters.
get_queryset alters the objects returned for list views.
get_form_kwargs passes the request object to the form class. Remaining permissions
checks are in forms.py
"""
allow_empty = True
queryset = None
model = None
paginate_by = None
paginate_orphans = 0
context_object_name = None
paginator_class = Paginator
page_kwarg = "page"
ordering = None
def get_queryset(self, **kwargs):
"""
This function is overriden to filter the objects by the requesting user.
"""
if self.queryset is not None:
queryset = self.queryset
if isinstance(queryset, QuerySet):
# queryset = queryset.all()
queryset = queryset.filter(user=self.request.user)
elif self.model is not None:
queryset = self.model._default_manager.filter(user=self.request.user)
else:
raise ImproperlyConfigured(
"%(cls)s is missing a QuerySet. Define "
"%(cls)s.model, %(cls)s.queryset, or override "
"%(cls)s.get_queryset()." % {"cls": self.__class__.__name__}
)
if hasattr(self, "get_ordering"):
ordering = self.get_ordering()
if ordering:
if isinstance(ordering, str):
ordering = (ordering,)
queryset = queryset.order_by(*ordering)
return queryset
def get_form_kwargs(self):
"""Passes the request object to the form class.
This is necessary to only display members that belong to a given user"""
kwargs = super().get_form_kwargs()
kwargs["request"] = self.request
return kwargs
class ObjectNameMixin(object):
def __init__(self, *args, **kwargs):
if self.model is None:
self.title = self.context_object_name.title()
self.title_singular = self.context_object_name_singular.title()
else:
self.title_singular = self.model._meta.verbose_name.title() # Hook
self.context_object_name_singular = self.title_singular.lower() # hook
self.title = self.model._meta.verbose_name_plural.title() # Hooks
self.context_object_name = self.title.lower() # hooks
self.context_object_name = self.context_object_name.replace(" ", "")
self.context_object_name_singular = (
self.context_object_name_singular.replace(" ", "")
)
super().__init__(*args, **kwargs)
class ObjectList(RestrictedViewMixin, ObjectNameMixin, ListView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/objects.html"
list_template = None
page_title = None
page_subtitle = None
list_url_name = None
# WARNING: TAKEN FROM locals()
list_url_args = ["type"]
submit_url_name = None
delete_all_url_name = None
widget_options = None
# copied from BaseListView
def get(self, request, *args, **kwargs):
type = kwargs.get("type", None)
if not type:
return HttpResponseBadRequest("No type specified")
if type not in self.allowed_types:
return HttpResponseBadRequest("Invalid type specified")
self.request = request
self.object_list = self.get_queryset(**kwargs)
if isinstance(self.object_list, HttpResponse):
return self.object_list
if isinstance(self.object_list, HttpResponseBadRequest):
return self.object_list
allow_empty = self.get_allow_empty()
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
list_url_args = {}
for arg in self.list_url_args:
if arg in locals():
list_url_args[arg] = locals()[arg]
elif arg in kwargs:
list_url_args[arg] = kwargs[arg]
orig_type = type
if type == "page":
type = "modal"
if not allow_empty:
# When pagination is enabled and object_list is a queryset,
# it's better to do a cheap query than to load the unpaginated
# queryset in memory.
if self.get_paginate_by(self.object_list) is not None and hasattr(
self.object_list, "exists"
):
is_empty = not self.object_list.exists()
else:
is_empty = not self.object_list
if is_empty:
raise Http404("Empty list")
context = self.get_context_data()
context["title"] = self.title + f" ({type})"
context["title_singular"] = self.title_singular
context["unique"] = unique
context["window_content"] = self.window_content
context["list_template"] = self.list_template
context["page_title"] = self.page_title
context["page_subtitle"] = self.page_subtitle
context["type"] = type
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
if self.submit_url_name is not None:
context["submit_url"] = reverse(self.submit_url_name, kwargs={"type": type})
if self.list_url_name is not None:
context["list_url"] = reverse(self.list_url_name, kwargs=list_url_args)
if self.delete_all_url_name:
context["delete_all_url"] = reverse(self.delete_all_url_name)
if self.widget_options:
context["widget_options"] = self.widget_options
# Return partials for HTMX
if self.request.htmx:
if request.headers["HX-Target"] == self.context_object_name + "-table":
self.template_name = self.list_template
elif orig_type == "page":
self.template_name = self.list_template
else:
context["window_content"] = self.list_template
return self.render_to_response(context)
class ObjectCreate(RestrictedViewMixin, ObjectNameMixin, CreateView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/object-form.html"
parser_classes = [FormParser]
page_title = None
page_subtitle = None
model = None
submit_url_name = None
submit_url_args = ["type"]
request = None
# Whether to hide the cancel button in the form
hide_cancel = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = "Create " + self.context_object_name_singular
def post_save(self, obj):
pass
def form_valid(self, form):
obj = form.save(commit=False)
if self.request is None:
raise Exception("Request is None")
obj.user = self.request.user
obj.save()
form.save_m2m()
self.post_save(obj)
context = {"message": "Object created", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def form_invalid(self, form):
"""If the form is invalid, render the invalid form."""
return self.get(self.request, **self.kwargs, form=form)
def get(self, request, *args, **kwargs):
type = kwargs.get("type", None)
if not type:
return HttpResponseBadRequest("No type specified")
if type not in self.allowed_types:
return HttpResponseBadRequest("Invalid type specified")
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
self.request = request
self.kwargs = kwargs
if type == "widget":
self.hide_cancel = True
if type == "page":
type = "modal"
self.object = None
submit_url_args = {}
for arg in self.submit_url_args:
if arg in locals():
submit_url_args[arg] = locals()[arg]
elif arg in kwargs:
submit_url_args[arg] = kwargs[arg]
submit_url = reverse(self.submit_url_name, kwargs=submit_url_args)
context = self.get_context_data()
form = kwargs.get("form", None)
if form:
context["form"] = form
context["unique"] = unique
context["window_content"] = self.window_content
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
context["submit_url"] = submit_url
context["type"] = type
context["hide_cancel"] = self.hide_cancel
if self.page_title:
context["page_title"] = self.page_title
if self.page_subtitle:
context["page_subtitle"] = self.page_subtitle
response = self.render_to_response(context)
# response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def post(self, request, *args, **kwargs):
self.request = request
self.template_name = "partials/notify.html"
return super().post(request, *args, **kwargs)
class ObjectRead(RestrictedViewMixin, ObjectNameMixin, DetailView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/object.html"
detail_template = "partials/generic-detail.html"
page_title = None
page_subtitle = None
model = None
# submit_url_name = None
detail_url_name = None
# WARNING: TAKEN FROM locals()
detail_url_args = ["type"]
request = None
def get(self, request, *args, **kwargs):
type = kwargs.get("type", None)
if not type:
return HttpResponseBadRequest("No type specified")
if type not in self.allowed_types:
return HttpResponseBadRequest()
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
detail_url_args = {}
for arg in self.detail_url_args:
if arg in locals():
detail_url_args[arg] = locals()[arg]
elif arg in kwargs:
detail_url_args[arg] = kwargs[arg]
self.request = request
self.object = self.get_object(**kwargs)
if isinstance(self.object, HttpResponse):
return self.object
orig_type = type
if type == "page":
type = "modal"
context = self.get_context_data()
context["title"] = self.title + f" ({type})"
context["title_singular"] = self.title_singular
context["unique"] = unique
context["window_content"] = self.window_content
context["detail_template"] = self.detail_template
if self.page_title:
context["page_title"] = self.page_title
if self.page_subtitle:
context["page_subtitle"] = self.page_subtitle
context["type"] = type
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
if self.detail_url_name is not None:
context["detail_url"] = reverse(
self.detail_url_name, kwargs=detail_url_args
)
# Return partials for HTMX
if self.request.htmx:
if request.headers["HX-Target"] == self.context_object_name + "-info":
self.template_name = self.detail_template
elif orig_type == "page":
self.template_name = self.detail_template
else:
context["window_content"] = self.detail_template
return self.render_to_response(context)
class ObjectUpdate(RestrictedViewMixin, ObjectNameMixin, UpdateView):
allowed_types = ["modal", "widget", "window", "page"]
window_content = "window-content/object-form.html"
parser_classes = [FormParser]
page_title = None
page_subtitle = None
model = None
submit_url_name = None
submit_url_args = ["type", "pk"]
request = None
# Whether pk is required in the get request
pk_required = True
# Whether to hide the cancel button in the form
hide_cancel = False
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = "Update " + self.context_object_name_singular
def post_save(self, obj):
pass
def form_valid(self, form):
obj = form.save(commit=False)
if self.request is None:
raise Exception("Request is None")
obj.save()
form.save_m2m()
self.post_save(obj)
context = {"message": "Object updated", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def form_invalid(self, form):
"""If the form is invalid, render the invalid form."""
return self.get(self.request, **self.kwargs, form=form)
def get(self, request, *args, **kwargs):
self.request = request
type = kwargs.get("type", None)
pk = kwargs.get("pk", None)
if not type:
return HttpResponseBadRequest("No type specified")
if not pk:
if self.pk_required:
return HttpResponseBadRequest("No pk specified")
if type not in self.allowed_types:
return HttpResponseBadRequest("Invalid type specified")
self.template_name = f"wm/{type}.html"
unique = str(uuid.uuid4())[:8]
if type == "widget":
self.hide_cancel = True
if type == "page":
type = "modal"
self.object = self.get_object()
submit_url_args = {}
for arg in self.submit_url_args:
if arg in locals():
submit_url_args[arg] = locals()[arg]
elif arg in kwargs:
submit_url_args[arg] = kwargs[arg]
submit_url = reverse(self.submit_url_name, kwargs=submit_url_args)
context = self.get_context_data()
form = kwargs.get("form", None)
if form:
context["form"] = form
context["title"] = self.title + f" ({type})"
context["title_singular"] = self.title_singular
context["unique"] = unique
context["window_content"] = self.window_content
context["context_object_name"] = self.context_object_name
context["context_object_name_singular"] = self.context_object_name_singular
context["submit_url"] = submit_url
context["type"] = type
context["hide_cancel"] = self.hide_cancel
if self.page_title:
context["page_title"] = self.page_title
if self.page_subtitle:
context["page_subtitle"] = self.page_subtitle
response = self.render_to_response(context)
# response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
def post(self, request, *args, **kwargs):
self.request = request
self.template_name = "partials/notify.html"
return super().post(request, *args, **kwargs)
class ObjectDelete(RestrictedViewMixin, ObjectNameMixin, DeleteView):
model = None
template_name = "partials/notify.html"
# Overriden to prevent success URL from being used
def delete(self, request, *args, **kwargs):
"""
Call the delete() method on the fetched object and then redirect to the
success URL.
"""
self.object = self.get_object()
# success_url = self.get_success_url()
self.object.delete()
context = {"message": "Object deleted", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
# This will be used in newer Django versions, until then we get a warning
def form_valid(self, form):
"""
Call the delete() method on the fetched object.
"""
self.object = self.get_object()
self.object.delete()
context = {"message": "Object deleted", "class": "success"}
response = self.render_to_response(context)
response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
return response
# from random import randint
# from timeit import timeit
# entries = 10000
# a = [
# {'ts': "sss", 'msg': randint(1, 2), str(randint(1, 2)): \
# randint(1, 2)} for x in range(entries)
# ]
# kk = ["msg", "nick"]
# call = lambda: dedup_list(a, kk)
# #print(timeit(call, number=10))
# print(dedup_list(a, kk))
# # sh-5.1$ python helpers.py
# # 1.0805372429895215
# def base36encode(number, alphabet="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
# """Converts an integer to a base36 string."""
# if not isinstance(number, (int)):
# raise TypeError("number must be an integer")
# base36 = ""
# sign = ""
# if number < 0:
# sign = "-"
# number = -number
# if 0 <= number < len(alphabet):
# return sign + alphabet[number]
# while number != 0:
# number, i = divmod(number, len(alphabet))
# base36 = alphabet[i] + base36
# return sign + base36
# def base36decode(number):
# return int(number, 36)
# def randomise_list(user, data):
# """
# Randomise data in a list of dictionaries.
# """
# if user.has_perm("core.bypass_randomisation"):
# return
# if isinstance(data, list):
# for index, item in enumerate(data):
# for key, value in item.items():
# if key in settings.RANDOMISE_FIELDS:
# if isinstance(value, int):
# min_val = value - (value * settings.RANDOMISE_RATIO)
# max_val = value + (value * settings.RANDOMISE_RATIO)
# new_val = randint(int(min_val), int(max_val))
# data[index][key] = new_val
# elif isinstance(data, dict):
# for key, value in data.items():
# # if key in settings.RANDOMISE_FIELDS:
# if isinstance(value, int):
# min_val = value - (value * settings.RANDOMISE_RATIO)
# max_val = value + (value * settings.RANDOMISE_RATIO)
# new_val = randint(int(min_val), int(max_val))
# data[key] = new_val
# def obfuscate_list(user, data):
# """
# Obfuscate data in a list of dictionaries.
# """
# if user.has_perm("core.bypass_obfuscation"):
# return
# for index, item in enumerate(data):
# for key, value in item.items():
# # Obfuscate a ratio of the field
# if key in settings.OBFUSCATE_FIELDS:
# length = len(value) - 1
# split = int(length * settings.OBFUSCATE_KEEP_RATIO)
# first_part = value[:split]
# second_part = value[split:]
# second_len = len(second_part)
# second_part = "*" * second_len
# data[index][key] = first_part + second_part
# # Obfuscate value based on fields
# # Example: 2022-02-02 -> 2022-02-**
# # 14:11:12 -> 14:11:**
# elif key in settings.OBFUSCATE_FIELDS_SEP:
# if "-" in value:
# sep = "-"
# value_spl = value.split("-")
# hide_num = settings.OBFUSCATE_DASH_NUM
# elif ":" in value:
# sep = ":"
# value_spl = value.split(":")
# hide_num = settings.OBFUSCATE_COLON_NUM
# first_part = value_spl[:hide_num]
# second_part = value_spl[hide_num:]
# for index_x, x in enumerate(second_part):
# x_len = len(x)
# second_part[index_x] = "*" * x_len
# result = sep.join([*first_part, *second_part])
# data[index][key] = result
# for key in settings.COMBINE_FIELDS:
# for index, item in enumerate(data):
# if key in item:
# k1, k2 = settings.COMBINE_FIELDS[key]
# if k1 in item and k2 in item:
# data[index][key] = item[k1] + item[k2]
# def hash_list(user, data, hash_keys=False):
# """
# Hash a list of dicts or a list with SipHash42.
# """
# if user.has_perm("core.bypass_hashing"):
# return
# cache = "cache.hash"
# hash_table = {}
# if isinstance(data, dict):
# data_copy = [{x: data[x]} for x in data]
# else:
# data_copy = type(data)((data))
# for index, item in enumerate(data_copy):
# if "src" in item:
# if item["src"] in settings.SAFE_SOURCES:
# continue
# if isinstance(item, dict):
# for key, value in list(item.items()):
# if (
# key not in settings.WHITELIST_FIELDS
# and key not in settings.NO_OBFUSCATE_PARAMS
# ):
# if isinstance(value, int):
# value = str(value)
# if isinstance(value, bool):
# continue
# if value is None:
# continue
# if hash_keys:
# hashed = siphash(settings.HASHING_KEY, key)
# else:
# hashed = siphash(settings.HASHING_KEY, value)
# encoded = base36encode(hashed)
# if encoded not in hash_table:
# if hash_keys:
# hash_table[encoded] = key
# else:
# hash_table[encoded] = value
# if hash_keys:
# # Rename the dict key
# data[encoded] = data.pop(key)
# else:
# data[index][key] = encoded
# elif isinstance(item, str):
# hashed = siphash(settings.HASHING_KEY, item)
# encoded = base36encode(hashed)
# if encoded not in hash_table:
# hash_table[encoded] = item
# data[index] = encoded
# if hash_table:
# r.hmset(cache, hash_table)
# def hash_lookup(user, data_dict, supplementary_data=None):
# cache = "cache.hash"
# hash_list = SortedSet()
# denied = []
# for key, value in list(data_dict.items()):
# if "source" in data_dict:
# if data_dict["source"] in settings.SAFE_SOURCES:
# continue
# if "src" in data_dict:
# if data_dict["src"] in settings.SAFE_SOURCES:
# continue
# if supplementary_data:
# if "source" in supplementary_data:
# if supplementary_data["source"] in settings.SAFE_SOURCES:
# continue
# if key in settings.SEARCH_FIELDS_DENY:
# if not user.has_perm("core.bypass_hashing"):
# data_dict[key] = SearchDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# if (
# key not in settings.WHITELIST_FIELDS
# and key not in settings.NO_OBFUSCATE_PARAMS
# ):
# if not value:
# continue
# # hashes = re.findall("\|([^\|]*)\|", value) # noqa
# if isinstance(value, str):
# hashes = re.findall("[A-Z0-9]{12,13}", value)
# elif isinstance(value, dict):
# hashes = []
# for key, value in value.items():
# if not value:
# continue
# hashes_iter = re.findall("[A-Z0-9]{12,13}", value)
# for h in hashes_iter:
# hashes.append(h)
# if not hashes:
# # Otherwise the user could inject plaintext search queries
# if not user.has_perm("core.bypass_hashing"):
# data_dict[key] = SearchDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# continue
# else:
# # There are hashes here but there shouldn't be!
# if key in settings.TAG_SEARCH_DENY:
# data_dict[key] = LookupDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# continue
# for hash in hashes:
# hash_list.add(hash)
# if hash_list:
# values = r.hmget(cache, *hash_list)
# if not values:
# return
# for index, val in enumerate(values):
# if val is None:
# values[index] = b"ERR"
# values = [x.decode() for x in values]
# total = dict(zip(hash_list, values))
# for key in data_dict.keys():
# for hash in total:
# if data_dict[key]:
# if isinstance(data_dict[key], str):
# if hash in data_dict[key]:
# data_dict[key] = data_dict[key].replace(
# f"{hash}", total[hash]
# )
# elif isinstance(data_dict[key], dict):
# for k2, v2 in data_dict[key].items():
# if hash in v2:
#                     data_dict[key][k2] = v2.replace(f"{hash}", total[hash])
# return denied
# def encrypt_list(user, data, secret):
# if user.has_perm("core.bypass_encryption"):
# return
# cipher = Cipher(algorithms.AES(secret), ECB())
# for index, item in enumerate(data):
# for key, value in item.items():
# if key not in settings.WHITELIST_FIELDS:
# encryptor = cipher.encryptor()
# if isinstance(value, int):
# value = str(value)
# if isinstance(value, bool):
# continue
# if value is None:
# continue
# decoded = value.encode("utf8", "replace")
# length = 16 - (len(decoded) % 16)
# decoded += bytes([length]) * length
# ct = encryptor.update(decoded) + encryptor.finalize()
# final_str = b64encode(ct)
# data[index][key] = final_str.decode("utf-8", "replace")

View File

@@ -0,0 +1,36 @@
from django.shortcuts import render
from django.views import View
from rest_framework.parsers import FormParser
from rest_framework.views import APIView

from core.db.storage import db
from mixins.views import ObjectRead
from core.views.manage.permissions import SuperUserRequiredMixin


class MonolithStats(SuperUserRequiredMixin, View):
    template_name = "manage/monolith/stats/index.html"

    def get(self, request):
        return render(request, self.template_name)


class MonolithDBStats(SuperUserRequiredMixin, ObjectRead):
    detail_template = "manage/monolith/stats/overview.html"
    context_object_name_singular = "Status"
    context_object_name = "Status"
    detail_url_name = "monolith_stats_db"
    detail_url_args = ["type"]

    def get_object(self, **kwargs):
        search_query = "SHOW TABLE main STATUS"
        stats = db.run_query(
            self.request.user,
            search_query=search_query,
            path="sql?mode=raw",
            raw=True,
            # method="get",
        )
        return stats
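The path="sql?mode=raw" argument points at Manticore's raw SQL-over-HTTP endpoint. A rough standalone equivalent of the query the view issues, assuming Manticore's default HTTP port 9308 (host and port are assumptions, not repository config):

import requests

# SHOW TABLE main STATUS returns the index's health and size counters
# as an ordinary result set.
resp = requests.post(
    "http://localhost:9308/sql?mode=raw",
    data={"query": "SHOW TABLE main STATUS"},
)
print(resp.json())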

View File

@@ -121,7 +121,6 @@ class ThresholdIRCNetworkRelayDel(SuperUserRequiredMixin, APIView):
""" """
deleted = threshold.del_relay(net, num) deleted = threshold.del_relay(net, num)
if deleted["success"]: if deleted["success"]:
message = f"Deleted relay {num}" message = f"Deleted relay {num}"
message_class = "success" message_class = "success"
else: else:
@@ -150,7 +149,6 @@ class ThresholdIRCNetworkRelayProvision(SuperUserRequiredMixin, APIView):
""" """
provisioned = threshold.irc_provision_relay(net, num) provisioned = threshold.irc_provision_relay(net, num)
if provisioned["success"]: if provisioned["success"]:
message = f"Provisioned relay {num}" message = f"Provisioned relay {num}"
message_class = "success" message_class = "success"
else: else:
@@ -179,7 +177,6 @@ class ThresholdIRCNetworkRelayAuth(SuperUserRequiredMixin, APIView):
""" """
provisioned = threshold.irc_enable_auth(net, num) provisioned = threshold.irc_enable_auth(net, num)
if provisioned["success"]: if provisioned["success"]:
message = f"Enabled authentication on relay {num}" message = f"Enabled authentication on relay {num}"
message_class = "success" message_class = "success"
else: else:

View File

@@ -1,16 +1,17 @@
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.shortcuts import render
+from mixins.views import ObjectCreate, ObjectDelete, ObjectList, ObjectUpdate
from rest_framework.views import APIView

+from core.db.storage import db
from core.forms import NotificationRuleForm, NotificationSettingsForm
+from core.lib.rules import NotificationRuleData
from core.models import NotificationRule, NotificationSettings
-from core.views.helpers import ObjectCreate, ObjectDelete, ObjectList, ObjectUpdate

# Notifications - we create a new notification settings object if there isn't one
# Hence, there is only an update view, not a create view.
-class NotificationsUpdate(LoginRequiredMixin, PermissionRequiredMixin, ObjectUpdate):
-    permission_required = "use_rules"
+class NotificationsUpdate(LoginRequiredMixin, ObjectUpdate):
    model = NotificationSettings
    form_class = NotificationSettingsForm
@@ -69,13 +70,16 @@ class RuleClear(LoginRequiredMixin, PermissionRequiredMixin, APIView):
    permission_required = "use_rules"

    def post(self, request, type, pk):
-        template_name = "partials/notify.html"
+        template_name = "mixins/partials/notify.html"
        rule = NotificationRule.objects.get(pk=pk, user=request.user)
        if isinstance(rule.match, dict):
            for index in rule.match:
-                rule.match[index] = False
+                rule.match[index] = None
        rule.save()
+        rule_data = NotificationRuleData(rule.user, rule, db=db)
+        rule_data.clear_database_matches()
        cleared_indices = ", ".join(rule.match)
        context = {
            "message": f"Cleared match status for indices: {cleared_indices}",

View File

@@ -81,15 +81,21 @@ def make_graph(results):
    graph = []
    for index, item in enumerate(results):
        date = str(index)
+        sentiment = None
+        if "meta" in item:
+            if "aggs" in item["meta"]:
+                if "avg_sentiment" in item["meta"]["aggs"]:
+                    sentiment = item["meta"]["aggs"]["avg_sentiment"]["value"]
+        else:
+            if "sentiment" in item:
+                sentiment = item["sentiment"]
        graph.append(
            {
-                "text": item.get("words_noun", None)
-                or item.get("msg", None)
-                or item.get("id"),
+                "text": item.get("msg", None) or item.get("id"),
                "nick": item.get("nick", None),
                "channel": item.get("channel", None),
                "net": item.get("net", None),
-                "value": item.get("sentiment", None) or None,
+                "value": sentiment,
                "date": date,
            }
        )
@@ -98,10 +104,11 @@ def make_graph(results):
class DrilldownTableView(SingleTableView):
    table_class = DrilldownTable
-    template_name = "wm/widget.html"
+    template_name = "mixins/wm/widget.html"
    window_content = "window-content/results.html"
    # htmx_partial = "partials/"
    paginate_by = settings.DRILLDOWN_RESULTS_PER_PAGE
+    widget_options = 'gs-w="10" gs-h="1" gs-y="10" gs-x="1"'

    def common_request(self, request, **kwargs):
        extra_params = {}
@@ -112,16 +119,20 @@ class DrilldownTableView(SingleTableView):
        sizes = settings.MAIN_SIZES
        if request.GET:
+            print("GET")
            self.template_name = "index.html"
            # GET arguments in URL like ?query=xyz
            query_params = request.GET.dict()
+            print("QUERY_PARAMS GET", query_params)
            if request.htmx:
                if request.resolver_match.url_name == "search_partial":
                    self.template_name = "partials/results_table.html"
        elif request.POST:
+            print("POST")
            query_params = request.POST.dict()
        else:
            self.template_name = "index.html"
+            print("FRESH")
            # No query, this is a fresh page load
            # Don't try to search, since there's clearly nothing to do
            params_with_defaults = {}
@@ -130,6 +141,7 @@ class DrilldownTableView(SingleTableView):
                "sizes": sizes,
                "params": params_with_defaults,
                "unique": "results",
+                "widget_options": self.widget_options,
                "window_content": self.window_content,
                "title": "Results",
            }
@@ -187,6 +199,7 @@ class DrilldownTableView(SingleTableView):
        # We don't want a random one since we only want one results pane.
        context["unique"] = "results"
        context["window_content"] = self.window_content
+        context["widget_options"] = self.widget_options
        context["title"] = "Results"

        # Valid sizes
@@ -209,9 +222,9 @@ class DrilldownTableView(SingleTableView):
            # Still push the URL so they can share it to get assistance
            if request.GET:
                if request.htmx:
-                    response["HX-Push"] = reverse("home") + "?" + url_params
+                    response["HX-Replace-Url"] = reverse("home") + "?" + url_params
            elif request.POST:
-                response["HX-Push"] = reverse("home") + "?" + url_params
+                response["HX-Replace-Url"] = reverse("home") + "?" + url_params
            return response

    # Create data for chart.js sentiment graph
@@ -265,7 +278,7 @@ class DrilldownTableView(SingleTableView):
        response = self.render_to_response(context)
        # if not request.method == "GET":
        if "client_uri" in context:
-            response["HX-Push"] = reverse("home") + "?" + context["client_uri"]
+            response["HX-Replace-Url"] = reverse("home") + "?" + context["client_uri"]
        return response

    def post(self, request, *args, **kwargs):
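The header rename is a behavioural change, not just a spelling one: HX-Push creates a new browser history entry for every search, whereas HX-Replace-Url (htmx's history.replaceState counterpart, available from htmx 1.8) overwrites the current entry, so repeated queries and pagination no longer pile up on the back button.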

View File

@@ -12,7 +12,6 @@ def format_header(self):
    header = header.lower()
    header = header.title()
    if header != "Ident":
-        header = header.replace("Uuid", "UUID")
        header = header.replace("Id", "ID")
        header = header.replace("id", "ID")
    if header == "Ts":
@@ -79,7 +78,8 @@ class DrilldownTable(Table):
    file_md5 = Column()
    file_ext = Column()
    file_size = Column()
-    rule_uuid = Column()
+    rule_id = Column()
+    batch_id = Column()
    index = Column()
    meta = Column()
    match_ts = Column()

docker-compose.prod.yml (new file, +579 lines)
View File

@@ -0,0 +1,579 @@
version: "2.2"
services:
app:
image: xf/neptune:latest
container_name: neptune
build:
context: .
args:
OPERATION: ${OPERATION}
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
# env_file:
# - stack.env
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
# networks:
# - default
# - xf
# - db
network_mode: host
processing:
image: xf/neptune:latest
container_name: processing_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py processing'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
# networks:
# - default
# - xf
# - db
network_mode: host
scheduling:
image: xf/neptune:latest
container_name: scheduling_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
# networks:
# - default
# - xf
# - db
network_mode: host
migration:
image: xf/neptune:latest
container_name: migration_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
# networks:
# - default
# - xf
# - db
network_mode: host
collectstatic:
image: xf/neptune:latest
container_name: collectstatic_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
#- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- type: bind
source: /code/run
target: /var/run
# volumes_from:
# - tmp
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
depends_on:
redis:
condition: service_healthy
# networks:
# - default
# - xf
# - db
network_mode: host
# nginx:
# image: nginx:latest
# container_name: nginx_neptune
# ports:
# - ${APP_PORT}:9999
# ulimits:
# nproc: 65535
# nofile:
# soft: 65535
# hard: 65535
# volumes:
# - ${PORTAINER_GIT_DIR}:/code
# - ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
# - neptune_static:${STATIC_ROOT}
# # volumes_from:
# # - tmp
# networks:
# - default
# - xf
# depends_on:
# app:
# condition: service_started
# tmp:
# image: busybox
# container_name: tmp_neptune
# command: chmod -R 777 /var/run/socks
# volumes:
# - /var/run/socks
redis:
image: redis
container_name: redis_neptune
command: redis-server /etc/redis.conf
# ulimits:
# nproc: 65535
# nofile:
# soft: 65535
# hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
- neptune_redis_data:/data
- type: bind
source: /code/run
target: /var/run
# volumes_from:
# - tmp
healthcheck:
test: "redis-cli ping"
interval: 2s
timeout: 2s
retries: 15
# networks:
# - default
# - xf
# networks:
# default:
# driver: bridge
# xf:
# external: true
# db:
# external: true
volumes:
# neptune_static: {}
neptune_redis_data: {}
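One practical note on the rewritten stack: with network_mode: host and the nginx service commented out, nothing in this file publishes APP_PORT, so reachability depends on whatever proxy sits in front of uwsgi on the host. A minimal smoke test with httpx (added to the requirements further down), assuming a hypothetical frontend listening on 127.0.0.1:5000 (APP_PORT from stack.env):

    import httpx

    # Assumed values: APP_PORT=5000 from stack.env, and a host-level proxy
    # actually serving the app there; the compose file itself guarantees neither.
    resp = httpx.get("http://127.0.0.1:5000/", timeout=5.0)
    print(resp.status_code, resp.headers.get("content-type"))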

@@ -1,194 +0,0 @@
version: "2.2"
services:
app:
image: pathogen/neptune:latest
container_name: neptune
build:
context: .
args:
OPERATION: ${OPERATION}
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
processing:
image: pathogen/neptune:latest
container_name: processing_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py processing'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
scheduling:
image: pathogen/neptune:latest
container_name: scheduling_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
migration:
image: pathogen/neptune:latest
container_name: migration_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
collectstatic:
image: pathogen/neptune:latest
container_name: collectstatic_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
env_file:
- stack.env
depends_on:
redis:
condition: service_healthy
nginx:
image: nginx:latest
container_name: nginx_neptune
ports:
- ${APP_PORT}:9999
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
networks:
- default
- pathogen
depends_on:
app:
condition: service_started
tmp:
image: busybox
container_name: tmp_neptune
command: chmod -R 777 /var/run/socks
volumes:
- /var/run/socks
redis:
image: redis
container_name: redis_neptune
command: redis-server /etc/redis.conf
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
volumes_from:
- tmp
healthcheck:
test: "redis-cli -s /var/run/socks/redis.sock ping"
interval: 2s
timeout: 2s
retries: 15
networks:
- default
- pathogen
networks:
default:
driver: bridge
pathogen:
external: true
elastic:
external: true
volumes:
neptune_static: {}

@@ -1,5 +1,5 @@
-unixsocket /var/run/socks/redis.sock
+unixsocket /var/run/neptune-redis.sock
 unixsocketperm 777
-port 0
-# For Monolith PubSub
-port 6379
+# port 6379
+# requirepass changeme
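The socket path moves from /var/run/socks/redis.sock to /var/run/neptune-redis.sock, matching the /code/run to /var/run bind mount in the compose file. A minimal connectivity sketch with redis-py over the new socket; db=1 mirrors REDIS_DB in stack.env and is an assumption about how the app connects:

    import redis

    # unix_socket_path follows the unixsocket directive above; requirepass is
    # commented out in this config, so no password is passed.
    r = redis.Redis(unix_socket_path="/var/run/neptune-redis.sock", db=1)
    print(r.ping())  # True when the server answers over the socket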

@@ -4,9 +4,19 @@ module=app.wsgi:application
 env=DJANGO_SETTINGS_MODULE=app.settings
 master=1
 pidfile=/tmp/project-master.pid
-socket=0.0.0.0:8000
+#socket=0.0.0.0:8000
+socket=/var/run/uwsgi-neptune.sock
+# socket 777
+chmod-socket=777
 harakiri=20
-max-requests=100000
+#max-requests=100000
+# Set a lower value for max-requests to prevent memory leaks from building up over time
+max-requests=1000
+# Ensure old worker processes are cleaned up properly
+reload-on-as=512
+reload-on-rss=256
 vacuum=1
 home=/venv
-processes=12
+processes=4
+threads=2
+log-level=debug
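Because uwsgi now binds a unix socket instead of 0.0.0.0:8000, the socket speaks the binary uwsgi protocol and is intended for a reverse proxy rather than a browser. A rough connect-only probe, assuming the path from the config above is visible in the current mount namespace:

    import socket

    # This only verifies that uwsgi is accepting connections on the socket;
    # issuing a real request would need a client that speaks the uwsgi protocol.
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.connect("/var/run/uwsgi-neptune.sock")
    s.close()
    print("uwsgi socket is accepting connections")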

@@ -20,3 +20,11 @@ django-debug-toolbar-template-profiler
 orjson
 msgpack
 apscheduler
+django-prettyjson
+git+https://git.zm.is/XF/django-crud-mixins
+# For caching
+redis
+hiredis
+django-cachalot
+django_redis
+httpx
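redis, hiredis, django-cachalot and django_redis together point at Django's cache framework backed by the Redis service above. A sketch of the settings they enable; the LOCATION string is assembled from the socket path in docker/redis.conf and REDIS_DB_CACHE in stack.env, and is an assumption rather than a quote from the app's settings:

    # app/settings.py (hypothetical excerpt)
    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "unix:///var/run/neptune-redis.sock?db=10",
            "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
        }
    }

    # django-cachalot caches ORM query results in the cache above and invalidates
    # them on writes; redis-py uses hiredis automatically when it is installed.
    INSTALLED_APPS += ["cachalot"]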

@@ -1,6 +1,86 @@
+# General application settings
 APP_PORT=5000
 PORTAINER_GIT_DIR=.
 APP_LOCAL_SETTINGS=./app/local_settings.py
 APP_DATABASE_FILE=./db.sqlite3
-STATIC_ROOT=/conf/static
-OPERATION=dev
+STATIC_ROOT=/code/static
+OPERATION=uwsgi
+# Elasticsearch settings
+ELASTICSEARCH_URL=10.1.0.1
+ELASTICSEARCH_PORT=9200
+ELASTICSEARCH_TLS=True
+ELASTICSEARCH_USERNAME=admin
+ELASTICSEARCH_PASSWORD=secret
+# Manticore settings
+MANTICORE_URL=http://127.0.0.1:9308
+# Database settings
+DB_BACKEND=MANTICORE
+INDEX_MAIN=main
+INDEX_RESTRICTED=restricted
+INDEX_META=meta
+INDEX_INT=internal
+INDEX_RULE_STORAGE=rule_storage
+MAIN_SIZES=1,5,15,30,50,100,250,500,1000
+MAIN_SIZES_ANON=1,5,15,30,50,100
+MAIN_SOURCES=dis,4ch,all
+SOURCES_RESTRICTED=irc
+CACHE=True
+CACHE_TIMEOUT=2
+# Drilldown settings
+DRILLDOWN_RESULTS_PER_PAGE=15
+DRILLDOWN_DEFAULT_SIZE=15
+DRILLDOWN_DEFAULT_INDEX=main
+DRILLDOWN_DEFAULT_SORTING=desc
+DRILLDOWN_DEFAULT_SOURCE=all
+# URLs
+DOMAIN=spy.zm.is
+URL=https://spy.zm.is
+# Access control
+ALLOWED_HOSTS=spy.zm.is
+# CSRF
+CSRF_TRUSTED_ORIGINS=https://spy.zm.is
+# Stripe settings
+BILLING_ENABLED=False
+STRIPE_TEST=True
+STRIPE_API_KEY_TEST=
+STRIPE_PUBLIC_API_KEY_TEST=
+STRIPE_API_KEY_PROD=
+STRIPE_PUBLIC_API_KEY_PROD=
+STRIPE_ENDPOINT_SECRET=
+STRIPE_ADMIN_COUPON=
+# Threshold settings
+THRESHOLD_ENDPOINT=http://threshold:13869
+THRESHOLD_API_KEY=api_1
+THRESHOLD_API_TOKEN=
+THRESHOLD_API_COUNTER=
+# NickTrace settings
+NICKTRACE_MAX_ITERATIONS=4
+NICKTRACE_MAX_CHUNK_SIZE=500
+NICKTRACE_QUERY_SIZE=10000
+# Meta settings
+META_MAX_ITERATIONS=4
+META_MAX_CHUNK_SIZE=500
+META_QUERY_SIZE=10000
+# Debugging and profiling
+DEBUG=n
+PROFILER=False
+# Redis settings
+REDIS_HOST=redis_neptune
+REDIS_PASSWORD=changeme
+REDIS_DB=1
+REDIS_DB_CACHE=10
+REDIS_PORT=6379