Compare commits

97 commits: `4f55ffeaf7...master`

SHA1:
b61eda52e6, a4c3834b62, fe087eb591, 6ff75890b9, c199d3d078, 66f4e4c264, b16db665e5,
86fb2ac593, a519a4ce5e, f62f0881a1, fd47a3ddc8, d8cb3a263b, 27fea06198, 0e12b0d185,
6fe31d99a9, 1ab7a95ebd, d581d787de, 4ead6ff7c1, 9fcf5041f0, 2fc476b830, 11d4542412,
5d6f96bbf3, 40a710f41e, 87c232d3f9, df273a6009, 115c6dd1ad, 330cc6c401, 2050e6cb47,
7d0ebf87bd, c5856ce20b, 0518c9fe1c, 29e57628e4, cb9500a36d, e993f0f20e, 56b268bd77,
4042d60c57, 090fae013d, 2356c6bcd7, f4273e4453, c67d89c978, 9a8bb9027f, 9519c1ac9f,
7b6da7b704, 0d564788b6, fd10a4ba8e, 455da73b95, d8005fa15d, 6a01aea5e1, a1a5535079,
3f666e8251, 66232c8260, 2c12854a55, af5c212450, 2a034a16e7, c356f58d8a, 6a890723d9,
f0455984ef, 1b1dbbc76c, 7e78c2857e, 1eea401657, 81c8e34211, df1e82c5f2, 79b4512546,
97e932cbae, 0cbd2d8a6f, 66596cda42, 53cb9a7f76, eb7ff88c15, 2153054cac, 7b05e48d71,
4aa8e67e11, 2eb090f088, bea84ee2b6, 3d6c8d618f, ef9734a34d, c08ecc036f, 1964ab62ec,
742a2f92da, ddffc2c3d8, f5e371bf5c, 9de1787de6, a2207bbcf4, 75603570ff, 2dd9efcc6f,
eb2486afba, 46c7d96310, 6bfa0aa73b, 435d9b5571, 2a1e6b3292, 9ee9c7abde, dbf581245b,
fbe5607899, 158fffed99, dd4b2ddd3a, 092d4c64ff, 9aacc2cc51, 031995d4b9
.gitignore (vendored, 5 changes)

```diff
@@ -58,7 +58,6 @@ cover/
 # Django stuff:
 *.log
-local_settings.py
 db.sqlite3
 db.sqlite3-journal
 
@@ -154,4 +153,8 @@ cython_debug/
 .idea/
 
 .bash_history
+.python_history
 .vscode/
+stack.env
+
+static/
```
```diff
@@ -1,22 +1,22 @@
 repos:
 - repo: https://github.com/psf/black
-  rev: 22.6.0
+  rev: 23.1.0
   hooks:
   - id: black
     exclude: ^core/migrations
 - repo: https://github.com/PyCQA/isort
-  rev: 5.10.1
+  rev: 5.11.5
   hooks:
   - id: isort
     args: ["--profile", "black"]
 - repo: https://github.com/PyCQA/flake8
-  rev: 4.0.1
+  rev: 6.0.0
   hooks:
   - id: flake8
     args: [--max-line-length=88]
     exclude: ^core/migrations
 - repo: https://github.com/rtts/djhtml
-  rev: 'v1.5.2' # replace with the latest tag on GitHub
+  rev: v2.0.0
   hooks:
   - id: djhtml
     args: [-t 2]
```
Dockerfile (10 changes)

```diff
@@ -2,17 +2,17 @@
 FROM python:3
 ARG OPERATION
 
-RUN useradd -d /code pathogen
+RUN useradd -d /code xf
 RUN mkdir -p /code
-RUN chown -R pathogen:pathogen /code
+RUN chown -R xf:xf /code
 
 RUN mkdir -p /conf/static
-RUN chown -R pathogen:pathogen /conf
+RUN chown -R xf:xf /conf
 
 RUN mkdir /venv
-RUN chown pathogen:pathogen /venv
+RUN chown xf:xf /venv
 
-USER pathogen
+USER xf
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
 WORKDIR /code
```
Makefile (14 changes)

```diff
@@ -1,20 +1,20 @@
 run:
-    docker-compose --env-file=stack.env up -d
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env up -d
 
 build:
-    docker-compose --env-file=stack.env build
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env build
 
 stop:
-    docker-compose --env-file=stack.env down
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env down
 
 log:
-    docker-compose --env-file=stack.env logs -f
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env logs -f --names
 
 migrate:
-    docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
 
 makemigrations:
-    docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
 
 auth:
-    docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"
+    docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"
```
```diff
@@ -1,3 +1,5 @@
+from os import getenv
+
 # Elasticsearch settings
 ELASTICSEARCH_URL = "10.1.0.1"
 ELASTICSEARCH_PORT = 9200
@@ -15,6 +17,7 @@ INDEX_MAIN = "main"
 INDEX_RESTRICTED = "restricted"
 INDEX_META = "meta"
 INDEX_INT = "internal"
+INDEX_RULE_STORAGE = "rule_storage"
 
 MAIN_SIZES = ["1", "5", "15", "30", "50", "100", "250", "500", "1000"]
 MAIN_SIZES_ANON = ["1", "5", "15", "30", "50", "100"]
@@ -28,7 +31,7 @@ DRILLDOWN_DEFAULT_PARAMS = {
     "size": "15",
     "index": "main",
     "sorting": "desc",
-    "source": "4ch",
+    "source": "all",
 }
 
 
@@ -103,3 +106,8 @@ META_QUERY_SIZE = 10000
 
 DEBUG = True
 PROFILER = False
+
+REDIS_HOST = getenv("REDIS_HOST", "redis_fisk_dev")
+REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
+REDIS_DB = int(getenv("REDIS_DB", "10"))
+REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
```
app/local_settings.py (new file, 87 lines)

```diff
@@ -0,0 +1,87 @@
+from os import getenv
+
+trues = ("t", "true", "yes", "y", "1")
+
+# Elasticsearch settings
+ELASTICSEARCH_URL = getenv("ELASTICSEARCH_URL", "10.1.0.1")
+ELASTICSEARCH_PORT = int(getenv("ELASTICSEARCH_PORT", "9200"))
+ELASTICSEARCH_TLS = getenv("ELASTICSEARCH_TLS", "True").lower() in trues
+ELASTICSEARCH_USERNAME = getenv("ELASTICSEARCH_USERNAME", "admin")
+ELASTICSEARCH_PASSWORD = getenv("ELASTICSEARCH_PASSWORD", "secret")
+
+# Manticore settings
+MANTICORE_URL = getenv("MANTICORE_URL", "http://example-db-1:9308")
+
+DB_BACKEND = getenv("DB_BACKEND", "MANTICORE")
+
+# Common DB settings
+INDEX_MAIN = getenv("INDEX_MAIN", "main")
+INDEX_RESTRICTED = getenv("INDEX_RESTRICTED", "restricted")
+INDEX_META = getenv("INDEX_META", "meta")
+INDEX_INT = getenv("INDEX_INT", "internal")
+INDEX_RULE_STORAGE = getenv("INDEX_RULE_STORAGE", "rule_storage")
+
+MAIN_SIZES = getenv("MAIN_SIZES", "1,5,15,30,50,100,250,500,1000").split(",")
+MAIN_SIZES_ANON = getenv("MAIN_SIZES_ANON", "1,5,15,30,50,100").split(",")
+MAIN_SOURCES = getenv("MAIN_SOURCES", "dis,4ch,all").split(",")
+SOURCES_RESTRICTED = getenv("SOURCES_RESTRICTED", "irc").split(",")
+CACHE = getenv("CACHE", "False").lower() in trues
+CACHE_TIMEOUT = int(getenv("CACHE_TIMEOUT", "2"))
+
+DRILLDOWN_RESULTS_PER_PAGE = int(getenv("DRILLDOWN_RESULTS_PER_PAGE", "15"))
+DRILLDOWN_DEFAULT_PARAMS = {
+    "size": getenv("DRILLDOWN_DEFAULT_SIZE", "15"),
+    "index": getenv("DRILLDOWN_DEFAULT_INDEX", "main"),
+    "sorting": getenv("DRILLDOWN_DEFAULT_SORTING", "desc"),
+    "source": getenv("DRILLDOWN_DEFAULT_SOURCE", "all"),
+}
+
+# URLs
+DOMAIN = getenv("DOMAIN", "example.com")
+URL = getenv("URL", f"https://{DOMAIN}")
+
+# Access control
+ALLOWED_HOSTS = getenv("ALLOWED_HOSTS", f"127.0.0.1,{DOMAIN}").split(",")
+
+# CSRF
+CSRF_TRUSTED_ORIGINS = getenv("CSRF_TRUSTED_ORIGINS", URL).split(",")
+
+# Stripe
+BILLING_ENABLED = getenv("BILLING_ENABLED", "false").lower() in trues
+STRIPE_TEST = getenv("STRIPE_TEST", "True").lower() in trues
+STRIPE_API_KEY_TEST = getenv("STRIPE_API_KEY_TEST", "")
+STRIPE_PUBLIC_API_KEY_TEST = getenv("STRIPE_PUBLIC_API_KEY_TEST", "")
+STRIPE_API_KEY_PROD = getenv("STRIPE_API_KEY_PROD", "")
+STRIPE_PUBLIC_API_KEY_PROD = getenv("STRIPE_PUBLIC_API_KEY_PROD", "")
+STRIPE_ENDPOINT_SECRET = getenv("STRIPE_ENDPOINT_SECRET", "")
+STATIC_ROOT = getenv("STATIC_ROOT", "")
+SECRET_KEY = getenv("SECRET_KEY", "a")
+STRIPE_ADMIN_COUPON = getenv("STRIPE_ADMIN_COUPON", "")
+
+# Threshold
+THRESHOLD_ENDPOINT = getenv("THRESHOLD_ENDPOINT", "http://threshold:13869")
+THRESHOLD_API_KEY = getenv("THRESHOLD_API_KEY", "api_1")
+THRESHOLD_API_TOKEN = getenv("THRESHOLD_API_TOKEN", "")
+THRESHOLD_API_COUNTER = getenv("THRESHOLD_API_COUNTER", "")
+
+# NickTrace
+NICKTRACE_MAX_ITERATIONS = int(getenv("NICKTRACE_MAX_ITERATIONS", "4"))
+NICKTRACE_MAX_CHUNK_SIZE = int(getenv("NICKTRACE_MAX_CHUNK_SIZE", "500"))
+NICKTRACE_QUERY_SIZE = int(getenv("NICKTRACE_QUERY_SIZE", "10000"))
+
+# Meta
+META_MAX_ITERATIONS = int(getenv("META_MAX_ITERATIONS", "4"))
+META_MAX_CHUNK_SIZE = int(getenv("META_MAX_CHUNK_SIZE", "500"))
+META_QUERY_SIZE = int(getenv("META_QUERY_SIZE", "10000"))
+
+DEBUG = getenv("DEBUG", "True").lower() in trues
+PROFILER = getenv("PROFILER", "False").lower() in trues
+
+REDIS_HOST = getenv("REDIS_HOST", "redis_neptune_dev")
+REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
+REDIS_DB = int(getenv("REDIS_DB", "1"))
+REDIS_DB_CACHE = int(getenv("REDIS_DB_CACHE", "10"))
+REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
+
+# Elasticsearch blacklist
+ELASTICSEARCH_BLACKLISTED = {}
```
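The new file replaces every hard-coded value from the defaults above with an environment lookup. It leans on three small conventions, sketched below with names taken straight from the diff (nothing here is new API, just the recurring pattern):

```python
from os import getenv

# Strings accepted as boolean True when read from the environment.
trues = ("t", "true", "yes", "y", "1")

# Boolean: compare the lowercased env value against the accepted true strings.
DEBUG = getenv("DEBUG", "True").lower() in trues

# Integer: getenv always returns a string, so convert explicitly.
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))

# List: store comma-separated values in one variable and split on read.
MAIN_SOURCES = getenv("MAIN_SOURCES", "dis,4ch,all").split(",")
```

Storing lists as comma-separated strings keeps each setting a single environment variable, at the cost of forbidding commas inside individual values.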
```diff
@@ -42,7 +42,11 @@ INSTALLED_APPS = [
     "crispy_bulma",
     "django_tables2",
     "django_tables2_bulma_template",
+    "prettyjson",
+    "mixins",
+    "cachalot",
 ]
 
 CRISPY_TEMPLATE_PACK = "bulma"
 CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",)
 DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html"
@@ -51,7 +55,9 @@ MIDDLEWARE = [
     "debug_toolbar.middleware.DebugToolbarMiddleware",
     "django.middleware.security.SecurityMiddleware",
     "django.contrib.sessions.middleware.SessionMiddleware",
+    # 'django.middleware.cache.UpdateCacheMiddleware',
     "django.middleware.common.CommonMiddleware",
+    # 'django.middleware.cache.FetchFromCacheMiddleware',
     "django.middleware.csrf.CsrfViewMiddleware",
     "django.contrib.auth.middleware.AuthenticationMiddleware",
     "django.contrib.messages.middleware.MessageMiddleware",
@@ -86,7 +92,7 @@ WSGI_APPLICATION = "app.wsgi.application"
 DATABASES = {
     "default": {
         "ENGINE": "django.db.backends.sqlite3",
-        "NAME": BASE_DIR / "db.sqlite3",
+        "NAME": "/conf/db.sqlite3",
     }
 }
 
@@ -144,7 +150,7 @@ REST_FRAMEWORK = {
 
 INTERNAL_IPS = [
     "127.0.0.1",
-    "10.1.10.11",
+    # "10.1.10.11",
 ]
 
 DEBUG_TOOLBAR_PANELS = [
@@ -163,10 +169,27 @@ DEBUG_TOOLBAR_PANELS = [
     "debug_toolbar.panels.logging.LoggingPanel",
     "debug_toolbar.panels.redirects.RedirectsPanel",
     "debug_toolbar.panels.profiling.ProfilingPanel",
+    "cachalot.panels.CachalotPanel",
 ]
 
 from app.local_settings import *  # noqa
 
+# Performance optimisations
+CACHES = {
+    "default": {
+        "BACKEND": "django_redis.cache.RedisCache",
+        # "LOCATION": "unix:///var/run/socks/redis.sock",
+        # "LOCATION": f"redis://{REDIS_HOST}:{REDIS_PORT}",
+        "LOCATION": "unix:///var/run/neptune-redis.sock",
+        "OPTIONS": {
+            "db": REDIS_DB,
+            # "parser_class": "django_redis.cache.RedisCache",
+            # "PASSWORD": REDIS_PASSWORD,
+            "pool_class": "redis.BlockingConnectionPool",
+        },
+    }
+}
+
 if PROFILER:  # noqa - trust me its there
     import pyroscope
 
@@ -178,3 +201,12 @@ if PROFILER:  # noqa - trust me its there
         # "region": f'{os.getenv("REGION")}',
         # }
     )
+
+
+def show_toolbar(request):
+    return DEBUG  # noqa: from local imports
+
+
+DEBUG_TOOLBAR_CONFIG = {
+    "SHOW_TOOLBAR_CALLBACK": show_toolbar,
+}
```
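With the `CACHES` block in place, `django-redis` serves `django.core.cache` over the unix socket, and `cachalot` (added to `INSTALLED_APPS` and the debug-toolbar panels) layers ORM query caching on top of it. A minimal smoke test of the configured cache from a Django shell in this project (the key name is illustrative):

```python
from django.core.cache import cache  # routed to Redis via the unix socket LOCATION

cache.set("smoke-test", {"ok": True}, timeout=30)
assert cache.get("smoke-test") == {"ok": True}
```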
app/urls.py (69 changes)

```diff
@@ -58,21 +58,23 @@ from core.views.manage.threshold.threshold import (
     ThresholdIRCOverview,
 )
 
+# Stats
+from core.views.manage.monolith import stats
+
 # Main tool pages
 from core.views.ui.drilldown import (  # DrilldownTableView,; Drilldown,
     DrilldownContextModal,
     DrilldownTableView,
     ThresholdInfoModal,
 )
-# from core.views.ui.insights import (
-#     Insights,
-#     InsightsChannels,
-#     InsightsInfoModal,
-#     InsightsMeta,
-#     InsightsNicks,
-#     InsightsSearch,
-# )
+from core.views.ui.insights import (
+    Insights,
+    InsightsChannels,
+    InsightsInfoModal,
+    InsightsMeta,
+    InsightsNicks,
+    InsightsSearch,
+)
 
 urlpatterns = [
     path("__debug__/", include("debug_toolbar.urls")),
@@ -93,7 +95,7 @@ urlpatterns = [
     ),
     path("cancel/", TemplateView.as_view(template_name="cancel.html"), name="cancel"),
     path("portal", Portal.as_view(), name="portal"),
-    path("admin/", admin.site.urls),
+    path("sapp/", admin.site.urls),
     path("accounts/", include("django.contrib.auth.urls")),
     path("accounts/signup/", Signup.as_view(), name="signup"),
     ##
@@ -103,12 +105,32 @@ urlpatterns = [
     path("context/", DrilldownContextModal.as_view(), name="modal_context"),
     path("context_table/", DrilldownContextModal.as_view(), name="modal_context_table"),
     ##
-    # path("ui/insights/", Insights.as_view(), name="insights"),
-    # path("ui/insights/search/", InsightsSearch.as_view(), name="search_insights"),
-    # path("ui/insights/channels/", InsightsChannels.as_view(), name="chans_insights"),
-    # path("ui/insights/nicks/", InsightsNicks.as_view(), name="nicks_insights"),
-    # path("ui/insights/meta/", InsightsMeta.as_view(), name="meta_insights"),
-    # path("ui/insights/modal/", InsightsInfoModal.as_view(), name="modal_insights"),
+    path("ui/insights/index/<str:index>/", Insights.as_view(), name="insights"),
+    path(
+        "ui/insights/index/<str:index>/search/",
+        InsightsSearch.as_view(),
+        name="search_insights",
+    ),
+    path(
+        "ui/insights/index/<str:index>/channels/",
+        InsightsChannels.as_view(),
+        name="chans_insights",
+    ),
+    path(
+        "ui/insights/index/<str:index>/nicks/",
+        InsightsNicks.as_view(),
+        name="nicks_insights",
+    ),
+    path(
+        "ui/insights/index/<str:index>/meta/",
+        InsightsMeta.as_view(),
+        name="meta_insights",
+    ),
+    path(
+        "ui/insights/index/<str:index>/modal/",
+        InsightsInfoModal.as_view(),
+        name="modal_insights",
+    ),
     ##
     path(
         "manage/threshold/irc/overview/",
@@ -287,4 +309,19 @@ urlpatterns = [
         notifications.RuleDelete.as_view(),
         name="rule_delete",
     ),
+    path(
+        "rule/<str:type>/clear/<str:pk>/",
+        notifications.RuleClear.as_view(),
+        name="rule_clear",
+    ),
+    path(
+        "manage/monolith/stats/",
+        stats.MonolithStats.as_view(),
+        name="monolith_stats",
+    ),
+    path(
+        "manage/monolith/stats_db/<str:type>/",
+        stats.MonolithDBStats.as_view(),
+        name="monolith_stats_db",
+    )
 ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
```
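The insights routes move from flat paths to index-scoped paths, so each view now receives the index as a URL keyword argument. Resolving one of the new names from application code would look like this (the index value is illustrative):

```python
from django.urls import reverse

url = reverse("search_insights", kwargs={"index": "main"})
# -> "/ui/insights/index/main/search/"
```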
```diff
@@ -1,8 +1,20 @@
+import os
+
 import stripe
 from django.conf import settings
 from redis import StrictRedis
 
-r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
+os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
+
+# /var/run/neptune-redis.sock
+# use the socket
+r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=settings.REDIS_DB)
+# r = StrictRedis(
+#     host=settings.REDIS_HOST,
+#     port=settings.REDIS_PORT,
+#     password=settings.REDIS_PASSWORD,
+#     db=settings.REDIS_DB
+# )
 
 if settings.STRIPE_TEST:
     stripe.api_key = settings.STRIPE_API_KEY_TEST
```
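This hunk swaps the Redis connection over to the renamed socket and sets `DJANGO_ALLOW_ASYNC_UNSAFE` so Django's ORM can be touched from the async query paths added elsewhere in this compare. A quick connectivity check, assuming the same socket path and the `REDIS_DB` default of 1 from app/local_settings.py:

```python
from redis import StrictRedis

r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=1)
print(r.ping())  # True when the socket is reachable
```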
```diff
@@ -2,7 +2,6 @@ import random
 import string
 import time
 from abc import ABC, abstractmethod
-from datetime import datetime
 from math import floor, log10
 
 import orjson
@@ -50,17 +49,13 @@ def dedup_list(data, check_keys):
     return out
 
 
-class QueryError(Exception):
-    pass
-
-
 class StorageBackend(ABC):
     def __init__(self, name):
         self.log = logs.get_logger(name)
         self.log.info(f"Initialising storage backend {name}")
 
         self.initialise_caching()
-        self.initialise()
+        # self.initialise()
 
     @abstractmethod
     def initialise(self, **kwargs):
@@ -82,66 +77,6 @@ class StorageBackend(ABC):
     def construct_query(self, **kwargs):
         pass
 
-    @abstractmethod
-    def run_query(self, **kwargs):
-        pass
-
-    def parse_size(self, query_params, sizes):
-        if "size" in query_params:
-            size = query_params["size"]
-            if size not in sizes:
-                message = "Size is not permitted"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            size = int(size)
-        else:
-            size = 15
-
-        return size
-
-    def parse_index(self, user, query_params, raise_error=False):
-        if "index" in query_params:
-            index = query_params["index"]
-            if index == "main":
-                index = settings.INDEX_MAIN
-            else:
-                if not user.has_perm(f"core.index_{index}"):
-                    message = f"Not permitted to search by this index: {index}"
-                    if raise_error:
-                        raise QueryError(message)
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-                if index == "meta":
-                    index = settings.INDEX_META
-                elif index == "internal":
-                    index = settings.INDEX_INT
-                elif index == "restricted":
-                    if not user.has_perm("core.restricted_sources"):
-                        message = f"Not permitted to search by this index: {index}"
-                        if raise_error:
-                            raise QueryError(message)
-                        message_class = "danger"
-                        return {
-                            "message": message,
-                            "class": message_class,
-                        }
-                    index = settings.INDEX_RESTRICTED
-                else:
-                    message = f"Index is not valid: {index}"
-                    if raise_error:
-                        raise QueryError(message)
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-        else:
-            index = settings.INDEX_MAIN
-        return index
-
     def parse_query(self, query_params, tags, size, custom_query, add_bool, **kwargs):
         query_created = False
         if "query" in query_params:
@@ -163,7 +98,9 @@ class StorageBackend(ABC):
             for tagname, tagvalue in item.items():
                 add_bool.append({tagname: tagvalue})
 
-        valid = self.check_valid_query(query_params, custom_query)
+        bypass_check = kwargs.get("bypass_check", False)
+        if not bypass_check:
+            valid = self.check_valid_query(query_params, custom_query, **kwargs)
         if isinstance(valid, dict):
             return valid
 
@@ -177,85 +114,9 @@ class StorageBackend(ABC):
             message_class = "warning"
             return {"message": message, "class": message_class}
 
-    def parse_source(self, user, query_params, raise_error=False):
-        source = None
-        if "source" in query_params:
-            source = query_params["source"]
-
-            if source in settings.SOURCES_RESTRICTED:
-                if not user.has_perm("core.restricted_sources"):
-                    message = f"Access denied: {source}"
-                    if raise_error:
-                        raise QueryError(message)
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            elif source not in settings.MAIN_SOURCES:
-                message = f"Invalid source: {source}"
-                if raise_error:
-                    raise QueryError(message)
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-
-            if source == "all":
-                source = None  # the next block will populate it
-
-        if source:
-            sources = [source]
-        else:
-            sources = list(settings.MAIN_SOURCES)
-            if user.has_perm("core.restricted_sources"):
-                for source_iter in settings.SOURCES_RESTRICTED:
-                    sources.append(source_iter)
-
-        if "all" in sources:
-            sources.remove("all")
-
-        return sources
-
-    def parse_sort(self, query_params):
-        sort = None
-        if "sorting" in query_params:
-            sorting = query_params["sorting"]
-            if sorting not in ("asc", "desc", "none"):
-                message = "Invalid sort"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if sorting == "asc":
-                sort = "ascending"
-            elif sorting == "desc":
-                sort = "descending"
-        return sort
-
-    def parse_date_time(self, query_params):
-        if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
-            query_params.keys()
-        ):
-            from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
-            to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
-            from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
-            to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
-
-            return (from_ts, to_ts)
-        return (None, None)
-
-    def parse_sentiment(self, query_params):
-        sentiment = None
-        if "check_sentiment" in query_params:
-            if "sentiment_method" not in query_params:
-                message = "No sentiment method"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if "sentiment" in query_params:
-                sentiment = query_params["sentiment"]
-                try:
-                    sentiment = float(sentiment)
-                except ValueError:
-                    message = "Sentiment is not a float"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            sentiment_method = query_params["sentiment_method"]
-
-        return (sentiment_method, sentiment)
+    @abstractmethod
+    def run_query(self, **kwargs):
+        pass
 
     def filter_blacklisted(self, user, response):
         """
@@ -307,6 +168,71 @@ class StorageBackend(ABC):
         # Actually get rid of all the things we set to None
         response["hits"]["hits"] = [hit for hit in response["hits"]["hits"] if hit]
 
+    def add_bool(self, search_query, add_bool):
+        """
+        Add the specified boolean matches to search query.
+        """
+        if not add_bool:
+            return
+        for item in add_bool:
+            search_query["query"]["bool"]["must"].append({"match_phrase": item})
+
+    def add_top(self, search_query, add_top, negative=False):
+        """
+        Merge add_top with the base of the search_query.
+        """
+        if not add_top:
+            return
+        if negative:
+            for item in add_top:
+                if "must_not" in search_query["query"]["bool"]:
+                    search_query["query"]["bool"]["must_not"].append(item)
+                else:
+                    search_query["query"]["bool"]["must_not"] = [item]
+        else:
+            for item in add_top:
+                if "query" not in search_query:
+                    search_query["query"] = {"bool": {"must": []}}
+                search_query["query"]["bool"]["must"].append(item)
+
+    def schedule_check_aggregations(self, rule_object, result_map):
+        """
+        Check the results of a scheduled query for aggregations.
+        """
+        if rule_object.aggs is None:
+            return result_map
+        for index, (meta, result) in result_map.items():
+            # Default to true, if no aggs are found, we still want to match
+            match = True
+            for agg_name, (operator, number) in rule_object.aggs.items():
+                if agg_name in meta["aggs"]:
+                    agg_value = meta["aggs"][agg_name]["value"]
+
+                    # TODO: simplify this, match is default to True
+                    if operator == ">":
+                        if agg_value > number:
+                            match = True
+                        else:
+                            match = False
+                    elif operator == "<":
+                        if agg_value < number:
+                            match = True
+                        else:
+                            match = False
+                    elif operator == "=":
+                        if agg_value == number:
+                            match = True
+                        else:
+                            match = False
+                    else:
+                        match = False
+                else:
+                    # No aggregation found, but it is required
+                    match = False
+                result_map[index][0]["aggs"][agg_name]["match"] = match
+
+        return result_map
+
     def query(self, user, search_query, **kwargs):
         # For time tracking
         start = time.process_time()
@@ -334,13 +260,38 @@ class StorageBackend(ABC):
             message = f"Error: {response.info['error']['root_cause'][0]['type']}"
             message_class = "danger"
             return {"message": message, "class": message_class}
-        if len(response["hits"]["hits"]) == 0:
+        if "took" in response:
+            if response["took"] is None:
+                return None
+            if "error" in response:
+                message = f"Error: {response['error']}"
+                message_class = "danger"
+                time_took = (time.process_time() - start) * 1000
+                # Round to 3 significant figures
+                time_took_rounded = round(
+                    time_took, 3 - int(floor(log10(abs(time_took)))) - 1
+                )
+                return {
+                    "message": message,
+                    "class": message_class,
+                    "took": time_took_rounded,
+                }
+        elif len(response["hits"]["hits"]) == 0:
             message = "No results."
             message_class = "danger"
-            return {"message": message, "class": message_class}
+            time_took = (time.process_time() - start) * 1000
+            # Round to 3 significant figures
+            time_took_rounded = round(
+                time_took, 3 - int(floor(log10(abs(time_took)))) - 1
+            )
+            return {
+                "message": message,
+                "class": message_class,
+                "took": time_took_rounded,
+            }
 
         # For Druid
-        if "error" in response:
+        elif "error" in response:
             if "errorMessage" in response:
                 context = {
                     "message": response["errorMessage"],
@@ -349,9 +300,6 @@ class StorageBackend(ABC):
                 return context
             else:
                 return response
-        if "took" in response:
-            if response["took"] is None:
-                return None
 
         # Removed for now, no point given we have restricted indexes
         # self.filter_blacklisted(user, response)
@@ -370,6 +318,106 @@ class StorageBackend(ABC):
         time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1)
         return {"object_list": response_parsed, "took": time_took_rounded}
 
+    def construct_context_query(
+        self, index, net, channel, src, num, size, type=None, nicks=None
+    ):
+        # Get the initial query
+        query = self.construct_query(None, size, blank=True)
+
+        extra_must = []
+        extra_should = []
+        extra_should2 = []
+        if num:
+            extra_must.append({"match_phrase": {"num": num}})
+        if net:
+            extra_must.append({"match_phrase": {"net": net}})
+        if channel:
+            extra_must.append({"match": {"channel": channel}})
+        if nicks:
+            for nick in nicks:
+                extra_should2.append({"match": {"nick": nick}})
+
+        types = ["msg", "notice", "action", "kick", "topic", "mode"]
+        fields = [
+            "nick",
+            "ident",
+            "host",
+            "channel",
+            "ts",
+            "msg",
+            "type",
+            "net",
+            "src",
+            "tokens",
+        ]
+        query["fields"] = fields
+
+        if index == "internal":
+            fields.append("mtype")
+            if channel == "*status" or type == "znc":
+                if {"match": {"channel": channel}} in extra_must:
+                    extra_must.remove({"match": {"channel": channel}})
+                extra_should2 = []
+                # Type is one of msg or notice
+                # extra_should.append({"match": {"mtype": "msg"}})
+                # extra_should.append({"match": {"mtype": "notice"}})
+                extra_should.append({"match": {"type": "znc"}})
+                extra_should.append({"match": {"type": "self"}})
+
+                extra_should2.append({"match": {"type": "znc"}})
+                extra_should2.append({"match": {"nick": channel}})
+            elif type == "auth":
+                if {"match": {"channel": channel}} in extra_must:
+                    extra_must.remove({"match": {"channel": channel}})
+                extra_should2 = []
+                extra_should2.append({"match": {"nick": channel}})
+                # extra_should2.append({"match": {"mtype": "msg"}})
+                # extra_should2.append({"match": {"mtype": "notice"}})
+
+                extra_should.append({"match": {"type": "query"}})
+                extra_should2.append({"match": {"type": "self"}})
+                extra_should.append({"match": {"nick": channel}})
+            else:
+                for ctype in types:
+                    extra_should.append({"match": {"mtype": ctype}})
+        else:
+            for ctype in types:
+                extra_should.append({"match": {"type": ctype}})
+        # query = {
+        #     "index": index,
+        #     "limit": size,
+        #     "query": {
+        #         "bool": {
+        #             "must": [
+        #                 # {"equals": {"src": src}},
+        #                 # {
+        #                 #     "bool": {
+        #                 #         "should": [*extra_should],
+        #                 #     }
+        #                 # },
+        #                 # {
+        #                 #     "bool": {
+        #                 #         "should": [*extra_should2],
+        #                 #     }
+        #                 # },
+        #                 *extra_must,
+        #             ]
+        #         }
+        #     },
+        #     "fields": fields,
+        #     # "_source": False,
+        # }
+        if extra_must:
+            for x in extra_must:
+                query["query"]["bool"]["must"].append(x)
+        if extra_should:
+            query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
+        if extra_should2:
+            query["query"]["bool"]["must"].append(
+                {"bool": {"should": [*extra_should2]}}
+            )
+        return query
+
     @abstractmethod
     def query_results(self, **kwargs):
         pass
```
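Both the error and no-results paths of `query()` now report elapsed time rounded to three significant figures via `round(x, 3 - int(floor(log10(abs(x)))) - 1)`. A self-contained sketch of that formula (`round_sig` is a name invented here, not part of the codebase):

```python
from math import floor, log10

def round_sig(value: float, figures: int = 3) -> float:
    # log10 gives the magnitude of the value; subtracting it converts
    # "significant figures" into the digit count that round() expects.
    return round(value, figures - int(floor(log10(abs(value)))) - 1)

print(round_sig(1234.56))   # 1230.0
print(round_sig(0.004567))  # 0.00457
```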
```diff
@@ -6,13 +6,21 @@ from django.conf import settings
 
 from core.db import StorageBackend, add_defaults
 from core.db.processing import parse_druid
+from core.lib.parsing import (
+    parse_date_time,
+    parse_index,
+    parse_sentiment,
+    parse_size,
+    parse_sort,
+    parse_source,
+)
 
 logger = logging.getLogger(__name__)
 
 
 class DruidBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Druid")
+        super().__init__("druid")
 
     def initialise(self, **kwargs):
         # self.client = PyDruid("http://broker:8082", "druid/v2")
@@ -155,12 +163,12 @@ class DruidBackend(StorageBackend):
         else:
             sizes = settings.MAIN_SIZES
         if not size:
-            size = self.parse_size(query_params, sizes)
+            size = parse_size(query_params, sizes)
             if isinstance(size, dict):
                 return size
 
         # I - Index
-        index = self.parse_index(request.user, query_params)
+        index = parse_index(request.user, query_params)
         if isinstance(index, dict):
             return index
 
@@ -173,7 +181,7 @@ class DruidBackend(StorageBackend):
             return search_query
 
         # S - Sources
-        sources = self.parse_source(request.user, query_params)
+        sources = parse_source(request.user, query_params)
         if isinstance(sources, dict):
             return sources
         total_count = len(sources)
@@ -182,20 +190,20 @@ class DruidBackend(StorageBackend):
             add_in["src"] = sources
 
         # R - Ranges
-        from_ts, to_ts = self.parse_date_time(query_params)
+        from_ts, to_ts = parse_date_time(query_params)
         if from_ts:
             addendum = f"{from_ts}/{to_ts}"
             search_query["intervals"] = [addendum]
 
         # S - Sort
-        sort = self.parse_sort(query_params)
+        sort = parse_sort(query_params)
         if isinstance(sort, dict):
             return sort
         if sort:
             search_query["order"] = sort
 
         # S - Sentiment
-        sentiment_r = self.parse_sentiment(query_params)
+        sentiment_r = parse_sentiment(query_params)
         if isinstance(sentiment_r, dict):
             return sentiment_r
         if sentiment_r:
```
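The `self.parse_*()` call sites replaced above correspond to the methods deleted from `StorageBackend` earlier in this compare: the helpers now live as plain functions in `core.lib.parsing`, shared by both backends. As a sketch, the moved `parse_size` reads the same as the deleted method minus `self` (reconstructed from the removed body, not from core/lib/parsing.py itself):

```python
def parse_size(query_params, sizes):
    # Return the requested result count, or an error dict if it is not permitted.
    if "size" in query_params:
        size = query_params["size"]
        if size not in sizes:
            return {"message": "Size is not permitted", "class": "danger"}
        return int(size)
    return 15
```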
@@ -2,7 +2,7 @@
|
|||||||
# from datetime import datetime, timedelta
|
# from datetime import datetime, timedelta
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from elasticsearch import Elasticsearch
|
from elasticsearch import AsyncElasticsearch, Elasticsearch
|
||||||
from elasticsearch.exceptions import NotFoundError, RequestError
|
from elasticsearch.exceptions import NotFoundError, RequestError
|
||||||
|
|
||||||
from core.db import StorageBackend, add_defaults
|
from core.db import StorageBackend, add_defaults
|
||||||
@@ -10,15 +10,46 @@ from core.db import StorageBackend, add_defaults
|
|||||||
# from json import dumps
|
# from json import dumps
|
||||||
# pp = lambda x: print(dumps(x, indent=2))
|
# pp = lambda x: print(dumps(x, indent=2))
|
||||||
from core.db.processing import parse_results
|
from core.db.processing import parse_results
|
||||||
|
from core.lib.parsing import (
|
||||||
|
QueryError,
|
||||||
|
parse_date_time,
|
||||||
|
parse_index,
|
||||||
|
parse_rule,
|
||||||
|
parse_sentiment,
|
||||||
|
parse_size,
|
||||||
|
parse_sort,
|
||||||
|
parse_source,
|
||||||
|
)
|
||||||
|
|
||||||
|
# These are sometimes numeric, sometimes strings.
|
||||||
|
# If they are seen to be numeric first, ES will erroneously
|
||||||
|
# index them as "long" and then subsequently fail to index messages
|
||||||
|
# with strings in the field.
|
||||||
|
keyword_fields = ["nick_id", "user_id", "net_id"]
|
||||||
|
|
||||||
|
mapping = {
|
||||||
|
"mappings": {
|
||||||
|
"properties": {
|
||||||
|
"ts": {"type": "date", "format": "epoch_second"},
|
||||||
|
"match_ts": {"type": "date", "format": "iso8601"},
|
||||||
|
"file_tim": {"type": "date", "format": "epoch_millis"},
|
||||||
|
"rule_id": {"type": "keyword"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for field in keyword_fields:
|
||||||
|
mapping["mappings"]["properties"][field] = {"type": "text"}
|
||||||
|
|
||||||
|
|
||||||
class ElasticsearchBackend(StorageBackend):
|
class ElasticsearchBackend(StorageBackend):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super().__init__("Elasticsearch")
|
super().__init__("elasticsearch")
|
||||||
|
self.client = None
|
||||||
|
self.async_client = None
|
||||||
|
|
||||||
def initialise(self, **kwargs):
|
def initialise(self, **kwargs):
|
||||||
"""
|
"""
|
||||||
Inititialise the Elastuicsearch API endpoint.
|
Inititialise the Elasticsearch API endpoint.
|
||||||
"""
|
"""
|
||||||
auth = (settings.ELASTICSEARCH_USERNAME, settings.ELASTICSEARCH_PASSWORD)
|
auth = (settings.ELASTICSEARCH_USERNAME, settings.ELASTICSEARCH_PASSWORD)
|
||||||
client = Elasticsearch(
|
client = Elasticsearch(
|
||||||
@@ -26,6 +57,43 @@ class ElasticsearchBackend(StorageBackend):
|
|||||||
)
|
)
|
||||||
self.client = client
|
self.client = client
|
||||||
|
|
||||||
|
async def async_initialise(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Inititialise the Elasticsearch API endpoint in async mode.
|
||||||
|
"""
|
||||||
|
global mapping
|
||||||
|
auth = (settings.ELASTICSEARCH_USERNAME, settings.ELASTICSEARCH_PASSWORD)
|
||||||
|
client = AsyncElasticsearch(
|
||||||
|
settings.ELASTICSEARCH_URL, http_auth=auth, verify_certs=False
|
||||||
|
)
|
||||||
|
self.async_client = client
|
||||||
|
|
||||||
|
# Create the rule storage indices
|
||||||
|
if await client.indices.exists(index=settings.INDEX_RULE_STORAGE):
|
||||||
|
await client.indices.put_mapping(
|
||||||
|
index=settings.INDEX_RULE_STORAGE,
|
||||||
|
properties=mapping["mappings"]["properties"],
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
await client.indices.create(
|
||||||
|
index=settings.INDEX_RULE_STORAGE, mappings=mapping["mappings"]
|
||||||
|
)
|
||||||
|
|
||||||
|
def delete_rule_entries(self, rule_id):
|
||||||
|
"""
|
||||||
|
Delete all entries for a given rule.
|
||||||
|
:param rule_id: The rule ID to delete.
|
||||||
|
"""
|
||||||
|
if self.client is None:
|
||||||
|
self.initialise()
|
||||||
|
search_query = self.construct_query(None, None, blank=True)
|
||||||
|
search_query["query"]["bool"]["must"].append(
|
||||||
|
{"match_phrase": {"rule_id": rule_id}}
|
||||||
|
)
|
||||||
|
return self.client.delete_by_query(
|
||||||
|
index=settings.INDEX_RULE_STORAGE, body=search_query
|
||||||
|
)
|
||||||
|
|
||||||
def construct_context_query(
|
def construct_context_query(
|
||||||
self, index, net, channel, src, num, size, type=None, nicks=None
|
self, index, net, channel, src, num, size, type=None, nicks=None
|
||||||
):
|
):
|
||||||
@@ -87,7 +155,7 @@ class ElasticsearchBackend(StorageBackend):
|
|||||||
extra_should.append({"match": {"nick": channel}})
|
extra_should.append({"match": {"nick": channel}})
|
||||||
else:
|
else:
|
||||||
for ctype in types:
|
for ctype in types:
|
||||||
extra_should.append({"equals": {"mtype": ctype}})
|
extra_should.append({"match": {"mtype": ctype}})
|
||||||
else:
|
else:
|
||||||
for ctype in types:
|
for ctype in types:
|
||||||
extra_should.append({"match": {"type": ctype}})
|
extra_should.append({"match": {"type": ctype}})
|
||||||
@@ -126,14 +194,16 @@ class ElasticsearchBackend(StorageBackend):
|
|||||||
)
|
)
|
||||||
return query
|
return query
|
||||||
|
|
||||||
def construct_query(self, query, size, blank=False):
|
def construct_query(self, query, size=None, blank=False, **kwargs):
|
||||||
"""
|
"""
|
||||||
Accept some query parameters and construct an Elasticsearch query.
|
Accept some query parameters and construct an Elasticsearch query.
|
||||||
"""
|
"""
|
||||||
query_base = {
|
query_base = {
|
||||||
"size": size,
|
# "size": size,
|
||||||
"query": {"bool": {"must": []}},
|
"query": {"bool": {"must": []}},
|
||||||
}
|
}
|
||||||
|
if size:
|
||||||
|
query_base["size"] = size
|
||||||
query_string = {
|
query_string = {
|
||||||
"query_string": {
|
"query_string": {
|
||||||
"query": query,
|
"query": query,
|
||||||
@@ -163,8 +233,8 @@ class ElasticsearchBackend(StorageBackend):
|
|||||||
query_base["query"]["bool"]["must"].append(query_string)
|
query_base["query"]["bool"]["must"].append(query_string)
|
||||||
return query_base
|
return query_base
|
||||||
|
|
||||||
def parse(self, response):
|
def parse(self, response, **kwargs):
|
||||||
parsed = parse_results(response)
|
parsed = parse_results(response, **kwargs)
|
||||||
return parsed
|
return parsed
|
||||||
|
|
||||||
def run_query(self, user, search_query, **kwargs):
|
def run_query(self, user, search_query, **kwargs):
|
||||||
@@ -175,6 +245,8 @@ class ElasticsearchBackend(StorageBackend):
|
|||||||
Accept fields and size, for the fields we want to match and the
|
Accept fields and size, for the fields we want to match and the
|
||||||
number of results to return.
|
number of results to return.
|
||||||
"""
|
"""
|
||||||
|
if self.client is None:
|
||||||
|
self.initialise()
|
||||||
index = kwargs.get("index")
|
index = kwargs.get("index")
|
||||||
try:
|
try:
|
||||||
response = self.client.search(body=search_query, index=index)
|
response = self.client.search(body=search_query, index=index)
|
||||||
@@ -186,6 +258,188 @@ class ElasticsearchBackend(StorageBackend):
|
|||||||
return err
|
return err
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
async def async_run_query(self, user, search_query, **kwargs):
|
||||||
|
"""
|
||||||
|
Low level helper to run an ES query.
|
||||||
|
Accept a user to pass it to the filter, so we can
|
||||||
|
avoid filtering for superusers.
|
||||||
|
Accept fields and size, for the fields we want to match and the
|
||||||
|
number of results to return.
|
||||||
|
"""
|
||||||
|
if self.async_client is None:
|
||||||
|
await self.async_initialise()
|
||||||
|
index = kwargs.get("index")
|
||||||
|
try:
|
||||||
|
response = await self.async_client.search(body=search_query, index=index)
|
||||||
|
except RequestError as err:
|
||||||
|
print("Elasticsearch error", err)
|
||||||
|
return err
|
||||||
|
except NotFoundError as err:
|
||||||
|
print("Elasticsearch error", err)
|
||||||
|
return err
|
||||||
|
return response
|
||||||
|
|
||||||
|
async def async_store_matches(self, matches):
|
||||||
|
"""
|
||||||
|
Store a list of matches in Elasticsearch.
|
||||||
|
:param index: The index to store the matches in.
|
||||||
|
:param matches: A list of matches to store.
|
||||||
|
"""
|
||||||
|
if self.async_client is None:
|
||||||
|
await self.async_initialise()
|
||||||
|
for match in matches:
|
||||||
|
result = await self.async_client.index(
|
||||||
|
index=settings.INDEX_RULE_STORAGE, body=match
|
||||||
|
)
|
||||||
|
if not result["result"] == "created":
|
||||||
|
self.log.error(f"Indexing failed: {result}")
|
||||||
|
self.log.debug(f"Indexed {len(matches)} messages in ES")
|
||||||
|
|
||||||
|
def store_matches(self, matches):
|
||||||
|
"""
|
||||||
|
Store a list of matches in Elasticsearch.
|
||||||
|
:param index: The index to store the matches in.
|
||||||
|
:param matches: A list of matches to store.
|
||||||
|
"""
|
||||||
|
if self.client is None:
|
||||||
|
self.initialise()
|
||||||
|
for match in matches:
|
||||||
|
result = self.client.index(index=settings.INDEX_RULE_STORAGE, body=match)
|
||||||
|
if not result["result"] == "created":
|
||||||
|
self.log.error(f"Indexing failed: {result}")
|
||||||
|
self.log.debug(f"Indexed {len(matches)} messages in ES")
|
||||||
|
|
||||||
|
def prepare_schedule_query(self, rule_object):
|
||||||
|
"""
|
||||||
|
Helper to run a scheduled query with reduced functionality.
|
||||||
|
"""
|
||||||
|
data = rule_object.parsed
|
||||||
|
|
||||||
|
if "tags" in data:
|
||||||
|
tags = data["tags"]
|
||||||
|
else:
|
||||||
|
tags = []
|
||||||
|
|
||||||
|
if "query" in data:
|
||||||
|
query = data["query"][0]
|
||||||
|
data["query"] = query
|
||||||
|
|
||||||
|
add_bool = []
|
||||||
|
add_top = []
|
||||||
|
if "source" in data:
|
||||||
|
total_count = len(data["source"])
|
||||||
|
total_sources = len(settings.MAIN_SOURCES) + len(
|
||||||
|
settings.SOURCES_RESTRICTED
|
||||||
|
)
|
||||||
|
if total_count != total_sources:
|
||||||
|
add_top_tmp = {"bool": {"should": []}}
|
||||||
|
for source_iter in data["source"]:
|
||||||
|
add_top_tmp["bool"]["should"].append(
|
||||||
|
{"match_phrase": {"src": source_iter}}
|
||||||
|
)
|
||||||
|
add_top.append(add_top_tmp)
|
||||||
|
if "tokens" in data:
|
||||||
|
add_top_tmp = {"bool": {"should": []}}
|
||||||
|
for token in data["tokens"]:
|
||||||
|
add_top_tmp["bool"]["should"].append(
|
||||||
|
{"match_phrase": {"tokens": token}}
|
||||||
|
)
|
||||||
|
add_top.append(add_top_tmp)
|
||||||
|
for field, values in data.items():
|
||||||
|
if field not in ["source", "index", "tags", "query", "sentiment", "tokens"]:
|
||||||
|
for value in values:
|
||||||
|
add_top.append({"match": {field: value}})
|
||||||
|
# Bypass the check for query and tags membership since we can search by msg, etc
|
||||||
|
search_query = self.parse_query(
|
||||||
|
data, tags, None, False, add_bool, bypass_check=True
|
||||||
|
)
|
||||||
|
if rule_object.window is not None:
|
||||||
|
range_query = {
|
||||||
|
"range": {
|
||||||
|
"ts": {
|
||||||
|
"gte": f"now-{rule_object.window}",
|
||||||
|
"lte": "now",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
add_top.append(range_query)
|
||||||
|
self.add_bool(search_query, add_bool)
|
||||||
|
self.add_top(search_query, add_top)
|
||||||
|
# if "sentiment" in data:
|
||||||
|
search_query["aggs"] = {
|
||||||
|
"avg_sentiment": {
|
||||||
|
"avg": {"field": "sentiment"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return search_query
|
||||||
|
|
||||||
|
def schedule_query_results_test_sync(self, rule_object):
|
||||||
|
"""
|
||||||
|
Helper to run a scheduled query test with reduced functionality.
|
||||||
|
Sync version for running from Django forms.
|
||||||
|
Does not return results.
|
||||||
|
"""
|
||||||
|
data = rule_object.parsed
|
||||||
|
|
||||||
|
search_query = self.prepare_schedule_query(rule_object)
|
||||||
|
for index in data["index"]:
|
||||||
|
if "message" in search_query:
|
||||||
|
self.log.error(f"Error parsing test query: {search_query['message']}")
|
||||||
|
continue
|
||||||
|
response = self.run_query(
|
||||||
|
rule_object.user,
|
||||||
|
search_query,
|
||||||
|
index=index,
|
||||||
|
)
|
||||||
|
self.log.debug(f"Running scheduled test query on {index}: {search_query}")
|
||||||
|
# self.log.debug(f"Response from scheduled query: {response}")
|
||||||
|
if isinstance(response, Exception):
|
||||||
|
error = response.info["error"]["root_cause"][0]["reason"]
|
||||||
|
self.log.error(f"Error running test scheduled search: {error}")
|
||||||
|
raise QueryError(error)
|
||||||
|
|
||||||
|
async def schedule_query_results(self, rule_object):
|
||||||
|
"""
|
||||||
|
Helper to run a scheduled query with reduced functionality and async.
|
||||||
|
"""
|
||||||
|
result_map = {}
|
||||||
|
data = rule_object.parsed
|
||||||
|
|
||||||
|
search_query = self.prepare_schedule_query(rule_object)
|
||||||
|
|
||||||
|
for index in data["index"]:
|
||||||
|
if "message" in search_query:
|
||||||
|
self.log.error(f"Error parsing query: {search_query['message']}")
|
||||||
|
continue
|
||||||
|
response = await self.async_run_query(
|
||||||
|
rule_object.user,
|
||||||
|
search_query,
|
||||||
|
index=index,
|
||||||
|
)
|
||||||
|
self.log.debug(f"Running scheduled query on {index}: {search_query}")
|
||||||
|
# self.log.debug(f"Response from scheduled query: {response}")
|
||||||
|
if isinstance(response, Exception):
|
||||||
|
error = response.info["error"]["root_cause"][0]["reason"]
|
||||||
|
self.log.error(f"Error running scheduled search: {error}")
|
||||||
|
raise QueryError(error)
|
||||||
|
if len(response["hits"]["hits"]) == 0:
|
||||||
|
# No results, skip
|
||||||
|
result_map[index] = ({}, [])
|
||||||
|
continue
|
||||||
|
meta, response = self.parse(response, meta=True)
|
||||||
|
# print("Parsed response", response)
|
||||||
|
if "message" in response:
|
||||||
|
self.log.error(f"Error running scheduled search: {response['message']}")
|
||||||
|
continue
|
||||||
|
result_map[index] = (meta, response)
|
||||||
|
|
||||||
|
# Average aggregation check
|
||||||
|
# Could probably do this in elasticsearch
|
||||||
|
result_map = self.schedule_check_aggregations(rule_object, result_map)
|
||||||
|
|
||||||
|
return result_map
|
||||||
|
|
||||||
     def query_results(
         self,
         request,
@@ -198,7 +452,6 @@ class ElasticsearchBackend(StorageBackend):
         dedup_fields=None,
         tags=None,
     ):
-
         add_bool = []
         add_top = []
         add_top_negative = []
@@ -224,12 +477,20 @@ class ElasticsearchBackend(StorageBackend):
         else:
             sizes = settings.MAIN_SIZES
         if not size:
-            size = self.parse_size(query_params, sizes)
+            size = parse_size(query_params, sizes)
             if isinstance(size, dict):
                 return size
 
+        rule_object = parse_rule(request.user, query_params)
+        if isinstance(rule_object, dict):
+            return rule_object
+
+        if rule_object is not None:
+            index = settings.INDEX_RULE_STORAGE
+            add_bool.append({"rule_id": str(rule_object.id)})
+        else:
             # I - Index
-            index = self.parse_index(request.user, query_params)
+            index = parse_index(request.user, query_params)
             if isinstance(index, dict):
                 return index
 
@@ -242,22 +503,30 @@ class ElasticsearchBackend(StorageBackend):
             return search_query
 
         # S - Sources
-        sources = self.parse_source(request.user, query_params)
+        sources = parse_source(request.user, query_params)
         if isinstance(sources, dict):
             return sources
         total_count = len(sources)
-        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
+        # Total is -1 due to the "all" source
+        total_sources = (
+            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
+        )
+
+        # If the sources the user has access to are equal to all
+        # possible sources, then we don't need to add the source
+        # filter to the query.
         if total_count != total_sources:
             add_top_tmp = {"bool": {"should": []}}
             for source_iter in sources:
                 add_top_tmp["bool"]["should"].append(
                     {"match_phrase": {"src": source_iter}}
                 )
+            if query_params["source"] != "all":
                 add_top.append(add_top_tmp)
 
         # R - Ranges
         # date_query = False
-        from_ts, to_ts = self.parse_date_time(query_params)
+        from_ts, to_ts = parse_date_time(query_params)
         if from_ts:
             range_query = {
                 "range": {
@@ -270,15 +539,20 @@ class ElasticsearchBackend(StorageBackend):
             add_top.append(range_query)
 
         # S - Sort
-        sort = self.parse_sort(query_params)
+        sort = parse_sort(query_params)
         if isinstance(sort, dict):
             return sort
 
+        if rule_object is not None:
+            field = "match_ts"
+        else:
+            field = "ts"
         if sort:
             # For Druid compatibility
             sort_map = {"ascending": "asc", "descending": "desc"}
             sorting = [
                 {
-                    "ts": {
+                    field: {
                         "order": sort_map[sort],
                     }
                 }
@@ -286,28 +560,32 @@ class ElasticsearchBackend(StorageBackend):
             search_query["sort"] = sorting
 
         # S - Sentiment
-        sentiment_r = self.parse_sentiment(query_params)
+        sentiment_r = parse_sentiment(query_params)
         if isinstance(sentiment_r, dict):
             return sentiment_r
         if sentiment_r:
+            if rule_object is not None:
+                sentiment_index = "meta.aggs.avg_sentiment.value"
+            else:
+                sentiment_index = "sentiment"
            sentiment_method, sentiment = sentiment_r
-            range_query_compare = {"range": {"sentiment": {}}}
+            range_query_compare = {"range": {sentiment_index: {}}}
             range_query_precise = {
                 "match": {
-                    "sentiment": None,
+                    sentiment_index: None,
                 }
             }
             if sentiment_method == "below":
-                range_query_compare["range"]["sentiment"]["lt"] = sentiment
+                range_query_compare["range"][sentiment_index]["lt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "above":
-                range_query_compare["range"]["sentiment"]["gt"] = sentiment
+                range_query_compare["range"][sentiment_index]["gt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "exact":
-                range_query_precise["match"]["sentiment"] = sentiment
+                range_query_precise["match"][sentiment_index] = sentiment
                 add_top.append(range_query_precise)
             elif sentiment_method == "nonzero":
-                range_query_precise["match"]["sentiment"] = 0
+                range_query_precise["match"][sentiment_index] = 0
                 add_top_negative.append(range_query_precise)
 
         # Add in the additional information we already populated
@@ -371,4 +649,6 @@ class ElasticsearchBackend(StorageBackend):
                     search_query["query"]["bool"]["must_not"] = [item]
         else:
             for item in add_top:
+                if "query" not in search_query:
+                    search_query["query"] = {"bool": {"must": []}}
                 search_query["query"]["bool"]["must"].append(item)
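For orientation, this is roughly the body the branches above assemble for a logged-in user searching one source with a sentiment floor. Values and clause shapes are illustrative, not output captured from this code:

# Illustrative only: approximate shape of the assembled search_query.
search_query = {
    "size": 15,
    "query": {
        "bool": {
            "must": [
                {"query_string": {"query": "error"}},
                {"bool": {"should": [{"match_phrase": {"src": "irc"}}]}},
                {"range": {"sentiment": {"gt": 0.5}}},
            ]
        }
    },
    "sort": [{"ts": {"order": "desc"}}],
}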
core/db/manticore.py
@@ -1,19 +1,31 @@
 import logging
 from datetime import datetime
 from pprint import pprint
 
+import httpx
+import orjson
 import requests
 from django.conf import settings
 
 from core.db import StorageBackend, add_defaults, dedup_list
-from core.db.processing import annotate_results, parse_results
+from core.db.processing import parse_results
+from core.lib.parsing import (
+    QueryError,
+    parse_date_time,
+    parse_index,
+    parse_rule,
+    parse_sentiment,
+    parse_size,
+    parse_sort,
+    parse_source,
+)
 
 logger = logging.getLogger(__name__)
 
 
 class ManticoreBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Manticore")
+        super().__init__("manticore")
 
     def initialise(self, **kwargs):
         """
@@ -21,14 +33,27 @@ class ManticoreBackend(StorageBackend):
         """
         pass  # we use requests
 
-    def construct_query(self, query, size, index, blank=False):
+    async def async_initialise(self, **kwargs):
+        """
+        Initialise the Manticore client in async mode
+        """
+        pass  # we use requests
+
+    def delete_rule_entries(self, rule_id):
+        """
+        Delete all entries for a given rule.
+        :param rule_id: The rule ID to delete.
+        """
+        # TODO
+
+    def construct_query(self, query, size=None, blank=False, **kwargs):
         """
         Accept some query parameters and construct an OpenSearch query.
         """
         if not size:
             size = 5
         query_base = {
-            "index": index,
+            "index": kwargs.get("index"),
             "limit": size,
             "query": {"bool": {"must": []}},
         }
@@ -39,11 +64,79 @@ class ManticoreBackend(StorageBackend):
             query_base["query"]["bool"]["must"].append(query_string)
         return query_base
 
-    def run_query(self, client, user, search_query):
-        response = requests.post(
-            f"{settings.MANTICORE_URL}/json/search", json=search_query
-        )
-        return response
+    def parse(self, response, **kwargs):
+        parsed = parse_results(response, **kwargs)
+        return parsed
+
+    def run_query(self, user, search_query, **kwargs):
+        """
+        Low level helper to run Manticore query.
+        """
+        index = kwargs.get("index")
+        raw = kwargs.get("raw")
+        if search_query and not raw:
+            search_query["index"] = index
+
+        path = kwargs.get("path", "json/search")
+        if raw:
+            response = requests.post(
+                f"{settings.MANTICORE_URL}/{path}", search_query
+            )
+        else:
+            response = requests.post(
+                f"{settings.MANTICORE_URL}/{path}", json=search_query
+            )
+
+        return orjson.loads(response.text)
+
+    async def async_run_query(self, user, search_query, **kwargs):
+        """
+        Low level helper to run Manticore query asynchronously.
+        """
+        index = kwargs.get("index")
+        search_query["index"] = index
+        async with httpx.AsyncClient() as client:
+            response = await client.post(
+                f"{settings.MANTICORE_URL}/json/search", json=search_query
+            )
+        return orjson.loads(response.text)
+
+    async def async_store_matches(self, matches):
+        """
+        Store a list of matches in Manticore.
+        :param index: The index to store the matches in.
+        :param matches: A list of matches to store.
+        """
+        # TODO
+
+    def store_matches(self, matches):
+        """
+        Store a list of matches in Manticore.
+        :param index: The index to store the matches in.
+        :param matches: A list of matches to store.
+        """
+        # TODO
+
+    def prepare_schedule_query(self, rule_object):
+        """
+        Helper to run a scheduled query with reduced functionality.
+        """
+        # TODO
+
+    def schedule_query_results_test_sync(self, rule_object):
+        """
+        Helper to run a scheduled query test with reduced functionality.
+        Sync version for running from Django forms.
+        Does not return results.
+        """
+        # TODO
+
+    async def schedule_query_results(self, rule_object):
+        """
+        Helper to run a scheduled query with reduced functionality and async.
+        """
+        # TODO
+
     def query_results(
         self,
@@ -67,117 +160,77 @@ class ManticoreBackend(StorageBackend):
         query_created = False
         source = None
         add_defaults(query_params)
-        # Check size
+
+        # Now, run the helpers for SIQTSRSS/ADR
+        # S - Size
+        # I - Index
+        # Q - Query
+        # T - Tags
+        # S - Source
+        # R - Ranges
+        # S - Sort
+        # S - Sentiment
+        # A - Annotate
+        # D - Dedup
+        # R - Reverse
+
+        # S - Size
         if request.user.is_anonymous:
-            sizes = settings.MANTICORE_MAIN_SIZES_ANON
+            sizes = settings.MAIN_SIZES_ANON
         else:
-            sizes = settings.MANTICORE_MAIN_SIZES
+            sizes = settings.MAIN_SIZES
         if not size:
-            if "size" in query_params:
-                size = query_params["size"]
-                if size not in sizes:
-                    message = "Size is not permitted"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-                size = int(size)
-            else:
-                size = 20
-
-        # Check index
-        if "index" in query_params:
-            index = query_params["index"]
-            if index == "main":
-                index = settings.MANTICORE_INDEX_MAIN
-            else:
-                if not request.user.has_perm(f"core.index_{index}"):
-                    message = "Not permitted to search by this index"
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-                if index == "meta":
-                    index = settings.MANTICORE_INDEX_META
-                elif index == "internal":
-                    index = settings.MANTICORE_INDEX_INT
-                else:
-                    message = "Index is not valid."
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-        else:
-            index = settings.MANTICORE_INDEX_MAIN
-
-        # Create the search query
-        if "query" in query_params:
-            query = query_params["query"]
-            search_query = self.construct_query(query, size, index)
-            query_created = True
-        else:
-            if custom_query:
-                search_query = custom_query
-
-        if tags:
-            # Get a blank search query
-            if not query_created:
-                search_query = self.construct_query(None, size, index, blank=True)
-                query_created = True
-            for tagname, tagvalue in tags.items():
-                add_bool.append({tagname: tagvalue})
-
-        required_any = ["query_full", "query", "tags"]
-        if not any([field in query_params.keys() for field in required_any]):
-            if not custom_query:
-                message = "Empty query!"
-                message_class = "warning"
-                return {"message": message, "class": message_class}
-
-        # Check for a source
-        if "source" in query_params:
-            source = query_params["source"]
-
-            if source in settings.SOURCES_RESTRICTED:
-                if not request.user.has_perm("core.restricted_sources"):
-                    message = "Access denied"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            elif source not in settings.MAIN_SOURCES:
-                message = "Invalid source"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-
-            if source == "all":
-                source = None  # the next block will populate it
-
-        if source:
-            sources = [source]
-        else:
-            sources = list(settings.MAIN_SOURCES)
-            if request.user.has_perm("core.restricted_sources"):
-                for source_iter in settings.SOURCES_RESTRICTED:
-                    sources.append(source_iter)
-
-        add_top_tmp = {"bool": {"should": []}}
-        total_count = 0
-        for source_iter in sources:
-            add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
-            total_count += 1
-        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
-        if not total_count == total_sources:
-            add_top.append(add_top_tmp)
+            size = parse_size(query_params, sizes)
+            if isinstance(size, dict):
+                return size
+
+        rule_object = parse_rule(request.user, query_params)
+        if isinstance(rule_object, dict):
+            return rule_object
+
+        if rule_object is not None:
+            index = settings.INDEX_RULE_STORAGE
+            add_bool.append({"rule_id": str(rule_object.id)})
+        else:
+            # I - Index
+            index = parse_index(request.user, query_params)
+            if isinstance(index, dict):
+                return index
+
+        # Q/T - Query/Tags
+        search_query = self.parse_query(
+            query_params, tags, size, custom_query, add_bool
+        )
+        # Query should be a dict, so check if it contains message here
+        if "message" in search_query:
+            return search_query
+
+        # S - Sources
+        sources = parse_source(request.user, query_params)
+        if isinstance(sources, dict):
+            return sources
+        total_count = len(sources)
+        # Total is -1 due to the "all" source
+        total_sources = (
+            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
+        )
+
+        # If the sources the user has access to are equal to all
+        # possible sources, then we don't need to add the source
+        # filter to the query.
+        if total_count != total_sources:
+            add_top_tmp = {"bool": {"should": []}}
+            for source_iter in sources:
+                add_top_tmp["bool"]["should"].append(
+                    {"match_phrase": {"src": source_iter}}
+                )
+            if query_params["source"] != "all":
+                add_top.append(add_top_tmp)
 
-        # Date/time range
-        if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
-            query_params.keys()
-        ):
-            from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
-            to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
-            from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
-            to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
-            from_ts = int(from_ts.timestamp())
-            to_ts = int(to_ts.timestamp())
+        # R - Ranges
+        # date_query = False
+        from_ts, to_ts = parse_date_time(query_params)
+        if from_ts:
             range_query = {
                 "range": {
                     "ts": {
@@ -188,115 +241,87 @@ class ManticoreBackend(StorageBackend):
                 }
             }
             add_top.append(range_query)
 
-        # Sorting
-        if "sorting" in query_params:
-            sorting = query_params["sorting"]
-            if sorting not in ("asc", "desc", "none"):
-                message = "Invalid sort"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if sorting in ("asc", "desc"):
-                sort = [
-                    {
-                        "ts": {
-                            "order": sorting,
-                        }
-                    }
-                ]
+        # S - Sort
+        sort = parse_sort(query_params)
+        if isinstance(sort, dict):
+            return sort
+
+        if rule_object is not None:
+            field = "match_ts"
+        else:
+            field = "ts"
+        if sort:
+            # For Druid compatibility
+            sort_map = {"ascending": "asc", "descending": "desc"}
+            sorting = [
+                {
+                    field: {
+                        "order": sort_map[sort],
+                    }
+                }
+            ]
+            search_query["sort"] = sorting
 
-        # Sentiment handling
-        if "check_sentiment" in query_params:
-            if "sentiment_method" not in query_params:
-                message = "No sentiment method"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if "sentiment" in query_params:
-                sentiment = query_params["sentiment"]
-                try:
-                    sentiment = float(sentiment)
-                except ValueError:
-                    message = "Sentiment is not a float"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            sentiment_method = query_params["sentiment_method"]
-            range_query_compare = {"range": {"sentiment": {}}}
+        # S - Sentiment
+        sentiment_r = parse_sentiment(query_params)
+        if isinstance(sentiment_r, dict):
+            return sentiment_r
+        if sentiment_r:
+            if rule_object is not None:
+                sentiment_index = "meta.aggs.avg_sentiment.value"
+            else:
+                sentiment_index = "sentiment"
+            sentiment_method, sentiment = sentiment_r
+            range_query_compare = {"range": {sentiment_index: {}}}
             range_query_precise = {
                 "match": {
-                    "sentiment": None,
+                    sentiment_index: None,
                 }
             }
             if sentiment_method == "below":
-                range_query_compare["range"]["sentiment"]["lt"] = sentiment
+                range_query_compare["range"][sentiment_index]["lt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "above":
-                range_query_compare["range"]["sentiment"]["gt"] = sentiment
+                range_query_compare["range"][sentiment_index]["gt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "exact":
-                range_query_precise["match"]["sentiment"] = sentiment
+                range_query_precise["match"][sentiment_index] = sentiment
                 add_top.append(range_query_precise)
             elif sentiment_method == "nonzero":
-                range_query_precise["match"]["sentiment"] = 0
+                range_query_precise["match"][sentiment_index] = 0
                 add_top_negative.append(range_query_precise)
 
-        if add_bool:
-            # if "bool" not in search_query["query"]:
-            # search_query["query"]["bool"] = {}
-            # if "must" not in search_query["query"]["bool"]:
-            # search_query["query"]["bool"] = {"must": []}
-
-            for item in add_bool:
-                search_query["query"]["bool"]["must"].append({"match": item})
-
-        if add_top:
-            for item in add_top:
-                search_query["query"]["bool"]["must"].append(item)
-        if add_top_negative:
-            for item in add_top_negative:
-                if "must_not" in search_query["query"]["bool"]:
-                    search_query["query"]["bool"]["must_not"].append(item)
-                else:
-                    search_query["query"]["bool"]["must_not"] = [item]
-        if sort:
-            search_query["sort"] = sort
-
-        pprint(search_query)
-        results = self.run_query(
-            self.client,
-            request.user,  # passed through run_main_query to filter_blacklisted
+        # Add in the additional information we already populated
+        self.add_bool(search_query, add_bool)
+        self.add_top(search_query, add_top)
+        self.add_top(search_query, add_top_negative, negative=True)
+
+        response = self.query(
+            request.user,
             search_query,
+            index=index,
         )
-        if not results:
+        if not response:
             message = "Error running query"
             message_class = "danger"
             return {"message": message, "class": message_class}
         # results = results.to_dict()
-        if "error" in results:
-            message = results["error"]
+        if "error" in response:
+            message = response["error"]
             message_class = "danger"
             return {"message": message, "class": message_class}
-        results_parsed = parse_results(results)
-        if annotate:
-            annotate_results(results_parsed)
-        if "dedup" in query_params:
-            if query_params["dedup"] == "on":
-                dedup = True
-            else:
-                dedup = False
-        else:
-            dedup = False
-
-        if reverse:
-            results_parsed = results_parsed[::-1]
-
-        if dedup:
-            if not dedup_fields:
-                dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
-            results_parsed = dedup_list(results_parsed, dedup_fields)
-        context = {
-            "object_list": results_parsed,
-            "card": results["hits"]["total"],
-            "took": results["took"],
-        }
-        if "cache" in results:
-            context["cache"] = results["cache"]
+        if "message" in response:
+            return response
+
+        # A/D/R - Annotate/Dedup/Reverse
+        response["object_list"] = self.process_results(
+            response["object_list"],
+            annotate=annotate,
+            dedup=dedup,
+            dedup_fields=dedup_fields,
+            reverse=reverse,
+        )
+
+        context = response
         return context
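Both run_query helpers are thin wrappers over Manticore's JSON-over-HTTP API. A self-contained sketch of the same round-trip, with the URL and index name as placeholder values rather than project settings:

# Standalone sketch of the round-trip run_query() performs.
# http://localhost:9308 and "main" are placeholders, not project settings.
import requests

body = {
    "index": "main",
    "limit": 5,
    "query": {"bool": {"must": [{"query_string": "hello"}]}},
}
response = requests.post("http://localhost:9308/json/search", json=body)
hits = response.json()["hits"]["hits"]  # the shape parse_results() consumes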
302 core/db/manticore_orig.py Normal file
@@ -0,0 +1,302 @@
+import logging
+from datetime import datetime
+from pprint import pprint
+
+import requests
+from django.conf import settings
+
+from core.db import StorageBackend, add_defaults, dedup_list
+from core.db.processing import annotate_results, parse_results
+
+logger = logging.getLogger(__name__)
+
+
+class ManticoreBackend(StorageBackend):
+    def __init__(self):
+        super().__init__("manticore")
+
+    def initialise(self, **kwargs):
+        """
+        Initialise the Manticore client
+        """
+        pass  # we use requests
+
+    def construct_query(self, query, size, index, blank=False):
+        """
+        Accept some query parameters and construct an OpenSearch query.
+        """
+        if not size:
+            size = 5
+        query_base = {
+            "index": index,
+            "limit": size,
+            "query": {"bool": {"must": []}},
+        }
+        query_string = {
+            "query_string": query,
+        }
+        if not blank:
+            query_base["query"]["bool"]["must"].append(query_string)
+        return query_base
+
+    def run_query(self, client, user, search_query):
+        response = requests.post(
+            f"{settings.MANTICORE_URL}/json/search", json=search_query
+        )
+        return response
+
+    def query_results(
+        self,
+        request,
+        query_params,
+        size=None,
+        annotate=True,
+        custom_query=False,
+        reverse=False,
+        dedup=False,
+        dedup_fields=None,
+        tags=None,
+    ):
+        query = None
+        message = None
+        message_class = None
+        add_bool = []
+        add_top = []
+        add_top_negative = []
+        sort = None
+        query_created = False
+        source = None
+        add_defaults(query_params)
+        # Check size
+        if request.user.is_anonymous:
+            sizes = settings.MANTICORE_MAIN_SIZES_ANON
+        else:
+            sizes = settings.MANTICORE_MAIN_SIZES
+        if not size:
+            if "size" in query_params:
+                size = query_params["size"]
+                if size not in sizes:
+                    message = "Size is not permitted"
+                    message_class = "danger"
+                    return {"message": message, "class": message_class}
+                size = int(size)
+            else:
+                size = 20
+
+        # Check index
+        if "index" in query_params:
+            index = query_params["index"]
+            if index == "main":
+                index = settings.MANTICORE_INDEX_MAIN
+            else:
+                if not request.user.has_perm(f"core.index_{index}"):
+                    message = "Not permitted to search by this index"
+                    message_class = "danger"
+                    return {
+                        "message": message,
+                        "class": message_class,
+                    }
+                if index == "meta":
+                    index = settings.MANTICORE_INDEX_META
+                elif index == "internal":
+                    index = settings.MANTICORE_INDEX_INT
+                else:
+                    message = "Index is not valid."
+                    message_class = "danger"
+                    return {
+                        "message": message,
+                        "class": message_class,
+                    }
+        else:
+            index = settings.MANTICORE_INDEX_MAIN
+
+        # Create the search query
+        if "query" in query_params:
+            query = query_params["query"]
+            search_query = self.construct_query(query, size, index)
+            query_created = True
+        else:
+            if custom_query:
+                search_query = custom_query
+
+        if tags:
+            # Get a blank search query
+            if not query_created:
+                search_query = self.construct_query(None, size, index, blank=True)
+                query_created = True
+            for tagname, tagvalue in tags.items():
+                add_bool.append({tagname: tagvalue})
+
+        required_any = ["query_full", "query", "tags"]
+        if not any([field in query_params.keys() for field in required_any]):
+            if not custom_query:
+                message = "Empty query!"
+                message_class = "warning"
+                return {"message": message, "class": message_class}
+
+        # Check for a source
+        if "source" in query_params:
+            source = query_params["source"]
+
+            if source in settings.SOURCES_RESTRICTED:
+                if not request.user.has_perm("core.restricted_sources"):
+                    message = "Access denied"
+                    message_class = "danger"
+                    return {"message": message, "class": message_class}
+            elif source not in settings.MAIN_SOURCES:
+                message = "Invalid source"
+                message_class = "danger"
+                return {"message": message, "class": message_class}
+
+            if source == "all":
+                source = None  # the next block will populate it
+
+        if source:
+            sources = [source]
+        else:
+            sources = list(settings.MAIN_SOURCES)
+            if request.user.has_perm("core.restricted_sources"):
+                for source_iter in settings.SOURCES_RESTRICTED:
+                    sources.append(source_iter)
+
+        add_top_tmp = {"bool": {"should": []}}
+        total_count = 0
+        for source_iter in sources:
+            add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
+            total_count += 1
+        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
+        if not total_count == total_sources:
+            add_top.append(add_top_tmp)
+
+        # Date/time range
+        if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
+            query_params.keys()
+        ):
+            from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
+            to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
+            from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
+            to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
+            from_ts = int(from_ts.timestamp())
+            to_ts = int(to_ts.timestamp())
+            range_query = {
+                "range": {
+                    "ts": {
+                        "gt": from_ts,
+                        "lt": to_ts,
+                    }
+                }
+            }
+            add_top.append(range_query)
+
+        # Sorting
+        if "sorting" in query_params:
+            sorting = query_params["sorting"]
+            if sorting not in ("asc", "desc", "none"):
+                message = "Invalid sort"
+                message_class = "danger"
+                return {"message": message, "class": message_class}
+            if sorting in ("asc", "desc"):
+                sort = [
+                    {
+                        "ts": {
+                            "order": sorting,
+                        }
+                    }
+                ]
+
+        # Sentiment handling
+        if "check_sentiment" in query_params:
+            if "sentiment_method" not in query_params:
+                message = "No sentiment method"
+                message_class = "danger"
+                return {"message": message, "class": message_class}
+            if "sentiment" in query_params:
+                sentiment = query_params["sentiment"]
+                try:
+                    sentiment = float(sentiment)
+                except ValueError:
+                    message = "Sentiment is not a float"
+                    message_class = "danger"
+                    return {"message": message, "class": message_class}
+            sentiment_method = query_params["sentiment_method"]
+            range_query_compare = {"range": {"sentiment": {}}}
+            range_query_precise = {
+                "match": {
+                    "sentiment": None,
+                }
+            }
+            if sentiment_method == "below":
+                range_query_compare["range"]["sentiment"]["lt"] = sentiment
+                add_top.append(range_query_compare)
+            elif sentiment_method == "above":
+                range_query_compare["range"]["sentiment"]["gt"] = sentiment
+                add_top.append(range_query_compare)
+            elif sentiment_method == "exact":
+                range_query_precise["match"]["sentiment"] = sentiment
+                add_top.append(range_query_precise)
+            elif sentiment_method == "nonzero":
+                range_query_precise["match"]["sentiment"] = 0
+                add_top_negative.append(range_query_precise)
+
+        if add_bool:
+            # if "bool" not in search_query["query"]:
+            # search_query["query"]["bool"] = {}
+            # if "must" not in search_query["query"]["bool"]:
+            # search_query["query"]["bool"] = {"must": []}
+
+            for item in add_bool:
+                search_query["query"]["bool"]["must"].append({"match": item})
+
+        if add_top:
+            for item in add_top:
+                search_query["query"]["bool"]["must"].append(item)
+        if add_top_negative:
+            for item in add_top_negative:
+                if "must_not" in search_query["query"]["bool"]:
+                    search_query["query"]["bool"]["must_not"].append(item)
+                else:
+                    search_query["query"]["bool"]["must_not"] = [item]
+        if sort:
+            search_query["sort"] = sort
+
+        pprint(search_query)
+        results = self.run_query(
+            self.client,
+            request.user,  # passed through run_main_query to filter_blacklisted
+            search_query,
+        )
+        if not results:
+            message = "Error running query"
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        # results = results.to_dict()
+        if "error" in results:
+            message = results["error"]
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        results_parsed = parse_results(results)
+        if annotate:
+            annotate_results(results_parsed)
+        if "dedup" in query_params:
+            if query_params["dedup"] == "on":
+                dedup = True
+            else:
+                dedup = False
+        else:
+            dedup = False
+
+        if reverse:
+            results_parsed = results_parsed[::-1]
+
+        if dedup:
+            if not dedup_fields:
+                dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
+            results_parsed = dedup_list(results_parsed, dedup_fields)
+        context = {
+            "object_list": results_parsed,
+            "card": results["hits"]["total"],
+            "took": results["took"],
+        }
+        if "cache" in results:
+            context["cache"] = results["cache"]
+        return context
core/db/processing.py
@@ -1,5 +1,5 @@
 from datetime import datetime
+import ast
 from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online
 
 
@@ -39,26 +39,35 @@ def annotate_results(results):
             ]
         )
     )
+    online_info = None
+    num_users = None
+    num_chans = None
+    if nicks:
         online_info = annotate_online(net, nicks)
     # Annotate the number of users in the channel
+    if channels:
         num_users = annotate_num_users(net, channels)
     # Annotate the number channels the user is on
+    if nicks:
         num_chans = annotate_num_chans(net, nicks)
     for item in results:
         if "net" in item:
             if item["net"] == net:
                 if "nick" in item:
+                    if online_info:
                         if item["nick"] in online_info:
                             item["online"] = online_info[item["nick"]]
                 if "channel" in item:
+                    if num_users:
                         if item["channel"] in num_users:
                             item["num_users"] = num_users[item["channel"]]
                 if "nick" in item:
+                    if num_chans:
                         if item["nick"] in num_chans:
                             item["num_chans"] = num_chans[item["nick"]]
 
 
-def parse_results(results):
+def parse_results(results, meta=None):
     results_parsed = []
     stringify = ["host", "channel"]
     if "hits" in results.keys():
@@ -83,6 +92,11 @@ def parse_results(results):
             for field in list(element.keys()):
                 if element[field] == "":
                     del element[field]
+            # Unfold the tokens
+            if "tokens" in element:
+                if element["tokens"].startswith('["') or element["tokens"].startswith("['"):
+                    tokens_parsed = ast.literal_eval(element["tokens"])
+                    element["tokens"] = tokens_parsed
+
             # Split the timestamp into date and time
             if "ts" not in element:
@@ -110,6 +124,16 @@ def parse_results(results):
             else:
                 element["time"] = time
             results_parsed.append(element)
+    if meta:
+        meta = {"aggs": {}}
+        if "aggregations" in results:
+            for field in ["avg_sentiment"]:  # Add other number fields here
+                if field in results["aggregations"]:
+                    meta["aggs"][field] = results["aggregations"][field]
+        total_hits = results["hits"]["total"]["value"]
+        meta["total_hits"] = total_hits
+        return (meta, results_parsed)
+
     return results_parsed
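With meta=True, parse_results now returns a (meta, results) pair instead of the bare list. A sketch against a hand-built response in the Elasticsearch-style shape it expects; values are illustrative and only the fields the function reads are shown:

# Illustrative input for parse_results(); values are made up.
fake_response = {
    "hits": {
        "total": {"value": 1},
        "hits": [{"_id": "1", "_source": {"msg": "hi", "ts": "2023-01-01T00:00"}}],
    },
    "aggregations": {"avg_sentiment": {"value": 0.4}},
}
meta, rows = parse_results(fake_response, meta=True)
# meta -> {"aggs": {"avg_sentiment": {"value": 0.4}}, "total_hits": 1}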
102 core/forms.py
@@ -2,44 +2,15 @@ from django import forms
 from django.contrib.auth.forms import UserCreationForm
 from django.core.exceptions import FieldDoesNotExist
 from django.forms import ModelForm
+from mixins.restrictions import RestrictedFormMixin
 
-from core.db import QueryError
-from core.lib.rules import NotificationRuleData
+from core.db.storage import db
+from core.lib.parsing import QueryError
+from core.lib.rules import NotificationRuleData, RuleParseError
 
 from .models import NotificationRule, NotificationSettings, User
 
-# from django.forms import ModelForm
+# flake8: noqa: E501
 
-# Create your forms here.
-class RestrictedFormMixin:
-    """
-    This mixin is used to restrict the queryset of a form to the current user.
-    The request object is passed from the view.
-    Fieldargs is used to pass additional arguments to the queryset filter.
-    """
-
-    fieldargs = {}
-
-    def __init__(self, *args, **kwargs):
-        # self.fieldargs = {}
-        self.request = kwargs.pop("request")
-        super().__init__(*args, **kwargs)
-        for field in self.fields:
-            # Check it's not something like a CharField which has no queryset
-            if not hasattr(self.fields[field], "queryset"):
-                continue
-
-            model = self.fields[field].queryset.model
-            # Check if the model has a user field
-            try:
-                model._meta.get_field("user")
-                # Add the user to the queryset filters
-                self.fields[field].queryset = model.objects.filter(
-                    user=self.request.user, **self.fieldargs.get(field, {})
-                )
-            except FieldDoesNotExist:
-                pass
-
 
 class NewUserForm(UserCreationForm):
@@ -71,43 +42,88 @@ class CustomUserCreationForm(UserCreationForm):
 
 
 class NotificationSettingsForm(RestrictedFormMixin, ModelForm):
+    def __init__(self, *args, **kwargs):
+        super(NotificationSettingsForm, self).__init__(*args, **kwargs)
+        self.fields["url"].label = "URL"
+
     class Meta:
         model = NotificationSettings
         fields = (
-            "ntfy_topic",
-            "ntfy_url",
+            "topic",
+            "url",
+            "service",
         )
         help_texts = {
-            "ntfy_topic": "The topic to send notifications to.",
-            "ntfy_url": "Custom NTFY server. Leave blank to use the default server.",
+            "topic": "The topic to send notifications to.",
+            "url": "Custom NTFY server/webhook destination. Leave blank to use the default server for NTFY. For webhooks this field is required.",
+            "service": "The service to use for notifications.",
         }
 
+    def clean(self):
+        cleaned_data = super(NotificationSettingsForm, self).clean()
+        if "service" in cleaned_data:
+            if cleaned_data["service"] == "webhook":
+                if not cleaned_data.get("url"):
+                    self.add_error(
+                        "url",
+                        "You must set a URL for webhooks.",
+                    )
 
 
 class NotificationRuleForm(RestrictedFormMixin, ModelForm):
+    def __init__(self, *args, **kwargs):
+        super(NotificationRuleForm, self).__init__(*args, **kwargs)
+        self.fields["url"].label = "URL"
+
     class Meta:
         model = NotificationRule
         fields = (
             "name",
             "data",
+            "interval",
+            "window",
+            "amount",
             "priority",
             "topic",
+            "url",
+            "service",
+            "policy",
+            "ingest",
             "enabled",
         )
         help_texts = {
             "name": "The name of the rule.",
-            "priority": "The priority of the rule.",
+            "priority": "The notification priority of the rule.",
+            "url": "Custom NTFY server/webhook destination. Leave blank to use the default server for NTFY. For webhooks this field is required.",
+            "service": "The service to use for notifications",
             "topic": "The topic to send notifications to. Leave blank for default.",
             "enabled": "Whether the rule is enabled.",
             "data": "The notification rule definition.",
+            "interval": "How often to run the search. On demand evaluates messages as they are received, without running a scheduled search. The remaining options schedule a search of the database with the window below.",
+            "window": "Time window to search: 1d, 1h, 1m, 1s, etc.",
+            "amount": "Amount of matches to be returned for scheduled queries. Cannot be used with on-demand queries.",
+            "policy": "When to trigger this policy.",
+            "ingest": "Whether to ingest matches.",
        }
 
     def clean(self):
         cleaned_data = super(NotificationRuleForm, self).clean()
-        data = cleaned_data.get("data")
+        # TODO: should this be in rules.py?
+        if "service" in cleaned_data:
+            if cleaned_data["service"] == "webhook":
+                if not cleaned_data.get("url"):
+                    self.add_error(
+                        "url",
+                        "You must set a URL for webhooks.",
+                    )
         try:
-            parsed_data = NotificationRuleData(self.request.user, data)
-        except ValueError as e:
-            self.add_error("data", f"Parsing error: {e}")
+            # Passing db to avoid circular import
+            parsed_data = NotificationRuleData(self.request.user, cleaned_data, db=db)
+            if cleaned_data["enabled"]:
+                parsed_data.test_schedule()
+        except RuleParseError as e:
+            self.add_error(e.field, f"Parsing error: {e}")
             return
         except QueryError as e:
            self.add_error("data", f"Query error: {e}")
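RestrictedFormMixin (now imported from mixins.restrictions) pops request before the normal ModelForm init, so views must pass it explicitly. A hypothetical view-side sketch, not part of this changeset:

# Hypothetical usage; the mixin consumes kwargs["request"].
def rule_create(request):
    form = NotificationRuleForm(request.POST or None, request=request)
    if request.method == "POST" and form.is_valid():
        rule = form.save(commit=False)
        rule.user = request.user
        rule.save()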
@@ -84,4 +84,5 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
         query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
     if extra_should2:
         query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should2]}})
+
     return query
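Each extra_should list is wrapped in its own bool/should clause before being appended to the outer must list, so the group is OR'd internally and AND'd against the rest of the query. A minimal sketch of that nesting with placeholder clauses:

# Sketch of the nesting produced above; clause contents are placeholders.
query = {"query": {"bool": {"must": []}}}
extra_should = [{"match": {"nick": "a"}}, {"match": {"nick": "b"}}]
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
# => must AND (nick:a OR nick:b)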
@@ -3,7 +3,7 @@ from math import ceil
 from django.conf import settings
 from numpy import array_split
 
-from core.db.elastic import client, run_main_query
+from core.db.storage import db
 
 
 def construct_query(net, nicks):
@@ -43,26 +43,13 @@ def get_meta(request, net, nicks, iter=True):
             break
         meta_tmp = []
         query = construct_query(net, nicks_chunked)
-        results = run_main_query(
-            client,
+        results = db.query(
             request.user,
             query,
-            custom_query=True,
-            index=settings.ELASTICSEARCH_INDEX_META,
+            index=settings.INDEX_META,
         )
-        if "hits" in results.keys():
-            if "hits" in results["hits"]:
-                for item in results["hits"]["hits"]:
-                    element = item["_source"]
-                    element["id"] = item["_id"]
-
-                    # Split the timestamp into date and time
-                    ts = element["ts"]
-                    ts_spl = ts.split("T")
-                    date = ts_spl[0]
-                    time = ts_spl[1]
-                    element["date"] = date
-                    element["time"] = time
-                    meta_tmp.append(element)
+        if "object_list" in results.keys():
+            for element in results["object_list"]:
+                meta_tmp.append(element)
         for x in meta_tmp:
             if x not in meta:
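Callers now read the pre-parsed object_list instead of walking hits.hits themselves. A sketch of the shape db.query hands back, as far as this diff shows it (card and took appear in the Manticore backend's context dict; all values illustrative):

# Illustrative shape of a db.query() result as consumed by get_meta().
results = {
    "object_list": [
        {"msg": "hi", "nick": "user1", "date": "2023-01-01", "time": "00:00"},
    ],
    "card": 1,  # total hits
    "took": 3,  # backend time
}
for element in results["object_list"]:
    print(element["nick"], element["msg"])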
@@ -3,7 +3,7 @@ from math import ceil
 from django.conf import settings
 from numpy import array_split
 
-from core.lib.druid import client, run_main_query
+from core.db.storage import db
 
 
 def construct_query(net, nicks):
@@ -45,7 +45,7 @@ def get_nicks(request, net, nicks, iter=True):
         if len(nicks_chunked) == 0:
             break
         query = construct_query(net, nicks_chunked)
-        results = run_main_query(client, request.user, query, custom_query=True)
+        results = db.query(request.user, query)
         if "hits" in results.keys():
             if "hits" in results["hits"]:
                 for item in results["hits"]["hits"]:
@@ -8,9 +8,27 @@ log = logs.get_logger(__name__)
 
 
 # Actual function to send a message to a topic
-def raw_sendmsg(msg, title=None, priority=None, tags=None, url=None, topic=None):
-    if url is None:
-        url = NTFY_URL
+def ntfy_sendmsg(**kwargs):
+    """
+    Send a message to a topic using NTFY.
+    kwargs:
+        msg: Message to send, must be specified
+        notification_settings: Notification settings, must be specified
+        url: URL to NTFY server, can be None to use default
+        topic: Topic to send message to, must be specified
+        priority: Priority of message, optional
+        title: Title of message, optional
+        tags: Tags to add to message, optional
+    """
+    msg = kwargs.get("msg", None)
+    notification_settings = kwargs.get("notification_settings")
+
+    title = kwargs.get("title", None)
+    priority = notification_settings.get("priority", None)
+    tags = kwargs.get("tags", None)
+    url = notification_settings.get("url") or NTFY_URL
+    topic = notification_settings.get("topic", None)
+
     headers = {"Title": "Fisk"}
     if title:
         headers["Title"] = title
@@ -18,22 +36,72 @@ def raw_sendmsg(msg, title=None, priority=None, tags=None, url=None, topic=None)
         headers["Priority"] = priority
     if tags:
         headers["Tags"] = tags
+    try:
         requests.post(
             f"{url}/{topic}",
             data=msg,
             headers=headers,
         )
+    except requests.exceptions.ConnectionError as e:
+        log.error(f"Error sending notification: {e}")
+
+
+def webhook_sendmsg(**kwargs):
+    """
+    Send a message to a webhook.
+    kwargs:
+        msg: Message to send, must be specified
+        notification_settings: Notification settings, must be specified
+        url: URL to webhook, must be specified"""
+    msg = kwargs.get("msg", None)
+    notification_settings = kwargs.get("notification_settings")
+    url = notification_settings.get("url")
+    headers = {"Content-type": "application/json"}
+    try:
+        requests.post(
+            f"{url}",
+            headers=headers,
+            data=msg,
+        )
+    except requests.exceptions.ConnectionError as e:
+        log.error(f"Error sending webhook: {e}")
 
 
 # Sendmsg helper to send a message to a user's notification settings
-def sendmsg(user, *args, **kwargs):
-    notification_settings = user.get_notification_settings()
-
-    if "topic" not in kwargs:
-        if notification_settings.ntfy_topic is None:
-            # No topic set, so don't send
-            return
-        else:
-            kwargs["topic"] = notification_settings.ntfy_topic
-
-    raw_sendmsg(*args, **kwargs, url=notification_settings.ntfy_url)
+def sendmsg(**kwargs):
+    """
+    Send a message to a user's notification settings.
+    Fetches the user's default notification settings if not specified.
+    kwargs:
+        user: User to send message to, must be specified
+        notification_settings: Notification settings, optional
+        service: Notification service to use
+
+    kwargs for both services:
+        msg: Message to send, must be specified
+        notification_settings: Notification settings, must be specified
+        url: URL to NTFY server, can be None to use default
+
+    extra kwargs for ntfy:
+        title: Title of message, optional
+        tags: Tags to add to message, optional
+        notification_settings: Notification settings, must be specified
+        topic: Topic to send message to, must be specified
+        priority: Priority of message, optional
+    """
+    user = kwargs.get("user", None)
+    notification_settings = kwargs.get(
+        "notification_settings", user.get_notification_settings().__dict__
+    )
+    if not notification_settings:
+        return
+
+    service = notification_settings.get("service")
+    if service == "none":
+        # Don't send anything
+        return
+
+    if service == "ntfy":
+        ntfy_sendmsg(**kwargs)
+    elif service == "webhook":
+        webhook_sendmsg(**kwargs)
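A sketch of calling the new dispatcher with an explicit settings dict rather than the user's stored defaults (all values illustrative):

# Illustrative call; the keys mirror what ntfy_sendmsg() reads from
# notification_settings (service / url / topic / priority).
sendmsg(
    user=some_user,  # placeholder for a core User instance
    msg="Rule matched: 3 new results",
    title="Rule alert",
    notification_settings={
        "service": "ntfy",
        "url": None,  # falls back to the default NTFY_URL
        "topic": "my-alerts",
        "priority": "high",
    },
)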
187 core/lib/parsing.py Normal file
@@ -0,0 +1,187 @@
+from datetime import datetime
+
+from django.conf import settings
+from django.core.exceptions import ValidationError
+
+from core.models import NotificationRule
+
+
+class QueryError(Exception):
+    pass
+
+
+def parse_rule(user, query_params):
+    """
+    Parse a rule query.
+    """
+    if "rule" in query_params:
+        try:
+            rule_object = NotificationRule.objects.filter(id=query_params["rule"])
+        except ValidationError:
+            message = "Rule is not a valid UUID"
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        if not rule_object.exists():
+            message = "Rule does not exist"
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        rule_object = rule_object.first()
+        if not rule_object.user == user:
+            message = "Rule does not belong to you"
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        return rule_object
+    else:
+        return None
+
+
+def parse_size(query_params, sizes):
+    if "size" in query_params:
+        size = query_params["size"]
+        if size not in sizes:
+            message = "Size is not permitted"
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        size = int(size)
+    else:
+        size = 15
+
+    return size
+
+
+def parse_index(user, query_params, raise_error=False):
+    if "index" in query_params:
+        index = query_params["index"]
+        if index == "main":
+            index = settings.INDEX_MAIN
+        else:
+            if not user.has_perm(f"core.index_{index}"):
+                message = f"Not permitted to search by this index: {index}"
+                if raise_error:
+                    raise QueryError(message)
+                message_class = "danger"
+                return {
+                    "message": message,
+                    "class": message_class,
+                }
+            if index == "meta":
+                index = settings.INDEX_META
+            elif index == "internal":
+                index = settings.INDEX_INT
+            elif index == "restricted":
+                if not user.has_perm("core.restricted_sources"):
+                    message = f"Not permitted to search by this index: {index}"
+                    if raise_error:
+                        raise QueryError(message)
+                    message_class = "danger"
+                    return {
+                        "message": message,
+                        "class": message_class,
+                    }
+                index = settings.INDEX_RESTRICTED
+            else:
+                message = f"Index is not valid: {index}"
+                if raise_error:
+                    raise QueryError(message)
+                message_class = "danger"
+                return {
+                    "message": message,
+                    "class": message_class,
+                }
+    else:
+        index = settings.INDEX_MAIN
+
+    return index
+
+
+def parse_source(user, query_params, raise_error=False):
+    source = None
+    if "source" in query_params:
+        source = query_params["source"]
+
+        # Validate permissions for restricted sources
+        if source in settings.SOURCES_RESTRICTED:
+            if not user.has_perm("core.restricted_sources"):
+                message = f"Access denied: {source}"
+                if raise_error:
+                    raise QueryError(message)
+                message_class = "danger"
+                return {"message": message, "class": message_class}
+
+        # Check validity of source
+        elif source not in settings.MAIN_SOURCES:
+            message = f"Invalid source: {source}"
+            if raise_error:
+                raise QueryError(message)
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+
+        if source == "all":
+            source = None  # the next block will populate it
+
+    if source:
+        sources = [source]
+    else:
+        # Here we need to populate what "all" means for the user.
+        # They may only have access to a subset of the sources.
+        # We build a custom source list with ones they have access
+        # to, and then remove "all" from the list.
+        sources = list(settings.MAIN_SOURCES)
+        if user.has_perm("core.restricted_sources"):
+            # If the user can use restricted sources, add them in.
+            for source_iter in settings.SOURCES_RESTRICTED:
+                sources.append(source_iter)
+
+        # Get rid of "all", it's just a meta-source
+        if "all" in sources:
+            sources.remove("all")
+
+    return sources
+
+
+def parse_sort(query_params):
+    sort = None
+    if "sorting" in query_params:
+        sorting = query_params["sorting"]
+        if sorting not in ("asc", "desc", "none"):
+            message = "Invalid sort"
+            message_class = "danger"
+            return {"message": message, "class": message_class}
+        if sorting == "asc":
+            sort = "ascending"
+        elif sorting == "desc":
+            sort = "descending"
+    return sort
+
+
+def parse_date_time(query_params):
+    if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
+        query_params.keys()
+    ):
+        from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
+        to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
+        from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
+        to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
+
+        return (from_ts, to_ts)
|
return (None, None)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_sentiment(query_params):
|
||||||
|
sentiment = None
|
||||||
|
if "check_sentiment" in query_params:
|
||||||
|
if "sentiment_method" not in query_params:
|
||||||
|
message = "No sentiment method"
|
||||||
|
message_class = "danger"
|
||||||
|
return {"message": message, "class": message_class}
|
||||||
|
if "sentiment" in query_params:
|
||||||
|
sentiment = query_params["sentiment"]
|
||||||
|
try:
|
||||||
|
sentiment = float(sentiment)
|
||||||
|
except ValueError:
|
||||||
|
message = "Sentiment is not a float"
|
||||||
|
message_class = "danger"
|
||||||
|
return {"message": message, "class": message_class}
|
||||||
|
sentiment_method = query_params["sentiment_method"]
|
||||||
|
|
||||||
|
return (sentiment_method, sentiment)
|
||||||
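A convention worth noting across these helpers: on bad input they either raise QueryError (when raise_error=True) or return a {"message": ..., "class": "danger"} dict for the template layer, so callers must check the return type. The timestamp handling in parse_date_time combines separate date and time fields; a small self-contained check (input values invented):

    from datetime import datetime

    query_params = {"from_date": "2023-01-14", "from_time": "14:33"}
    from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
    # Same format string as parse_date_time above
    assert datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ") == datetime(2023, 1, 14, 14, 33)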
@@ -2,135 +2,787 @@ from yaml import dump, load
 from yaml.parser import ParserError
 from yaml.scanner import ScannerError
 
-from core.db.storage import db
-from core.models import NotificationRule
-
 try:
     from yaml import CDumper as Dumper
     from yaml import CLoader as Loader
 except ImportError:
     from yaml import Loader, Dumper
 
+import uuid
+from copy import deepcopy
+from datetime import datetime
+
+import orjson
+from siphashc import siphash
+
 from core.lib.notify import sendmsg
+from core.lib.parsing import parse_index, parse_source
 from core.util import logs
 
 log = logs.get_logger("rules")
 
+SECONDS_PER_UNIT = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
+MAX_WINDOW = 2592000
+MAX_AMOUNT_NTFY = 10
+MAX_AMOUNT_WEBHOOK = 1000
+HIGH_FREQUENCY_MIN_SEC = 60
-
-def rule_matched(rule, message, matched_fields):
-    title = f"Rule {rule.name} matched"
 
+class RuleParseError(Exception):
+    def __init__(self, message, field):
+        super().__init__(message)
+        self.field = field
+
+
+def format_ntfy(**kwargs):
+    """
+    Format a message for ntfy.
+    If the message is a list, it will be joined with newlines.
+    If the message is None, it will be replaced with an empty string.
+    If specified, `matched` will be pretty-printed in the first line.
+    kwargs:
+        rule: The rule object, must be specified
+        index: The index the rule matched on, can be None
+        message: The message to send, can be None
+        meta:
+            matched: The matched fields, can be None
+            total_hits: The total number of matches, optional
+    """
+    rule = kwargs.get("rule")
+    index = kwargs.get("index")
+    message = kwargs.get("message")
+
+    meta = kwargs.get("meta", {})
+    total_hits = meta.get("total_hits", 0)
+    matched = meta.get("matched")
+
+    if message:
         # Dump the message in YAML for readability
-        message = dump(message, Dumper=Dumper, default_flow_style=False)
-    matched_fields = ", ".join(matched_fields)
+        messages_formatted = ""
+        if isinstance(message, list):
+            for message_iter in message:
+                messages_formatted += dump(
+                    message_iter, Dumper=Dumper, default_flow_style=False
+                )
+                messages_formatted += "\n"
+        else:
+            messages_formatted = dump(message, Dumper=Dumper, default_flow_style=False)
+    else:
+        messages_formatted = ""
+
-    notify_message = f"{rule.name} matched on {matched_fields}\n{message}"
+    if matched:
+        matched = ", ".join([f"{k}: {v}" for k, v in matched.items()])
+    else:
+        matched = ""
+
+    notify_message = f"{rule.name} on {index}: {matched}\n{messages_formatted}"
+    notify_message += f"\nTotal hits: {total_hits}"
     notify_message = notify_message.encode("utf-8", "replace")
+
+    return notify_message
+
+
+def format_webhook(**kwargs):
+    """
+    Format a message for a webhook.
+    Adds some metadata to the message that would normally be only in
+    notification_settings.
+    Dumps the message in JSON.
+    kwargs:
+        rule: The rule object, must be specified
+        index: The index the rule matched on, can be None
+        message: The message to send, can be None, but will be sent as None
+        meta:
+            matched: The matched fields, can be None, but will be sent as None
+            total_hits: The total number of matches, optional
+        notification_settings: The notification settings, must be specified
+            priority: The priority of the message, optional
+            topic: The topic of the message, optional
+    """
+    # rule = kwargs.get("rule")
+    # index = kwargs.get("index")
+    message = kwargs.get("message")
+    meta = kwargs.get("meta")
+
+    notification_settings = kwargs.get("notification_settings")
+    notify_message = {
+        "data": message,
+        "meta": meta,
+    }
+    if "priority" in notification_settings:
+        notify_message["priority"] = notification_settings["priority"]
+    if "topic" in notification_settings:
+        notify_message["topic"] = notification_settings["topic"]
+    notify_message = orjson.dumps(notify_message)
+
+    return notify_message
+
+
+def rule_notify(rule, index, message, meta=None):
+    """
+    Send a notification for a matching rule.
+    Gets the notification settings for the rule.
+    Runs the formatting helpers for the service.
+    :param rule: The rule object, must be specified
+    :param index: The index the rule matched on, can be None
+    :param message: The message to send, can be None
+    :param meta: dict of metadata, contains `aggs` key for the matched fields
+    """
+    # If there is no message, don't say anything matched
+    if message:
+        word = "match"
+    else:
+        word = "no match"
+
+    title = f"Rule {rule.name} {word} on {index}"
+
+    # The user notification settings are merged in with this
+    notification_settings = rule.get_notification_settings()
+    if not notification_settings:
+        # No/invalid notification settings, don't send anything
+        return
+    if notification_settings.get("service") == "none":
+        # Don't send anything
+        return
+
+    # double sigh
+    message_copy = deepcopy(message)
+    for index, _ in enumerate(message_copy):
+        if "meta" in message_copy[index]:
+            del message_copy[index]["meta"]
+
+    # Create a cast we can reuse for the formatting helpers and sendmsg
     cast = {
         "title": title,
-        "priority": str(rule.priority),
+        "user": rule.user,
+        "rule": rule,
+        "index": index,
+        "message": message_copy,
+        "notification_settings": notification_settings,
     }
-    if rule.topic is not None:
-        cast["topic"] = rule.topic
-    sendmsg(rule.user, notify_message, **cast)
+    if meta:
+        cast["meta"] = meta
+
+    if rule.service == "ntfy":
+        cast["msg"] = format_ntfy(**cast)
+
+    elif rule.service == "webhook":
+        cast["msg"] = format_webhook(**cast)
+
+    sendmsg(**cast)
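Read together with format_webhook above, the webhook body is an orjson document with a fixed top-level shape, while priority and topic are only included when present in the settings. An illustrative payload (field values invented; json used here instead of orjson to stay dependency-free):

    import json

    payload = {
        "data": [{"msg": "example line", "src": "irc"}],          # message_copy
        "meta": {"matched": {"tokens": "foo"}, "total_hits": 1},  # meta from the rule run
    }
    payload["topic"] = "alerts"  # only present if set in notification_settings
    print(json.dumps(payload))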
-
-def process_rules(data):
-    all_rules = NotificationRule.objects.filter(enabled=True)
-
-    for index, index_messages in data.items():
-        for message in index_messages:
-            for rule in all_rules:
-                parsed_rule = rule.parse()
-                if "index" not in parsed_rule:
-                    continue
-                if "source" not in parsed_rule:
-                    continue
-                rule_index = parsed_rule["index"]
-                rule_source = parsed_rule["source"]
-                if not type(rule_index) == list:
-                    rule_index = [rule_index]
-                if not type(rule_source) == list:
-                    rule_source = [rule_source]
-                if index not in rule_index:
-                    continue
-                if message["src"] not in rule_source:
-                    continue
-
-                rule_field_length = len(parsed_rule.keys())
-                matched_field_number = 0
-                matched_fields = []
-                for field, value in parsed_rule.items():
-                    if not type(value) == list:
-                        value = [value]
-                    if field == "src":
-                        continue
-                    if field == "tokens":
-                        for token in value:
-                            if "tokens" in message:
-                                if token in message["tokens"]:
-                                    matched_field_number += 1
-                                    matched_fields.append(field)
-                                    # Break out of the token matching loop
-                                    break
-                        # Continue to next field
-                        continue
-                    if field in message and message[field] in value:
-                        matched_field_number += 1
-                        matched_fields.append(field)
-                if matched_field_number == rule_field_length - 2:
-                    rule_matched(rule, message, matched_fields)
+
+
+class NotificationRuleData(object):
+    def __init__(self, user, cleaned_data, db):
+        self.user = user
+        self.object = None
+
+        # We are running live and have been passed a database object
+        if not isinstance(cleaned_data, dict):
+            self.object = cleaned_data
+            cleaned_data = cleaned_data.__dict__
+
+        self.cleaned_data = cleaned_data
+        self.db = db
+        self.data = self.cleaned_data.get("data")
+        self.window = self.cleaned_data.get("window")
+        self.policy = self.cleaned_data.get("policy")
+        self.parsed = None
+        self.aggs = {}
+
+        self.validate_user_permissions()
+
+        self.parse_data()
+        self.ensure_list()
+        self.validate_permissions()
+        self.validate_schedule_fields()
+        self.validate_time_fields()
+        if self.object is not None:
+            self.populate_matched()
+
+    def clear_database_matches(self):
+        """
+        Delete all matches for this rule.
+        """
+        rule_id = str(self.object.id)
+        self.db.delete_rule_entries(rule_id)
+
+    def populate_matched(self):
+        """
+        On first creation, the match field is None. We need to populate it with
+        a dictionary containing the index names as keys and False as values.
+        """
+        if self.object.match is None:
+            self.object.match = {}
+        for index in self.parsed["index"]:
+            if index not in self.object.match:
+                self.object.match[index] = False
+        self.object.save()
+
+    def format_matched(self, messages):
+        matched = {}
+        for message in messages:
+            for field, value in self.parsed.items():
+                if field == "msg":
+                    # Allow partial matches for msg
+                    for msg in value:
+                        if "msg" in message:
+                            if msg.lower() in message["msg"].lower():
+                                matched[field] = msg
+                                # Break out of the msg matching loop
+                                break
+                    # Continue to next field
+                    continue
+                if field == "tokens":
+                    # Allow partial matches for tokens
+                    for token in value:
+                        if "tokens" in message:
+                            if token.lower() in [x.lower() for x in message["tokens"]]:
+                                matched[field] = token
+                                # Break out of the token matching loop
+                                break
+                    # Continue to next field
+                    continue
+                if field in message and message[field] in value:
+                    # Do exact matches for all other fields
+                    matched[field] = message[field]
+        return matched
+
+    def store_match(self, index, match):
+        """
+        Store a match result.
+        Accepts None for the index to set all indices.
+        :param index: the index to store the match for, can be None
+        :param match: the object that matched
+        """
+        if match is not False:
+            # Dump match to JSON while sorting the keys
+            match_normalised = orjson.dumps(match, option=orjson.OPT_SORT_KEYS)
+            match = siphash(self.db.hash_key, match_normalised)
+
-class NotificationRuleData(object):
-    def __init__(self, user, data):
-        self.user = user
-        self.data = data
-        self.parsed = None
-
-        self.parse_data()
-        self.validate_permissions()
+        if self.object.match is None:
+            self.object.match = {}
+        if not isinstance(self.object.match, dict):
+            self.object.match = {}
+
+        if index is None:
+            for index_iter in self.parsed["index"]:
+                self.object.match[index_iter] = match
+        else:
+            self.object.match[index] = match
+        self.object.save()
+        log.debug(f"Stored match: {index} - {match}")
+
+    def get_match(self, index=None, match=None):
+        """
+        Get a match result for an index.
+        If the index is None, it will return True if any index has a match.
+        :param index: the index to get the match for, can be None
+        """
+        if self.object.match is None:
+            self.object.match = {}
+            self.object.save()
+            return None
+        if not isinstance(self.object.match, dict):
+            return None
+
+        if index is None:
+            # Check if we have any matches on all indices
+            values = self.object.match.values()
+            if not values:
+                return None
+            return any(values)
+
+        # Check if it's the same hash
+        if match is not None:
+            match_normalised = orjson.dumps(match, option=orjson.OPT_SORT_KEYS)
+            match = siphash(self.db.hash_key, match_normalised)
+            hash_matches = self.object.match.get(index) == match
+            return hash_matches
+
+        returned_match = self.object.match.get(index, None)
+        if type(returned_match) == int:
+            # We are getting a hash from the database,
+            # but we have nothing to check it against.
+            # In this instance, we are checking if we got a match
+            # at all last time. We can confidently say that since
+            # we have a hash, we did.
+            returned_match = True
+        return returned_match
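The store/get pair above dedupes notifications by hashing the normalised match: orjson with OPT_SORT_KEYS yields a canonical byte string, and siphash reduces it to a compact value to compare against the stored one. A standalone sketch of the same idea (the hash key here is a placeholder; the diff uses db.hash_key):

    import orjson
    from siphashc import siphash

    HASH_KEY = "0123456789abcdef"  # placeholder 16-byte key

    def match_hash(match):
        # Sorting keys makes logically-equal dicts hash identically
        return siphash(HASH_KEY, orjson.dumps(match, option=orjson.OPT_SORT_KEYS))

    assert match_hash({"a": 1, "b": 2}) == match_hash({"b": 2, "a": 1})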
+
+    def format_aggs(self, aggs):
+        """
+        Format aggregations for the query.
+        We have self.aggs, which contains:
+            {"avg_sentiment": (">", 0.5)}
+        and aggs, which contains:
+            {"avg_sentiment": {"value": 0.6}}
+        It's matched already, we just need to format it like so:
+            {"avg_sentiment": "0.6>0.5"}
+        :param aggs: the aggregations to format
+        :return: the formatted aggregations
+        """
+        new_aggs = {}
+        for agg_name, agg in aggs.items():
+            if agg_name in self.aggs:
+                op, value = self.aggs[agg_name]
+                new_aggs[agg_name] = f"{agg['value']}{op}{value}"
+
+        return new_aggs
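With the docstring's own numbers, the method renders each matched aggregation as value-operator-threshold:

    self_aggs = {"avg_sentiment": (">", 0.5)}   # rule side (self.aggs)
    aggs = {"avg_sentiment": {"value": 0.6}}    # query side
    op, value = self_aggs["avg_sentiment"]
    assert f"{aggs['avg_sentiment']['value']}{op}{value}" == "0.6>0.5"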
+
+    def reform_matches(self, index, matches, meta, mode):
+        if not isinstance(matches, list):
+            matches = [matches]
+        matches_copy = matches.copy()
+        # match_ts = datetime.utcnow().isoformat()
+        match_ts = int(datetime.utcnow().timestamp())
+        batch_id = uuid.uuid4()
+
+        # Filter empty fields in meta
+        meta = {k: v for k, v in meta.items() if v}
+
+        for match_index, _ in enumerate(matches_copy):
+            matches_copy[match_index]["index"] = index
+            matches_copy[match_index]["rule_id"] = str(self.object.id)
+            matches_copy[match_index]["meta"] = meta
+            matches_copy[match_index]["match_ts"] = match_ts
+            matches_copy[match_index]["mode"] = mode
+            matches_copy[match_index]["batch_id"] = str(batch_id)
+        return matches_copy
+
+    async def ingest_matches(self, index, matches, meta, mode):
+        """
+        Store all matches for an index.
+        :param index: the index to store the matches for
+        :param matches: the matches to store
+        """
+        # new_matches = self.reform_matches(index, matches, meta, mode)
+        if self.object.ingest:
+            await self.db.async_store_matches(matches)
+
+    def ingest_matches_sync(self, index, matches, meta, mode):
+        """
+        Store all matches for an index.
+        :param index: the index to store the matches for
+        :param matches: the matches to store
+        """
+        # new_matches = self.reform_matches(index, matches, meta, mode)
+        if self.object.ingest:
+            self.db.store_matches(matches)
+
+    async def rule_matched(self, index, message, meta, mode):
+        """
+        A rule has matched.
+        If the previous run did not match, send a notification after formatting
+        the aggregations.
+        :param index: the index the rule matched on
+        :param message: the message object that matched
+        :param aggs: the aggregations that matched
+        """
+        current_match = self.get_match(index, message)
+        log.debug(f"Rule matched: {index} - current match: {current_match}")
+
+        last_run_had_matches = current_match is True
+
+        if self.policy in ["change", "default"]:
+            # Change or Default policy, notifying only on new results
+            if last_run_had_matches:
+                # Last run had matches, and this one did too
+                # We don't need to notify
+                return
+
+        elif self.policy == "always":
+            # Only here for completeness, we notify below by default
+            pass
+
+        # We hit the return above if we don't need to notify
+        if "matched" not in meta:
+            meta["matched"] = self.format_matched(message)
+        if "aggs" in meta:
+            aggs_formatted = self.format_aggs(meta["aggs"])
+            if aggs_formatted:
+                meta["matched_aggs"] = aggs_formatted
+
+        meta["is_match"] = True
+        self.store_match(index, message)
+
+        message = self.reform_matches(index, message, meta, mode)
+        rule_notify(self.object, index, message, meta)
+        await self.ingest_matches(index, message, meta, mode)
+
+    def rule_matched_sync(self, index, message, meta, mode):
+        """
+        A rule has matched.
+        If the previous run did not match, send a notification after formatting
+        the aggregations.
+        :param index: the index the rule matched on
+        :param message: the message object that matched
+        :param aggs: the aggregations that matched
+        """
+        current_match = self.get_match(index, message)
+        log.debug(f"Rule matched: {index} - current match: {current_match}")
+
+        last_run_had_matches = current_match is True
+
+        if self.policy in ["change", "default"]:
+            # Change or Default policy, notifying only on new results
+            if last_run_had_matches:
+                # Last run had matches, and this one did too
+                # We don't need to notify
+                return
+
+        elif self.policy == "always":
+            # Only here for completeness, we notify below by default
+            pass
+
+        # We hit the return above if we don't need to notify
+        if "matched" not in meta:
+            meta["matched"] = self.format_matched(message)
+        if "aggs" in meta:
+            aggs_formatted = self.format_aggs(meta["aggs"])
+            if aggs_formatted:
+                meta["matched_aggs"] = aggs_formatted
+
+        meta["is_match"] = True
+        self.store_match(index, message)
+
+        message = self.reform_matches(index, message, meta, mode)
+        rule_notify(self.object, index, message, meta)
+        self.ingest_matches_sync(index, message, meta, mode)
+
+    # No async helper for this one as we only need it for schedules
+    async def rule_no_match(self, index=None, message=None, mode=None):
+        """
+        A rule has not matched.
+        If the previous run did match, send a notification if configured to notify
+        for empty matches.
+        :param index: the index the rule did not match on, can be None
+        """
+        current_match = self.get_match(index)
+        log.debug(
+            f"Rule not matched: {index} - current match: {current_match}: {message}"
+        )
+
+        last_run_had_matches = current_match is True
+        initial = current_match is None
+
+        self.store_match(index, False)
+
+        if self.policy != "always":
+            # We hit the return above if we don't need to notify
+            if self.policy in ["change", "default"]:
+                if not last_run_had_matches and not initial:
+                    # We don't need to notify if the last run didn't have matches
+                    return
+
+        if self.policy in ["always", "change"]:
+            # Never notify for empty matches on default policy
+            meta = {"msg": message, "is_match": False}
+            matches = [{"msg": None}]
+            message = self.reform_matches(index, matches, meta, mode)
+            rule_notify(self.object, index, matches, meta)
+            await self.ingest_matches(
+                index=index,
+                matches=matches,
+                meta=meta,
+                mode="schedule",
+            )
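Taken together, rule_matched and rule_no_match implement the three policies: default notifies only on a no-match-to-match edge, change also notifies when results disappear, and always notifies every run. A simplified restatement of that edge logic (ignores the initial-run case handled above):

    def should_notify(policy, last_had_matches, now_has_matches):
        if policy == "always":
            return True
        if now_has_matches:
            # default and change: only notify on new results
            return not last_had_matches
        # change: notify when previous results disappeared
        return policy == "change" and last_had_matches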
+
+    async def run_schedule(self):
+        """
+        Run the schedule query.
+        Get the results from the database, and check if the rule has matched.
+        Check if all of the required aggregations have matched.
+        """
+        response = await self.db.schedule_query_results(self)
+        if not response:
+            # No results in the result_map
+            await self.rule_no_match(
+                message="No response from database", mode="schedule"
+            )
+            return
+        for index, (meta, results) in response.items():
+            if not results:
+                # Falsy results, no matches
+                await self.rule_no_match(
+                    index, message="No results for index", mode="schedule"
+                )
+                continue
+
+            # Add the match values of all aggregations to a list
+            aggs_for_index = []
+            for agg_name in self.aggs.keys():
+                if agg_name in meta["aggs"]:
+                    if "match" in meta["aggs"][agg_name]:
+                        aggs_for_index.append(meta["aggs"][agg_name]["match"])
+
+            # All required aggs are present
+            if len(aggs_for_index) == len(self.aggs.keys()):
+                if all(aggs_for_index):
+                    # All aggs have matched
+                    await self.rule_matched(
+                        index, results[: self.object.amount], meta, mode="schedule"
+                    )
+                    continue
+            # Default branch, since the happy path has a continue keyword
+            await self.rule_no_match(
+                index, message="Aggregation did not match", mode="schedule"
+            )
+
+    def test_schedule(self):
+        """
+        Test the schedule query to ensure it is valid.
+        Raises an exception if the query is invalid.
+        """
+        if self.db:
+            self.db.schedule_query_results_test_sync(self)
+
+    def validate_schedule_fields(self):
+        """
+        Ensure schedule fields are valid.
+        index: can be a list, it will schedule one search per index.
+        source: can be a list, it will be the filter for each search.
+        tokens: can be a list, it will ensure the message matches any token.
+        msg: can be a list, it will ensure the message contains any msg.
+        No other fields can be lists containing more than one item.
+        :raises RuleParseError: if the fields are invalid
+        """
+        is_schedule = self.is_schedule
+
+        if is_schedule:
+            allowed_list_fields = ["index", "source", "tokens", "msg"]
+            for field, value in self.parsed.items():
+                if field not in allowed_list_fields:
+                    if len(value) > 1:
+                        raise RuleParseError(
+                            (
+                                f"For scheduled rules, field {field} cannot contain "
+                                "more than one item"
+                            ),
+                            "data",
+                        )
+                    if len(str(value[0])) == 0:
+                        raise RuleParseError(f"Field {field} cannot be empty", "data")
+            if "sentiment" in self.parsed:
+                sentiment = str(self.parsed["sentiment"][0])
+                sentiment = sentiment.strip()
+                if sentiment[0] not in [">", "<", "="]:
+                    raise RuleParseError(
+                        (
+                            "Sentiment field must be a comparison operator and then a "
+                            "float: >0.02"
+                        ),
+                        "data",
+                    )
+                operator = sentiment[0]
+                number = sentiment[1:]
+
+                try:
+                    number = float(number)
+                except ValueError:
+                    raise RuleParseError(
+                        (
+                            "Sentiment field must be a comparison operator and then a "
+                            "float: >0.02"
+                        ),
+                        "data",
+                    )
+                self.aggs["avg_sentiment"] = (operator, number)
+
+        else:
+            if "query" in self.parsed:
+                raise RuleParseError(
+                    "Field query cannot be used with on-demand rules", "data"
+                )
+            if "tags" in self.parsed:
+                raise RuleParseError(
+                    "Field tags cannot be used with on-demand rules", "data"
+                )
+            if self.policy != "default":
+                raise RuleParseError(
+                    (
+                        f"Cannot use {self.cleaned_data['policy']} policy with "
+                        "on-demand rules"
+                    ),
+                    "policy",
+                )
+
+    @property
+    def is_schedule(self):
+        """
+        Check if the rule is a schedule rule.
+        :return: True if the rule is a schedule rule, False otherwise
+        """
+        if "interval" in self.cleaned_data:
+            if self.cleaned_data["interval"] != 0:
+                return True
+        return False
+
+    def ensure_list(self):
+        """
+        Ensure all values in the data field are lists.
+        Convert all strings to lists with one item.
+        """
+        for field, value in self.parsed.items():
+            if not isinstance(value, list):
+                self.parsed[field] = [value]
+
+    def validate_user_permissions(self):
+        """
+        Ensure the user can use notification rules.
+        :raises RuleParseError: if the user does not have permission
+        """
+        if not self.user.has_perm("core.use_rules"):
+            raise RuleParseError("User does not have permission to use rules", "data")
+
+    def validate_time_fields(self):
+        """
+        Validate the interval and window fields.
+        Prohibit window being specified with an ondemand interval.
+        Prohibit window not being specified with a non-ondemand interval.
+        Prohibit amount being specified with an on-demand interval.
+        Prohibit amount not being specified with a non-ondemand interval.
+        Validate window field.
+        Validate window unit and enforce maximum.
+        :raises RuleParseError: if the fields are invalid
+        """
+        interval = self.cleaned_data.get("interval")
+        window = self.cleaned_data.get("window")
+        amount = self.cleaned_data.get("amount")
+        service = self.cleaned_data.get("service")
+
+        on_demand = interval == 0
+
+        # Not on demand and interval is too low
+        if not on_demand and interval <= HIGH_FREQUENCY_MIN_SEC:
+            if not self.user.has_perm("core.rules_high_frequency"):
+                raise RuleParseError(
+                    "User does not have permission to use high frequency rules", "data"
+                )
+
+        if not on_demand:
+            if not self.user.has_perm("core.rules_scheduled"):
+                raise RuleParseError(
+                    "User does not have permission to use scheduled rules", "data"
+                )
+
+        if on_demand and window is not None:
+            # Interval is on demand and window is specified
+            # We can't have a window with on-demand rules
+            raise RuleParseError(
+                "Window cannot be specified with on-demand interval", "window"
+            )
+
+        if not on_demand and window is None:
+            # Interval is not on demand and window is not specified
+            # We can't have a non-on-demand interval without a window
+            raise RuleParseError(
+                "Window must be specified with non-on-demand interval", "window"
+            )
+
+        if not on_demand and amount is None:
+            # Interval is not on demand and amount is not specified
+            # We can't have a non-on-demand interval without an amount
+            raise RuleParseError(
+                "Amount must be specified with non-on-demand interval", "amount"
+            )
+        if on_demand and amount is not None:
+            # Interval is on demand and amount is specified
+            # We can't have an amount with on-demand rules
+            raise RuleParseError(
+                "Amount cannot be specified with on-demand interval", "amount"
+            )
+
+        if window is not None:
+            window_number = window[:-1]
+            if not window_number.isdigit():
+                raise RuleParseError("Window prefix must be a number", "window")
+            window_number = int(window_number)
+            window_unit = window[-1]
+            if window_unit not in SECONDS_PER_UNIT:
+                raise RuleParseError(
+                    (
+                        "Window unit must be one of "
+                        f"{', '.join(SECONDS_PER_UNIT.keys())},"
+                        f" not '{window_unit}'"
+                    ),
+                    "window",
+                )
+            window_seconds = window_number * SECONDS_PER_UNIT[window_unit]
+            if window_seconds > MAX_WINDOW:
+                raise RuleParseError(
+                    f"Window cannot be larger than {MAX_WINDOW} seconds (30 days)",
+                    "window",
+                )
+
+        if amount is not None:
+            if service == "ntfy":
+                if amount > MAX_AMOUNT_NTFY:
+                    raise RuleParseError(
+                        f"Amount cannot be larger than {MAX_AMOUNT_NTFY} for ntfy",
+                        "amount",
+                    )
+            else:
+                if amount > MAX_AMOUNT_WEBHOOK:
+                    raise RuleParseError(
+                        (
+                            f"Amount cannot be larger than {MAX_AMOUNT_WEBHOOK} for "
+                            f"{service}"
+                        ),
+                        "amount",
+                    )
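A note on the window limit above: the unit suffix is mapped through SECONDS_PER_UNIT, so the 30-day ceiling falls out of plain arithmetic. A minimal standalone sketch using the same constants as the diff:

    SECONDS_PER_UNIT = {"s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
    MAX_WINDOW = 2592000  # 30 * 86400 seconds = 30 days

    def window_seconds(window: str) -> int:
        # "30d" -> 30 * 86400 = 2592000, exactly MAX_WINDOW
        number, unit = int(window[:-1]), window[-1]
        return number * SECONDS_PER_UNIT[unit]

    assert window_seconds("30d") == MAX_WINDOW
    assert window_seconds("5w") > MAX_WINDOW  # 3024000s, would be rejected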
 
     def validate_permissions(self):
         """
         Validate permissions for the source and index variables.
+        Also set the default values for the user if not present.
+        Stores the default or expanded values in the parsed field.
+        :raises QueryError: if the user does not have permission to use the source
         """
         if "index" in self.parsed:
             index = self.parsed["index"]
             if type(index) == list:
                 for i in index:
-                    db.parse_index(self.user, {"index": i}, raise_error=True)
-            else:
-                db.parse_index(self.user, {"index": index}, raise_error=True)
+                    parse_index(self.user, {"index": i}, raise_error=True)
+            # else:
+            #     db.parse_index(self.user, {"index": index}, raise_error=True)
         else:
             # Get the default value for the user if not present
-            index = db.parse_index(self.user, {}, raise_error=True)
-            self.parsed["index"] = index
+            index = parse_index(self.user, {}, raise_error=True)
+            self.parsed["index"] = [index]
 
         if "source" in self.parsed:
             source = self.parsed["source"]
             if type(source) == list:
                 for i in source:
-                    db.parse_source(self.user, {"source": i}, raise_error=True)
-            else:
-                db.parse_source(self.user, {"source": source}, raise_error=True)
+                    parse_source(self.user, {"source": i}, raise_error=True)
+            # else:
+            #     parse_source(self.user, {"source": source}, raise_error=True)
         else:
             # Get the default value for the user if not present
-            source = db.parse_source(self.user, {}, raise_error=True)
+            source = parse_source(self.user, {}, raise_error=True)
             self.parsed["source"] = source
 
     def parse_data(self):
         """
         Parse the data in the text field to YAML.
+        :raises RuleParseError: if the data is invalid
         """
         try:
             self.parsed = load(self.data, Loader=Loader)
         except (ScannerError, ParserError) as e:
-            raise ValueError(f"Invalid YAML: {e}")
+            raise RuleParseError(f"Invalid YAML: {e}", "data")
 
     def __str__(self):
+        """
+        Get a YAML representation of the data field of the rule.
+        """
         return dump(self.parsed, Dumper=Dumper)
 
     def get_data(self):
+        """
+        Return the data field as a dictionary.
+        """
         return self.parsed
@@ -1,16 +1,106 @@
 import msgpack
 from django.core.management.base import BaseCommand
+from django.conf import settings
 from redis import StrictRedis
 
-from core.lib.rules import process_rules
+from core.db.storage import db
+from core.lib.rules import NotificationRuleData
+from core.models import NotificationRule
 from core.util import logs
 
 log = logs.get_logger("processing")
 
+
+def process_rules(data):
+    all_rules = NotificationRule.objects.filter(enabled=True, interval=0)
+
+    for index, index_messages in data.items():
+        for message in index_messages:
+            for rule in all_rules:
+                # Quicker helper to get the data without spinning
+                # up a NotificationRuleData object
+                parsed_rule = rule.parse()
+                matched = {}
+                # Rule is invalid, this shouldn't happen
+                if "index" not in parsed_rule:
+                    continue
+                if "source" not in parsed_rule:
+                    continue
+                rule_index = parsed_rule["index"]
+                rule_source = parsed_rule["source"]
+                # if not type(rule_index) == list:
+                #     rule_index = [rule_index]
+                # if not type(rule_source) == list:
+                #     rule_source = [rule_source]
+                if index not in rule_index:
+                    # We don't care about this index, go to the next one
+                    continue
+                if message["src"] not in rule_source:
+                    # We don't care about this source, go to the next one
+                    continue
+
+                matched["index"] = index
+                matched["source"] = message["src"]
+
+                rule_field_length = len(parsed_rule.keys())
+                matched_field_number = 0
+                for field, value in parsed_rule.items():
+                    # if not type(value) == list:
+                    #     value = [value]
+                    if field == "src":
+                        # We already checked this
+                        continue
+                    if field == "tokens":
+                        # Check if tokens are in the rule
+                        # We only check if *at least one* token matches
+                        for token in value:
+                            if "tokens" in message:
+                                if token in message["tokens"]:
+                                    matched_field_number += 1
+                                    matched[field] = token
+                                    # Break out of the token matching loop
+                                    break
+                        # Continue to next field
+                        continue
+
+                    if field == "msg":
+                        # Allow partial matches for msg
+                        for msg in value:
+                            if "msg" in message:
+                                if msg.lower() in message["msg"].lower():
+                                    matched_field_number += 1
+                                    matched[field] = msg
+                                    # Break out of the msg matching loop
+                                    break
+                        # Continue to next field
+                        continue
+                    if field in message and message[field] in value:
+                        # Do exact matches for all other fields
+                        matched_field_number += 1
+                        matched[field] = message[field]
+                # Subtract 2, 1 for source and 1 for index
+                if matched_field_number == rule_field_length - 2:
+                    meta = {"matched": matched, "total_hits": 1}
+
+                    # Parse the rule, we saved some work above to avoid doing this,
+                    # but it makes delivering messages significantly easier as we can
+                    # use the same code as for scheduling.
+                    rule_data_object = NotificationRuleData(rule.user, rule, db=db)
+                    # rule_notify(rule, index, message, meta=meta)
+                    rule_data_object.rule_matched_sync(
+                        index, message, meta=meta, mode="ondemand"
+                    )
+
+
 class Command(BaseCommand):
     def handle(self, *args, **options):
-        r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
+        r = StrictRedis(unix_socket_path="/var/run/neptune-redis.sock", db=10)  # To match Monolith DB
+        # r = StrictRedis(
+        #     host=settings.REDIS_HOST,
+        #     port=settings.REDIS_PORT,
+        #     password=settings.REDIS_PASSWORD,
+        #     db=settings.REDIS_DB
+        # )
         p = r.pubsub()
         p.psubscribe("messages")
         for message in p.listen():
57	core/management/commands/scheduling.py	Normal file
@@ -0,0 +1,57 @@
import asyncio

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from asgiref.sync import sync_to_async
from django.core.management.base import BaseCommand

from core.db.storage import db
from core.lib.parsing import QueryError
from core.lib.rules import NotificationRuleData, RuleParseError
from core.models import NotificationRule
from core.util import logs

log = logs.get_logger("scheduling")

INTERVALS = [5, 60, 900, 1800, 3600, 14400, 86400]


async def job(interval_seconds):
    """
    Run all schedules matching the given interval.
    :param interval_seconds: The interval to run.
    """
    matching_rules = await sync_to_async(list)(
        NotificationRule.objects.filter(enabled=True, interval=interval_seconds)
    )
    for rule in matching_rules:
        log.debug(f"Running rule {rule}")
        try:
            rule = NotificationRuleData(rule.user, rule, db=db)
            await rule.run_schedule()
            # results = await db.schedule_query_results(rule.user, rule)
        except QueryError as e:
            log.error(f"Error running rule {rule}: {e}")
        except RuleParseError as e:
            log.error(f"Error parsing rule {rule}: {e}")


class Command(BaseCommand):
    def handle(self, *args, **options):
        """
        Start the scheduling process.
        """
        scheduler = AsyncIOScheduler()
        for interval in INTERVALS:
            log.debug(f"Scheduling {interval} second job")
            scheduler.add_job(job, "interval", seconds=interval, args=[interval])

        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        scheduler._eventloop = loop
        scheduler.start()
        try:
            loop.run_forever()
        except (KeyboardInterrupt, SystemExit):
            log.info("Process terminating")
        finally:
            loop.close()
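One scheduler job is registered per entry in INTERVALS, and each job only loads rules whose interval field equals its own period, so a rule saved with interval=900 is picked up by the 15-minute job on its next tick. A minimal standalone sketch of that pattern (trimmed interval list for illustration):

    import asyncio
    from apscheduler.schedulers.asyncio import AsyncIOScheduler

    INTERVALS = [5, 60, 900]  # subset of the list above

    async def job(interval_seconds):
        print(f"would run rules with interval={interval_seconds}")

    scheduler = AsyncIOScheduler()
    for interval in INTERVALS:
        scheduler.add_job(job, "interval", seconds=interval, args=[interval])
    # scheduler.start() must then run inside an event loop, as the command does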
23	core/migrations/0016_notificationrule_interval_notificationrule_window.py	Normal file
@@ -0,0 +1,23 @@
# Generated by Django 4.1.3 on 2023-01-14 14:33

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0015_notificationrule_topic'),
    ]

    operations = [
        migrations.AddField(
            model_name='notificationrule',
            name='interval',
            field=models.CharField(choices=[('ondemand', 'On demand'), ('minute', 'Every minute'), ('15m', 'Every 15 minutes'), ('30m', 'Every 30 minutes'), ('hour', 'Every hour'), ('4h', 'Every 4 hours'), ('day', 'Every day'), ('week', 'Every week'), ('month', 'Every month')], default='ondemand', max_length=255),
        ),
        migrations.AddField(
            model_name='notificationrule',
            name='window',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
18	core/migrations/0017_alter_notificationrule_interval.py	Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.1.3 on 2023-01-14 14:54

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0016_notificationrule_interval_notificationrule_window'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationrule',
            name='interval',
            field=models.IntegerField(choices=[(0, 'On demand'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=0),
        ),
    ]
27	core/migrations/0018_alter_perms_options_notificationrule_match_and_more.py	Normal file
@@ -0,0 +1,27 @@
# Generated by Django 4.1.5 on 2023-01-15 00:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0017_alter_notificationrule_interval'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='perms',
            options={'permissions': (('post_irc', 'Can post to IRC'), ('post_discord', 'Can post to Discord'), ('use_insights', 'Can use the Insights page'), ('use_rules', 'Can use the Rules page'), ('index_internal', 'Can use the internal index'), ('index_meta', 'Can use the meta index'), ('index_restricted', 'Can use the restricted index'), ('restricted_sources', 'Can access restricted sources'))},
        ),
        migrations.AddField(
            model_name='notificationrule',
            name='match',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='interval',
            field=models.IntegerField(choices=[(0, 'On demand'), (5, 'Every 5 seconds'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=0),
        ),
    ]
18	core/migrations/0019_alter_notificationrule_match.py	Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.1.5 on 2023-01-15 01:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0018_alter_perms_options_notificationrule_match_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationrule',
            name='match',
            field=models.JSONField(blank=True, null=True),
        ),
    ]
42	core/migrations/0020_rename_ntfy_topic_notificationsettings_topic_and_more.py	Normal file
@@ -0,0 +1,42 @@
# Generated by Django 4.1.5 on 2023-01-15 18:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0019_alter_notificationrule_match'),
    ]

    operations = [
        migrations.RenameField(
            model_name='notificationsettings',
            old_name='ntfy_topic',
            new_name='topic',
        ),
        migrations.RemoveField(
            model_name='notificationsettings',
            name='ntfy_url',
        ),
        migrations.AddField(
            model_name='notificationrule',
            name='service',
            field=models.CharField(blank=True, choices=[('ntfy', 'NTFY'), ('wehbook', 'Custom webhook')], max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='notificationrule',
            name='url',
            field=models.CharField(blank=True, max_length=1024, null=True),
        ),
        migrations.AddField(
            model_name='notificationsettings',
            name='service',
            field=models.CharField(blank=True, choices=[('ntfy', 'NTFY'), ('wehbook', 'Custom webhook')], max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='notificationsettings',
            name='url',
            field=models.CharField(blank=True, max_length=1024, null=True),
        ),
    ]
28	core/migrations/0021_notificationrule_amount_and_more.py	Normal file
@@ -0,0 +1,28 @@
# Generated by Django 4.1.5 on 2023-01-15 20:45

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0020_rename_ntfy_topic_notificationsettings_topic_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='notificationrule',
            name='amount',
            field=models.IntegerField(blank=True, default=1, null=True),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook')], default='ntfy', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationsettings',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook')], default='ntfy', max_length=255),
        ),
    ]
23	core/migrations/0022_notificationrule_send_empty_and_more.py	Normal file
@@ -0,0 +1,23 @@
# Generated by Django 4.1.5 on 2023-01-15 23:34

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0021_notificationrule_amount_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='notificationrule',
            name='send_empty',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='amount',
            field=models.PositiveIntegerField(blank=True, default=1, null=True),
        ),
    ]
17	core/migrations/0023_alter_perms_options.py	Normal file
@@ -0,0 +1,17 @@
# Generated by Django 4.1.5 on 2023-02-02 19:07

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0022_notificationrule_send_empty_and_more'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='perms',
            options={'permissions': (('post_irc', 'Can post to IRC'), ('post_discord', 'Can post to Discord'), ('use_insights', 'Can use the Insights page'), ('use_rules', 'Can use the Rules page'), ('rules_scheduled', 'Can use the scheduled rules'), ('rules_high_frequency', 'Can use the high frequency rules'), ('index_internal', 'Can use the internal index'), ('index_meta', 'Can use the meta index'), ('index_restricted', 'Can use the restricted index'), ('restricted_sources', 'Can access restricted sources'))},
        ),
    ]
20	core/migrations/0024_alter_notificationrule_id.py	Normal file
@@ -0,0 +1,20 @@
# Generated by Django 4.1.5 on 2023-02-02 19:08

import uuid

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0023_alter_perms_options'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationrule',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
        ),
    ]
20	core/migrations/0025_alter_notificationrule_id.py	Normal file
@@ -0,0 +1,20 @@
# Generated by Django 4.1.5 on 2023-02-02 19:35

import uuid

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0024_alter_notificationrule_id'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationrule',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True),
        ),
    ]
28	core/migrations/0026_notificationrule_policy_and_more.py	Normal file
@@ -0,0 +1,28 @@
# Generated by Django 4.1.5 on 2023-02-09 14:38

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0025_alter_notificationrule_id'),
    ]

    operations = [
        migrations.AddField(
            model_name='notificationrule',
            name='policy',
            field=models.CharField(choices=[('default', 'Only trigger for matched events'), ('change', 'Trigger only if no results found when they were last run'), ('always', 'Always trigger regardless of whether results are found')], default='default', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='ntfy', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationsettings',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='ntfy', max_length=255),
        ),
    ]
core/migrations/0027_alter_notificationrule_policy_and_more.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# Generated by Django 4.1.6 on 2023-02-13 10:18

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0026_notificationrule_policy_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationrule',
            name='policy',
            field=models.CharField(choices=[('default', 'Default: Trigger only when there were no results last time'), ('change', 'Change: Default + trigger when there are no results (if there were before)'), ('always', 'Always: Trigger on every run (not recommended for low intervals)')], default='default', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='topic',
            field=models.CharField(blank=True, max_length=2048, null=True),
        ),
        migrations.AlterField(
            model_name='notificationsettings',
            name='topic',
            field=models.CharField(blank=True, max_length=2048, null=True),
        ),
    ]
@@ -0,0 +1,33 @@
# Generated by Django 4.1.6 on 2023-02-13 21:03

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0027_alter_notificationrule_policy_and_more'),
    ]

    operations = [
        migrations.RenameField(
            model_name='notificationrule',
            old_name='send_empty',
            new_name='ingest',
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='interval',
            field=models.IntegerField(choices=[(0, 'On demand'), (5, 'Every 5 seconds'), (60, 'Every minute'), (900, 'Every 15 minutes'), (1800, 'Every 30 minutes'), (3600, 'Every hour'), (14400, 'Every 4 hours'), (86400, 'Every day')], default=60),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='service',
            field=models.CharField(choices=[('ntfy', 'NTFY'), ('webhook', 'Custom webhook'), ('none', 'Disabled')], default='webhook', max_length=255),
        ),
        migrations.AlterField(
            model_name='notificationrule',
            name='window',
            field=models.CharField(blank=True, default='30d', max_length=255, null=True),
        ),
    ]
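The final migration renames send_empty to ingest and widens the interval choices down to five seconds. The interval is stored in seconds, so a scheduler tick can select due rules with a plain filter; a minimal sketch (the scheduler itself is not part of this diff, so due_rules is hypothetical):

from core.models import NotificationRule

def due_rules(interval: int):
    # interval is one of the INTERVAL_CHOICES values in seconds;
    # 0 means on-demand only, so periodic schedulers should skip it.
    return NotificationRule.objects.filter(enabled=True, interval=interval)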
core/models.py (116 lines changed)
@@ -1,6 +1,8 @@
 import logging
+import uuid

 import stripe
+from django.conf import settings
 from django.contrib.auth.models import AbstractUser
 from django.db import models
 from yaml import load
@@ -23,6 +25,32 @@ PRIORITY_CHOICES = (
     (5, "max"),
 )
+
+INTERVAL_CHOICES = (
+    (0, "On demand"),
+    (5, "Every 5 seconds"),
+    (60, "Every minute"),
+    (900, "Every 15 minutes"),
+    (1800, "Every 30 minutes"),
+    (3600, "Every hour"),
+    (14400, "Every 4 hours"),
+    (86400, "Every day"),
+)
+
+SERVICE_CHOICES = (
+    ("ntfy", "NTFY"),
+    ("webhook", "Custom webhook"),
+    ("none", "Disabled"),
+)
+
+POLICY_CHOICES = (
+    ("default", "Default: Trigger only when there were no results last time"),
+    (
+        "change",
+        "Change: Default + trigger when there are no results (if there were before)",
+    ),
+    ("always", "Always: Trigger on every run (not recommended for low intervals)"),
+)

 class Plan(models.Model):
     name = models.CharField(max_length=255, unique=True)
@@ -50,6 +78,7 @@ class User(AbstractUser):
         """
         Override the save function to create a Stripe customer.
         """
+        if settings.BILLING_ENABLED:
             if not self.stripe_id:  # stripe ID not stored
                 self.stripe_id = get_or_create(self.email, self.first_name, self.last_name)
@@ -61,11 +90,13 @@ class User(AbstractUser):
         if self.last_name != self._original.last_name:
             to_update["last_name"] = self.last_name

+        if settings.BILLING_ENABLED:
             update_customer_fields(self.stripe_id, **to_update)

         super().save(*args, **kwargs)

     def delete(self, *args, **kwargs):
+        if settings.BILLING_ENABLED:
             if self.stripe_id:
                 stripe.Customer.delete(self.stripe_id)
                 logger.info(f"Deleted Stripe customer {self.stripe_id}")
@@ -75,8 +106,27 @@ class User(AbstractUser):
         plan_list = [plan.name for plan in self.plans.all()]
         return plan in plan_list

-    def get_notification_settings(self):
-        return NotificationSettings.objects.get_or_create(user=self)[0]
+    def get_notification_settings(self, check=True):
+        sets = NotificationSettings.objects.get_or_create(user=self)[0]
+        if check:
+            if sets.service == "ntfy" and sets.topic is None:
+                return None
+            if sets.service == "webhook" and sets.url is None:
+                return None
+        return sets
+
+    @property
+    def allowed_indices(self):
+        indices = [settings.INDEX_MAIN]
+        if self.has_perm("core.index_meta"):
+            indices.append(settings.INDEX_META)
+        if self.has_perm("core.index_internal"):
+            indices.append(settings.INDEX_INT)
+        if self.has_perm("core.index_restricted"):
+            if self.has_perm("core.restricted_sources"):
+                indices.append(settings.INDEX_RESTRICTED)
+        return indices


 class Session(models.Model):
@@ -125,16 +175,12 @@ class ContentBlock(models.Model):
 class Perms(models.Model):
     class Meta:
         permissions = (
-            ("bypass_hashing", "Can bypass field hashing"),  #
-            ("bypass_blacklist", "Can bypass the blacklist"),  #
-            ("bypass_encryption", "Can bypass field encryption"),  #
-            ("bypass_obfuscation", "Can bypass field obfuscation"),  #
-            ("bypass_delay", "Can bypass data delay"),  #
-            ("bypass_randomisation", "Can bypass data randomisation"),  #
             ("post_irc", "Can post to IRC"),
             ("post_discord", "Can post to Discord"),
-            ("query_search", "Can search with query strings"),  #
             ("use_insights", "Can use the Insights page"),
+            ("use_rules", "Can use the Rules page"),
+            ("rules_scheduled", "Can use the scheduled rules"),
+            ("rules_high_frequency", "Can use the high frequency rules"),
             ("index_internal", "Can use the internal index"),
             ("index_meta", "Can use the meta index"),
             ("index_restricted", "Can use the restricted index"),
@@ -143,12 +189,25 @@ class Perms(models.Model):
 class NotificationRule(models.Model):
+    id = models.UUIDField(
+        default=uuid.uuid4, primary_key=True, editable=False, unique=True
+    )
     user = models.ForeignKey(User, on_delete=models.CASCADE)
     name = models.CharField(max_length=255)
     priority = models.IntegerField(choices=PRIORITY_CHOICES, default=1)
-    topic = models.CharField(max_length=255, null=True, blank=True)
+    topic = models.CharField(max_length=2048, null=True, blank=True)
+    url = models.CharField(max_length=1024, null=True, blank=True)
+    interval = models.IntegerField(choices=INTERVAL_CHOICES, default=60)
+    window = models.CharField(max_length=255, default="30d", null=True, blank=True)
+    amount = models.PositiveIntegerField(default=1, null=True, blank=True)
     enabled = models.BooleanField(default=True)
     data = models.TextField()
+    match = models.JSONField(null=True, blank=True)
+    service = models.CharField(
+        choices=SERVICE_CHOICES, max_length=255, default="webhook"
+    )
+    ingest = models.BooleanField(default=False)
+    policy = models.CharField(choices=POLICY_CHOICES, max_length=255, default="default")

     def __str__(self):
         return f"{self.user} - {self.name}"
@@ -160,11 +219,44 @@ class NotificationRule(models.Model):
             raise ValueError(f"Invalid YAML: {e}")
         return parsed

+    @property
+    def matches(self):
+        """
+        Get the total number of matches for this rule.
+        """
+        if isinstance(self.match, dict):
+            truthy_values = [x for x in self.match.values() if x is not False]
+            return f"{len(truthy_values)}/{len(self.match)}"
+
+    def get_notification_settings(self, check=True):
+        """
+        Get the notification settings for this rule.
+        Notification rule settings take priority.
+        """
+        user_settings = self.user.get_notification_settings(check=False)
+        user_settings = user_settings.__dict__
+        if self.priority is not None:
+            user_settings["priority"] = str(self.priority)
+        if self.topic is not None:
+            user_settings["topic"] = self.topic
+        if self.url is not None:
+            user_settings["url"] = self.url
+        if self.service is not None:
+            user_settings["service"] = self.service
+
+        if check:
+            if user_settings["service"] == "ntfy" and user_settings["topic"] is None:
+                return None
+            if user_settings["service"] == "webhook" and user_settings["url"] is None:
+                return None
+        return user_settings


 class NotificationSettings(models.Model):
     user = models.OneToOneField(User, on_delete=models.CASCADE)
-    ntfy_topic = models.CharField(max_length=255, null=True, blank=True)
-    ntfy_url = models.CharField(max_length=255, null=True, blank=True)
+    topic = models.CharField(max_length=2048, null=True, blank=True)
+    url = models.CharField(max_length=1024, null=True, blank=True)
+    service = models.CharField(choices=SERVICE_CHOICES, max_length=255, default="ntfy")

     def __str__(self):
         return f"Notification settings for {self.user}"
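The two get_notification_settings methods layer per-rule overrides on top of the account-wide defaults: topic, url, service, and priority set on the rule win, and check=True turns an incomplete configuration into None instead of a half-usable dict. A minimal sketch of the intended call pattern (the sender function is hypothetical; only the settings shape comes from the diff):

def notify(rule):
    effective = rule.get_notification_settings(check=True)
    if effective is None or effective["service"] == "none":
        return  # incomplete configuration, or notifications disabled
    if effective["service"] == "ntfy":
        send_ntfy(effective["topic"], priority=effective.get("priority"))  # hypothetical sender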
@@ -66,6 +66,11 @@ $(document).ready(function(){
     "file_size": "off",
     "lang_code": "off",
     "tokens": "off",
+    "rule_id": "off",
+    "index": "off",
+    "meta": "off",
+    "match_ts": "off",
+    "batch_id": "off"
     //"lang_name": "off",
     // "words_noun": "off",
     // "words_adj": "off",
@@ -1,8 +1,10 @@
 {% load static %}
 {% load has_plan %}
+{% load cache %}

 <!DOCTYPE html>
 <html lang="en-GB">
+    {% cache 600 head request.path_info %}
     <head>
         <meta charset="utf-8">
         <meta name="viewport" content="width=device-width, initial-scale=1">
@@ -212,6 +214,12 @@
             z-index: 39 !important;
         }
+
+        .small-field {
+            overflow: hidden;
+            text-overflow: ellipsis;
+            overflow-y: hidden;
+        }
+
     </style>
     <!-- Piwik --> {# Yes it's in the source, fight me #}
     <script type="text/javascript">
@@ -229,8 +237,9 @@
     <!-- End Piwik Code -->

     </head>
+    {% endcache %}
     <body>
+        {% cache 600 nav request.user.id %}
         <nav class="navbar" role="navigation" aria-label="main navigation">
             <div class="navbar-brand">
                 <a class="navbar-item" href="{% url 'home' %}">
@@ -271,7 +280,7 @@
                 {% if user.is_superuser %}
                     <div class="navbar-item has-dropdown is-hoverable">
                         <a class="navbar-link">
-                            Threshold
+                            Manage
                         </a>

                         <div class="navbar-dropdown">
@@ -281,14 +290,29 @@
                             <a class="navbar-item" href="#">
                                 Discord
                             </a>
+                            <a class="navbar-item" href="{% url 'monolith_stats' %}">
+                                Stats
+                            </a>
                         </div>
                     </div>
                 {% endif %}

                 {% if perms.core.use_insights %}
-                    <a class="navbar-item" href="{# url 'insights' #}">
+                    <div class="navbar-item has-dropdown is-hoverable">
+                        <a class="navbar-link">
                             Insights
                         </a>
+
+                        <div class="navbar-dropdown">
+                            {% for index in user.allowed_indices %}
+                                {% if index != "meta" %}
+                                    <a class="navbar-item" href="{% url 'insights' index=index %}">
+                                        {{ index }}
+                                    </a>
+                                {% endif %}
+                            {% endfor %}
+                        </div>
+                    </div>
                 {% endif %}
                 <a class="navbar-item add-button">
                     Install
@@ -316,6 +340,7 @@
                 </div>
             </div>
         </nav>
+        {% endcache %}
         <script>
             let deferredPrompt;
             const addBtn = document.querySelector('.add-button');
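The {% cache %} fragments key the head on request.path_info and the navbar on request.user.id, so each user gets their own cached navbar for up to ten minutes. If a permission change needs to show up sooner, the fragment can be dropped with Django's standard helper; a minimal sketch using the same vary-on value as the template:

from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key

def invalidate_nav(user_id: int) -> None:
    # {% cache 600 nav request.user.id %} stores the fragment under a key
    # derived from the fragment name and its vary-on arguments.
    cache.delete(make_template_fragment_key("nav", [user_id]))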
core/templates/manage/monolith/stats/index.html (new file, 15 lines)
@@ -0,0 +1,15 @@
{% extends "base.html" %}
{% block content %}
    <div
        style="display: none;"
        hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
        hx-get="{% url 'monolith_stats_db' type='page' %}"
        hx-trigger="load, every 5s"
        hx-target="#stats"
        hx-swap="innerHTML">
    </div>
    <div class="box">
        <div id="stats">
        </div>
    </div>
{% endblock %}
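The hidden div polls monolith_stats_db every five seconds and swaps the rendered fragment into #stats, so the page itself never reloads. A minimal sketch of a view compatible with this template (only the URL name and the overview template's context shape come from the diff; the data source is hypothetical):

from django.shortcuts import render

def monolith_stats_db(request, type):
    stats = fetch_db_stats()  # hypothetical: items whose .data rows carry Variable_name/Value pairs
    return render(request, "manage/monolith/stats/overview.html", {"object": stats})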
core/templates/manage/monolith/stats/overview.html (new file, 14 lines)
@@ -0,0 +1,14 @@
{% extends 'mixins/partials/generic-detail.html' %}

{% block tbody %}
    {% for item in object %}
        {% if item.data %}
            {% for row in item.data %}
                <tr>
                    <th>{{ row.Variable_name }}</th>
                    <td>{{ row.Value }}</td>
                </tr>
            {% endfor %}
        {% endif %}
    {% endfor %}
{% endblock %}
@@ -1,4 +1,4 @@
-{% extends 'wm/modal.html' %}
+{% extends 'mixins/wm/modal.html' %}

 {% load index %}
 {% load static %}
@@ -42,7 +42,7 @@
 {% endblock %}

 {% block modal_content %}
-    {% include 'partials/notify.html' %}
+    {% include 'mixins/partials/notify.html' %}
     <div class="tabs is-toggle is-fullwidth is-info" id="tabs-{{ unique }}">
         <ul>
             <li class="is-active" data-tab="1">
@@ -1,4 +1,4 @@
-{% extends 'wm/modal.html' %}
+{% extends 'mixins/wm/modal.html' %}

 {% block modal_content %}
     {% include 'window-content/drilldown.html' %}
@@ -1 +0,0 @@ (file deleted)
<button class="modal-close is-large" aria-label="close"></button>
@@ -1,3 +0,0 @@ (file deleted)
<i
    class="fa-solid fa-xmark has-text-grey-light float-right"
    onclick='grid.removeWidget("widget-{{ unique }}");'></i>
@@ -1,3 +0,0 @@ (file deleted)
<i
    class="fa-solid fa-xmark has-text-grey-light float-right"
    data-script="on click remove the closest <nav/>"></i>
@@ -1,8 +1,9 @@
 {% load static %}
+{% load cache %}
+{% load cachalot cache %}
+{% get_last_invalidation 'core.Plan' as last %}
+{% cache 600 objects_plans request.user.id plans last %}
 {% for plan in plans %}
     <div class="box">
         <article class="media">
             <div class="media-left">
@@ -43,6 +44,5 @@
             </div>
         </article>
     </div>
 {% endfor %}
+{% endcache %}
@@ -1,4 +1,4 @@
-{% extends 'wm/widget.html' %}
+{% extends 'mixins/wm/widget.html' %}
 {% load static %}

 {% block heading %}
@@ -6,15 +6,16 @@
 {% endblock %}

 {% block panel_content %}
-    {% include 'partials/notify.html' %}
-    <script src="{% static 'js/column-shifter.js' %}"></script>
+    {% include 'mixins/partials/notify.html' %}
     {% if cache is not None %}
         <span class="icon has-tooltip-bottom" data-tooltip="Cached">
             <i class="fa-solid fa-database"></i>
         </span>
     {% endif %}

-    fetched {{ table.data|length }} hits in {{ took }}ms
+    fetched {{ table.data|length }}
+    {% if params.rule is None %} hits {% else %} rule hits for {{ params.rule }}{% endif %}
+    in {{ took }}ms

     {% if exemption is not None %}
         <span class="icon has-tooltip-bottom" data-tooltip="God mode">
@@ -3,7 +3,12 @@
 {% load static %}
 {% load joinsep %}
 {% load urlsafe %}
-{% block table-wrapper %}
+{% load pretty %}
+{% load splitstr %}
+{% load cache %}
+
+{% cache 3600 results_table_full request.user.id table %}
+{% block table-wrapper %}
 <script src="{% static 'js/column-shifter.js' %}"></script>
 <div id="drilldown-table" class="column-shifter-container" style="position:relative; z-index:1;">
     {% block table %}
@@ -167,6 +172,14 @@
         <p>{{ row.cells.date }}</p>
         <p>{{ row.cells.time }}</p>
     </td>
+{% elif column.name == 'match_ts' %}
+    <td class="{{ column.name }}">
+        <!-- {# with match_ts=cell|splitstr:'T' %}
+            <p>{{ match_ts.0 }}</p>
+            <p>{{ match_ts.1 }}</p>
+        {% endwith #} -->
+        <p>{{ match_ts }}</p>
+    </td>
 {% elif column.name == 'type' or column.name == 'mtype' %}
     <td class="{{ column.name }}">
         <a
@@ -249,7 +262,7 @@
         "channel": "{{ row.cells.channel|escapejs }}",
         "time": "{{ row.cells.time|escapejs }}",
         "date": "{{ row.cells.date|escapejs }}",
-        "index": "{{ params.index }}",
+        "index": "{% if row.cells.index != '—' %}{{row.cells.index}}{% else %}{{ params.index }}{% endif %}",
         "type": "{{ row.cells.type }}",
         "mtype": "{{ row.cells.mtype }}",
         "nick": "{{ row.cells.nick|escapejs }}",
@@ -373,6 +386,32 @@
     {% endfor %}
     </div>
 </td>
+{% elif column.name == "meta" %}
+    <td class="{{ column.name }}">
+        <pre class="small-field" style="cursor: pointer;">{{ cell|pretty }}</pre>
+    </td>
+{% elif 'id' in column.name and column.name != "ident" %}
+    <td class="{{ column.name }}">
+        <div class="buttons">
+            <div class="nowrap-parent">
+                <!-- <input class="input" type="text" value="{{ cell }}" style="width: 50px;" readonly> -->
+                <a
+                    class="has-text-grey button nowrap-child"
+                    onclick="populateSearch('{{ column.name }}', '{{ cell|escapejs }}')">
+                    <span class="icon" data-tooltip="Populate {{ cell }}">
+                        <i class="fa-solid fa-arrow-left-long-to-line" aria-hidden="true"></i>
+                    </span>
+                </a>
+                <a
+                    class="has-text-grey button nowrap-child"
+                    onclick="window.prompt('Copy to clipboard: Ctrl+C, Enter', '{{ cell|escapejs }}');">
+                    <span class="icon" data-tooltip="Copy to clipboard">
+                        <i class="fa-solid fa-copy" aria-hidden="true"></i>
+                    </span>
+                </a>
+            </div>
+        </div>
+    </td>
 {% else %}
     <td class="{{ column.name }}">
         <a
@@ -494,4 +533,5 @@
     {% endif %}
     {% endblock pagination %}
 </div>
 {% endblock table-wrapper %}
+{% endcache %}
@@ -1,6 +1,9 @@
-{% include 'partials/notify.html' %}
+{% load cache %}
+{% load cachalot cache %}
+{% get_last_invalidation 'core.NotificationRule' as last %}
+{% include 'mixins/partials/notify.html' %}
+{% cache 600 objects_rules request.user.id object_list last %}
 <table
     class="table is-fullwidth is-hoverable"
     hx-target="#{{ context_object_name }}-table"
     id="{{ context_object_name }}-table"
@@ -11,19 +14,25 @@
         <th>id</th>
         <th>user</th>
         <th>name</th>
+        <th>interval</th>
+        <th>window</th>
         <th>priority</th>
         <th>topic</th>
         <th>enabled</th>
+        <th>ingest</th>
         <th>data length</th>
+        <th>match</th>
         <th>actions</th>
     </thead>
     {% for item in object_list %}
         <tr>
-            <td>{{ item.id }}</td>
+            <td><a href="/?query=*&source=all&rule={{ item.id }}">{{ item.id }}</a></td>
             <td>{{ item.user }}</td>
             <td>{{ item.name }}</td>
+            <td>{{ item.interval }}s</td>
+            <td>{{ item.window|default_if_none:"—" }}</td>
             <td>{{ item.priority }}</td>
-            <td>{{ item.topic }}</td>
+            <td>{{ item.topic|default_if_none:"—" }}</td>
             <td>
                 {% if item.enabled %}
                     <span class="icon">
@@ -35,7 +44,19 @@
                     </span>
                 {% endif %}
             </td>
+            <td>
+                {% if item.ingest %}
+                    <span class="icon">
+                        <i class="fa-solid fa-check"></i>
+                    </span>
+                {% else %}
+                    <span class="icon">
+                        <i class="fa-solid fa-xmark"></i>
+                    </span>
+                {% endif %}
+            </td>
             <td>{{ item.data|length }}</td>
+            <td>{{ item.matches }}</td>
             <td>
                 <div class="buttons">
                     <button
@@ -65,9 +86,24 @@
                         </span>
                         </span>
                     </button>
+                    <button
+                        hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
+                        hx-post="{% url 'rule_clear' type=type pk=item.id %}"
+                        hx-trigger="click"
+                        hx-target="#modals-here"
+                        hx-swap="innerHTML"
+                        hx-confirm="Are you sure you wish to clear matches for {{ item.name }}?"
+                        class="button">
+                        <span class="icon-text">
+                            <span class="icon">
+                                <i class="fa-solid fa-arrow-rotate-right"></i>
+                            </span>
+                        </span>
+                    </button>
                 </div>
             </td>
         </tr>
     {% endfor %}

 </table>
+{% endcache %}
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
{% if params.index != 'int' and params.index != 'meta' %}
|
{% if params.index != 'int' and params.index != 'meta' %}
|
||||||
<div id="sentiment-container" {% if params.show_sentiment is None %} class="is-hidden" {% endif %}>
|
<div id="sentiment-container" {% if params.graph is None %} class="is-hidden" {% endif %}>
|
||||||
<canvas id="sentiment-chart"></canvas>
|
<canvas id="sentiment-chart"></canvas>
|
||||||
</div>
|
</div>
|
||||||
<script src="{% static 'chart.js' %}"></script>
|
<script src="{% static 'chart.js' %}"></script>
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
     style="display: none;"
     hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
     hx-vals='{"net": "{{ item.net }}", "nick": "{{ item.nick }}"}'
-    hx-post="{% url 'chans_insights' %}"
+    hx-post="{% url 'chans_insights' index=index %}"
     hx-trigger="load"
     hx-target="#channels"
     hx-swap="outerHTML">
@@ -13,13 +13,13 @@
     style="display: none;"
     hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
     hx-vals='{"net": "{{ item.net }}", "nick": "{{ item.nick }}"}'
-    hx-post="{% url 'nicks_insights' %}"
+    hx-post="{% url 'nicks_insights' index=index %}"
     hx-trigger="load"
     hx-target="#nicks"
     hx-swap="outerHTML">
 </div>
 <div id="info">
-    {% include 'partials/notify.html' %}
+    {% include 'mixins/partials/notify.html' %}
     {% if item is not None %}
         <div class="content" style="max-height: 30em; overflow: auto;">
             <div class="table-container">
@@ -81,7 +81,7 @@
     {% if item.src == 'irc' %}
         <button
             hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
-            hx-post="{% url 'modal_insights' %}"
+            hx-post="{% url 'modal_insights' index=index %}"
             hx-vals='{"net": "{{ item.net }}", "nick": "{{ item.nick }}", "channel": "{{ item.channel }}"}'
             hx-target="#modals-here"
             hx-trigger="click"
@@ -1,40 +1,8 @@
 {% extends "base.html" %}
 {% load static %}
 {% block content %}
-    {% include 'partials/notify.html' %}
-    <script>
-        // tabbed browsing for the modal
-        function initTabs() {
-            TABS.forEach((tab) => {
-                tab.addEventListener('click', (e) => {
-                    let selected = tab.getAttribute('data-tab');
-                    updateActiveTab(tab);
-                    updateActiveContent(selected);
-                })
-            })
-        }
-
-        function updateActiveTab(selected) {
-            TABS.forEach((tab) => {
-                if (tab && tab.classList.contains(ACTIVE_CLASS)) {
-                    tab.classList.remove(ACTIVE_CLASS);
-                }
-            });
-            selected.classList.add(ACTIVE_CLASS);
-        }
-
-        function updateActiveContent(selected) {
-            CONTENT.forEach((item) => {
-                if (item && item.classList.contains(ACTIVE_CLASS)) {
-                    item.classList.remove(ACTIVE_CLASS);
-                }
-                let data = item.getAttribute('data-content');
-                if (data === selected) {
-                    item.classList.add(ACTIVE_CLASS);
-                }
-            });
-        }
-    </script>
+    {% include 'mixins/partials/notify.html' %}
+    <script src="{% static 'tabs.js' %}"></script>
     <style>
         .icon { border-bottom: 0px !important;}
     </style>
@@ -47,7 +15,7 @@
     {% csrf_token %}
     <div class="field has-addons">
         <div class="control is-expanded has-icons-left">
-            <input id="query_full" name="query_full" class="input" type="text" placeholder="nickname">
+            <input id="query_full" name="query" class="input" type="text" placeholder="nickname">
             <span class="icon is-small is-left">
                 <i class="fas fa-magnifying-glass"></i>
             </span>
@@ -55,7 +23,7 @@
     <div class="control">
         <button
             class="button is-info is-fullwidth"
-            hx-post="{% url 'search_insights' %}"
+            hx-post="{% url 'search_insights' index=index %}"
             hx-trigger="click"
             hx-target="#info"
             hx-swap="outerHTML">
@@ -3,7 +3,7 @@
     style="display: none;"
     hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
     hx-vals='{"net": "{{ net }}", "nicks": "{{ nicks }}"}'
-    hx-post="{% url 'meta_insights' %}"
+    hx-post="{% url 'meta_insights' index=index %}"
     hx-trigger="load"
     hx-target="#meta"
     hx-swap="outerHTML">
@@ -1,4 +1,4 @@
-{% extends 'wm/widget.html' %}
+{% extends 'mixins/wm/widget.html' %}


 {% block widget_options %}
@@ -1,34 +0,0 @@ (file deleted)
{% include 'partials/notify.html' %}
{% if page_title is not None %}
    <h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
    <h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
{% load crispy_forms_tags %}

{% load crispy_forms_bulma_field %}
<form
    hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
    hx-post="{{ submit_url }}"
    hx-target="#modals-here"
    hx-swap="innerHTML">
    {% csrf_token %}
    {{ form|crispy }}
    {% if hide_cancel is not True %}
        <button
            type="button"
            class="button is-light modal-close-button">
            Cancel
        </button>
    {% endif %}
    <button type="submit" class="button modal-close-button">Submit</button>
</form>
@@ -1,45 +0,0 @@ (file deleted)
{% include 'partials/notify.html' %}
{% if page_title is not None %}
    <h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
    <h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
<div class="buttons">

    {% if submit_url is not None %}
        <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-get="{{ submit_url }}"
            hx-trigger="click"
            hx-target="#modals-here"
            class="button">
            <span class="icon-text">
                <span class="icon">
                    <i class="fa-solid fa-plus"></i>
                </span>
                <span>{{ title_singular }}</span>
            </span>
        </button>
    {% endif %}
    {% if delete_all_url is not None %}
        <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-delete="{{ delete_all_url }}"
            hx-trigger="click"
            hx-target="#modals-here"
            hx-swap="innerHTML"
            hx-confirm="Are you sure you wish to delete all {{ context_object_name }}?"
            class="button">
            <span class="icon-text">
                <span class="icon">
                    <i class="fa-solid fa-xmark"></i>
                </span>
                <span>Delete all {{ context_object_name }} </span>
            </span>
        </button>
    {% endif %}
</div>

{% include detail_template %}
@@ -1,45 +0,0 @@ (file deleted)
{% include 'partials/notify.html' %}
{% if page_title is not None %}
    <h1 class="title is-4">{{ page_title }}</h1>
{% endif %}
{% if page_subtitle is not None %}
    <h1 class="subtitle">{{ page_subtitle }}</h1>
{% endif %}
<div class="buttons">

    {% if submit_url is not None %}
        <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-get="{{ submit_url }}"
            hx-trigger="click"
            hx-target="#modals-here"
            class="button">
            <span class="icon-text">
                <span class="icon">
                    <i class="fa-solid fa-plus"></i>
                </span>
                <span>{{ title_singular }}</span>
            </span>
        </button>
    {% endif %}
    {% if delete_all_url is not None %}
        <button
            hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
            hx-delete="{{ delete_all_url }}"
            hx-trigger="click"
            hx-target="#modals-here"
            hx-swap="innerHTML"
            hx-confirm="Are you sure you wish to delete all {{ context_object_name }}?"
            class="button">
            <span class="icon-text">
                <span class="icon">
                    <i class="fa-solid fa-xmark"></i>
                </span>
                <span>Delete all {{ context_object_name }} </span>
            </span>
        </button>
    {% endif %}
</div>

{% include list_template %}
@@ -1,13 +1,15 @@
 {% load static %}

-{% include 'partials/notify.html' %}
+{% include 'mixins/partials/notify.html' %}
 {% if cache is not None %}
     <span class="icon has-tooltip-bottom" data-tooltip="Cached">
         <i class="fa-solid fa-database"></i>
     </span>
 {% endif %}

-fetched {{ table.data|length }} hits in {{ took }}ms
+fetched {{ table.data|length }}
+{% if params.rule is None %} hits {% else %} rule hits for {{ params.rule }}{% endif %}
+in {{ took }}ms

 {% if exemption is not None %}
     <span class="icon has-tooltip-bottom" data-tooltip="God mode">
@@ -258,7 +258,7 @@
     id="sentiment_graph_switch"
     type="checkbox"
     class="switch is-rounded is-info"
-    name="show_sentiment"
+    name="graph"
     data-script="on click toggle .is-hidden on #sentiment-container">

 <label
@@ -341,6 +341,8 @@
             </div>
         </div>
     </div>

+    {% if params.rule is None %}
     <div class="column is-narrow rounded-tooltip">
         <div class="field has-addons">
             <div class="control has-icons-left">
@@ -388,6 +390,8 @@
         <span class="tooltiptext tag is-danger is-light">No access</span>
     {% endif %}
     </div>
+    {% endif %}

 </div>
 </div>
 <div class="block">
@@ -404,4 +408,9 @@
         value="{{ params.tags }}">
 </div>
 <div class="is-hidden"></div>
+{% if params.rule is not None %}
+    <div style="display:none;">
+        <input name="rule" value="{{ params.rule }}">
+    </div>
+{% endif %}
 </form>
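The hidden rule input round-trips the selected rule through every form resubmission, which is what keeps the drilldown pinned to one rule's hits. A minimal sketch of the receiving side, assuming a view that folds POST data into params (the helper name is hypothetical):

def parse_drilldown_params(request):
    params = request.POST.dict()
    rule = params.get("rule")  # UUID string, or None for a normal search
    return params, rule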
@@ -1,4 +1,4 @@
-{% extends 'wm/window.html' %}
+{% extends 'mixins/wm/window.html' %}

 {% block heading %}
     Drilldown
@@ -1,20 +0,0 @@ (file deleted)
{% load static %}

<script src="{% static 'modal.js' %}"></script>
{% block scripts %}
{% endblock %}

{% block styles %}
{% endblock %}

<div id="modal" class="modal is-active is-clipped">
    <div class="modal-background"></div>
    <div class="modal-content">
        <div class="box">
            {% block modal_content %}
                {% include window_content %}
            {% endblock %}
            {% include 'partials/close-modal.html' %}
        </div>
    </div>
</div>
@@ -1,6 +0,0 @@ (file deleted)
{% extends "base.html" %}


{% block content %}
    {% include window_content %}
{% endblock %}
@@ -1,17 +0,0 @@ (file deleted)
<nav class="panel">
    <p class="panel-heading" style="padding: .2em; line-height: .5em;">
        <i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
        {% block close_button %}
            {% include 'partials/close-window.html' %}
        {% endblock %}
        {% block heading %}
        {% endblock %}
    </p>
    <article class="panel-block is-active">
        <div class="control">
            {% block panel_content %}
            {% endblock %}
        </div>
    </article>
</nav>
@@ -1,37 +0,0 @@ (file deleted)
<div id="widget">
    <div id="widget-{{ unique }}" class="grid-stack-item" {% block widget_options %}gs-w="10" gs-h="1" gs-y="10" gs-x="1"{% endblock %}>
        <div class="grid-stack-item-content">

            <nav class="panel">
                <p class="panel-heading" style="padding: .2em; line-height: .5em;">
                    <i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
                    {% block close_button %}
                        {% include 'partials/close-widget.html' %}
                    {% endblock %}
                    <i
                        class="fa-solid fa-arrows-minimize has-text-grey-light float-right"
                        onclick='grid.compact();'></i>
                    {% block heading %}
                        {{ title }}
                    {% endblock %}
                </p>
                <article class="panel-block is-active">
                    <div class="control">
                        {% block panel_content %}
                            {% include window_content %}
                        {% endblock %}
                    </div>
                </article>
            </nav>
        </div>
    </div>
</div>

<script>
    {% block custom_script %}
    {% endblock %}
    var widget_event = new Event('load-widget');
    document.dispatchEvent(widget_event);
</script>
{% block custom_end %}
{% endblock %}
@@ -1,10 +0,0 @@ (file deleted)
<magnet-block attract-distance="10" align-to="outer|center" class="floating-window">
    {% extends 'wm/panel.html' %}
    {% block heading %}
        {{ title }}
    {% endblock %}

    {% block panel_content %}
        {% include window_content %}
    {% endblock %}
</magnet-block>
core/templatetags/pretty.py (new file, 15 lines)
@@ -0,0 +1,15 @@
import orjson
from django import template

register = template.Library()


@register.filter
def pretty(data):
    prettified = orjson.dumps(data, option=orjson.OPT_INDENT_2).decode("utf-8")
    if prettified.startswith("{"):
        prettified = prettified[1:]
    if prettified.endswith("}"):
        prettified = prettified[:-1]

    return prettified
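The filter strips the outer braces so the <pre class="small-field"> cell in the results table shows only the key/value body. Called directly (sample data made up):

from core.templatetags.pretty import pretty

print(pretty({"nick": "example", "net": "libera"}))
# prints the indented body without the surrounding braces:
#   "nick": "example",
#   "net": "libera"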
core/templatetags/splitstr.py (new file, 10 lines)
@@ -0,0 +1,10 @@
from django import template

register = template.Library()


@register.filter
def splitstr(value, arg):
    if type(value) == int:
        raise Exception(f"Attempt to split {value} with separator {arg}")
    return value.split(arg)
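In the results table this is meant to split ISO timestamps on 'T' (the commented-out match_ts block above uses it that way). Called directly:

from core.templatetags.splitstr import splitstr

date, time = splitstr("2023-02-13T21:03:00", "T")
# date == "2023-02-13", time == "21:03:00"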
@@ -43,7 +43,6 @@ class ColoredFormatter(logging.Formatter):


 def get_logger(name):
-
     # Define the logging format
     FORMAT = "%(asctime)s %(levelname)18s $BOLD%(name)13s$RESET - %(message)s"
     COLOR_FORMAT = formatter_message(FORMAT, True)
@@ -1,753 +0,0 @@
|
|||||||
# import re
|
|
||||||
# from base64 import b64encode
|
|
||||||
# from random import randint
|
|
||||||
|
|
||||||
# from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
|
|
||||||
# from cryptography.hazmat.primitives.ciphers.modes import ECB
|
|
||||||
# from django.conf import settings
|
|
||||||
# from siphashc import siphash
|
|
||||||
# from sortedcontainers import SortedSet
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
# from core import r
|
|
||||||
from django.core.exceptions import ImproperlyConfigured
|
|
||||||
from django.core.paginator import Paginator
|
|
||||||
from django.db.models import QuerySet
|
|
||||||
from django.http import Http404, HttpResponse, HttpResponseBadRequest
|
|
||||||
from django.urls import reverse
|
|
||||||
from django.views.generic.detail import DetailView
|
|
||||||
from django.views.generic.edit import CreateView, DeleteView, UpdateView
|
|
||||||
from django.views.generic.list import ListView
|
|
||||||
from rest_framework.parsers import FormParser
|
|
||||||
|
|
||||||
from core.util import logs
|
|
||||||
|
|
||||||
log = logs.get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class RestrictedViewMixin:
|
|
||||||
"""
|
|
||||||
This mixin overrides two helpers in order to pass the user object to the filters.
|
|
||||||
get_queryset alters the objects returned for list views.
|
|
||||||
get_form_kwargs passes the request object to the form class. Remaining permissions
|
|
||||||
checks are in forms.py
|
|
||||||
"""
|
|
||||||
|
|
||||||
allow_empty = True
|
|
||||||
queryset = None
|
|
||||||
model = None
|
|
||||||
paginate_by = None
|
|
||||||
paginate_orphans = 0
|
|
||||||
context_object_name = None
|
|
||||||
paginator_class = Paginator
|
|
||||||
page_kwarg = "page"
|
|
||||||
ordering = None
|
|
||||||
|
|
||||||
def get_queryset(self, **kwargs):
|
|
||||||
"""
|
|
||||||
This function is overriden to filter the objects by the requesting user.
|
|
||||||
"""
|
|
||||||
if self.queryset is not None:
|
|
||||||
queryset = self.queryset
|
|
||||||
if isinstance(queryset, QuerySet):
|
|
||||||
# queryset = queryset.all()
|
|
||||||
queryset = queryset.filter(user=self.request.user)
|
|
||||||
elif self.model is not None:
|
|
||||||
queryset = self.model._default_manager.filter(user=self.request.user)
|
|
||||||
else:
|
|
||||||
raise ImproperlyConfigured(
|
|
||||||
"%(cls)s is missing a QuerySet. Define "
|
|
||||||
"%(cls)s.model, %(cls)s.queryset, or override "
|
|
||||||
"%(cls)s.get_queryset()." % {"cls": self.__class__.__name__}
|
|
||||||
)
|
|
||||||
if hasattr(self, "get_ordering"):
|
|
||||||
ordering = self.get_ordering()
|
|
||||||
if ordering:
|
|
||||||
if isinstance(ordering, str):
|
|
||||||
ordering = (ordering,)
|
|
||||||
queryset = queryset.order_by(*ordering)
|
|
||||||
|
|
||||||
return queryset
|
|
||||||
|
|
||||||
def get_form_kwargs(self):
|
|
||||||
"""Passes the request object to the form class.
|
|
||||||
This is necessary to only display members that belong to a given user"""
|
|
||||||
|
|
||||||
kwargs = super().get_form_kwargs()
|
|
||||||
kwargs["request"] = self.request
|
|
||||||
return kwargs
|
|
||||||
|
|
||||||
|
|
||||||
class ObjectNameMixin(object):
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
if self.model is None:
|
|
||||||
self.title = self.context_object_name.title()
|
|
||||||
self.title_singular = self.context_object_name_singular.title()
|
|
||||||
else:
|
|
||||||
self.title_singular = self.model._meta.verbose_name.title() # Hook
|
|
||||||
self.context_object_name_singular = self.title_singular.lower() # hook
|
|
||||||
self.title = self.model._meta.verbose_name_plural.title() # Hooks
|
|
||||||
self.context_object_name = self.title.lower() # hooks
|
|
||||||
|
|
||||||
self.context_object_name = self.context_object_name.replace(" ", "")
|
|
||||||
self.context_object_name_singular = (
|
|
||||||
self.context_object_name_singular.replace(" ", "")
|
|
||||||
)
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class ObjectList(RestrictedViewMixin, ObjectNameMixin, ListView):
|
|
||||||
allowed_types = ["modal", "widget", "window", "page"]
|
|
||||||
window_content = "window-content/objects.html"
|
|
||||||
list_template = None
|
|
||||||
|
|
||||||
page_title = None
|
|
||||||
page_subtitle = None
|
|
||||||
|
|
||||||
list_url_name = None
|
|
||||||
# WARNING: TAKEN FROM locals()
|
|
||||||
list_url_args = ["type"]
|
|
||||||
|
|
||||||
submit_url_name = None
|
|
||||||
|
|
||||||
delete_all_url_name = None
|
|
||||||
widget_options = None
|
|
||||||
|
|
||||||
# copied from BaseListView
|
|
||||||
def get(self, request, *args, **kwargs):
|
|
||||||
type = kwargs.get("type", None)
|
|
||||||
if not type:
|
|
||||||
return HttpResponseBadRequest("No type specified")
|
|
||||||
if type not in self.allowed_types:
|
|
||||||
return HttpResponseBadRequest("Invalid type specified")
|
|
||||||
|
|
||||||
self.request = request
|
|
||||||
self.object_list = self.get_queryset(**kwargs)
|
|
||||||
if isinstance(self.object_list, HttpResponse):
|
|
||||||
return self.object_list
|
|
||||||
if isinstance(self.object_list, HttpResponseBadRequest):
|
|
||||||
return self.object_list
|
|
||||||
allow_empty = self.get_allow_empty()
|
|
||||||
|
|
||||||
self.template_name = f"wm/{type}.html"
|
|
||||||
unique = str(uuid.uuid4())[:8]
|
|
||||||
|
|
||||||
list_url_args = {}
|
|
||||||
for arg in self.list_url_args:
|
|
||||||
if arg in locals():
|
|
||||||
list_url_args[arg] = locals()[arg]
|
|
||||||
elif arg in kwargs:
|
|
||||||
list_url_args[arg] = kwargs[arg]
|
|
||||||
|
|
||||||
orig_type = type
|
|
||||||
if type == "page":
|
|
||||||
type = "modal"
|
|
||||||
|
|
||||||
if not allow_empty:
|
|
||||||
# When pagination is enabled and object_list is a queryset,
|
|
||||||
# it's better to do a cheap query than to load the unpaginated
|
|
||||||
# queryset in memory.
|
|
||||||
if self.get_paginate_by(self.object_list) is not None and hasattr(
|
|
||||||
self.object_list, "exists"
|
|
||||||
):
|
|
||||||
is_empty = not self.object_list.exists()
|
|
||||||
else:
|
|
||||||
is_empty = not self.object_list
|
|
||||||
if is_empty:
|
|
||||||
raise Http404("Empty list")
|
|
||||||
|
|
||||||
context = self.get_context_data()
|
|
||||||
context["title"] = self.title + f" ({type})"
|
|
||||||
context["title_singular"] = self.title_singular
|
|
||||||
context["unique"] = unique
|
|
||||||
context["window_content"] = self.window_content
|
|
||||||
context["list_template"] = self.list_template
|
|
||||||
context["page_title"] = self.page_title
|
|
||||||
context["page_subtitle"] = self.page_subtitle
|
|
||||||
context["type"] = type
|
|
||||||
context["context_object_name"] = self.context_object_name
|
|
||||||
context["context_object_name_singular"] = self.context_object_name_singular
|
|
||||||
|
|
||||||
if self.submit_url_name is not None:
|
|
||||||
context["submit_url"] = reverse(self.submit_url_name, kwargs={"type": type})
|
|
||||||
|
|
||||||
if self.list_url_name is not None:
|
|
||||||
context["list_url"] = reverse(self.list_url_name, kwargs=list_url_args)
|
|
||||||
|
|
||||||
if self.delete_all_url_name:
|
|
||||||
context["delete_all_url"] = reverse(self.delete_all_url_name)
|
|
||||||
if self.widget_options:
|
|
||||||
context["widget_options"] = self.widget_options
|
|
||||||
|
|
||||||
# Return partials for HTMX
|
|
||||||
if self.request.htmx:
|
|
||||||
if request.headers["HX-Target"] == self.context_object_name + "-table":
|
|
||||||
self.template_name = self.list_template
|
|
||||||
elif orig_type == "page":
|
|
||||||
self.template_name = self.list_template
|
|
||||||
else:
|
|
||||||
context["window_content"] = self.list_template
|
|
||||||
return self.render_to_response(context)
|
|
||||||
|
|
||||||
|
|
||||||
class ObjectCreate(RestrictedViewMixin, ObjectNameMixin, CreateView):
    allowed_types = ["modal", "widget", "window", "page"]
    window_content = "window-content/object-form.html"
    parser_classes = [FormParser]

    page_title = None
    page_subtitle = None

    model = None
    submit_url_name = None
    submit_url_args = ["type"]

    request = None

    # Whether to hide the cancel button in the form
    hide_cancel = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.title = "Create " + self.context_object_name_singular

    def post_save(self, obj):
        pass

    def form_valid(self, form):
        obj = form.save(commit=False)
        if self.request is None:
            raise Exception("Request is None")
        obj.user = self.request.user
        obj.save()
        form.save_m2m()
        self.post_save(obj)
        context = {"message": "Object created", "class": "success"}
        response = self.render_to_response(context)
        response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
        return response

    def form_invalid(self, form):
        """If the form is invalid, render the invalid form."""
        return self.get(self.request, **self.kwargs, form=form)

    def get(self, request, *args, **kwargs):
        type = kwargs.get("type", None)
        if not type:
            return HttpResponseBadRequest("No type specified")
        if type not in self.allowed_types:
            return HttpResponseBadRequest("Invalid type specified")
        self.template_name = f"wm/{type}.html"
        unique = str(uuid.uuid4())[:8]

        self.request = request
        self.kwargs = kwargs

        if type == "widget":
            self.hide_cancel = True

        if type == "page":
            type = "modal"

        self.object = None

        submit_url_args = {}
        for arg in self.submit_url_args:
            if arg in locals():
                submit_url_args[arg] = locals()[arg]
            elif arg in kwargs:
                submit_url_args[arg] = kwargs[arg]
        submit_url = reverse(self.submit_url_name, kwargs=submit_url_args)

        context = self.get_context_data()
        form = kwargs.get("form", None)
        if form:
            context["form"] = form
        context["unique"] = unique
        context["window_content"] = self.window_content
        context["context_object_name"] = self.context_object_name
        context["context_object_name_singular"] = self.context_object_name_singular
        context["submit_url"] = submit_url
        context["type"] = type
        context["hide_cancel"] = self.hide_cancel
        if self.page_title:
            context["page_title"] = self.page_title
        if self.page_subtitle:
            context["page_subtitle"] = self.page_subtitle
        response = self.render_to_response(context)
        # response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
        return response

    def post(self, request, *args, **kwargs):
        self.request = request
        self.template_name = "partials/notify.html"
        return super().post(request, *args, **kwargs)

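# Editor's sketch: subclassing ObjectCreate only requires a model, a form and
# a submit URL name; post_save() is the hook that runs after the object and
# its m2m relations are saved. BookmarkForm and the URL name are hypothetical.
class BookmarkCreate(ObjectCreate):
    model = Bookmark
    form_class = BookmarkForm  # hypothetical ModelForm

    submit_url_name = "bookmark_create"

    def post_save(self, obj):
        # e.g. queue follow-up processing for the newly created object
        pass
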
class ObjectRead(RestrictedViewMixin, ObjectNameMixin, DetailView):
    allowed_types = ["modal", "widget", "window", "page"]
    window_content = "window-content/object.html"
    detail_template = "partials/generic-detail.html"

    page_title = None
    page_subtitle = None

    model = None
    # submit_url_name = None

    detail_url_name = None
    # WARNING: TAKEN FROM locals()
    detail_url_args = ["type"]

    request = None

    def get(self, request, *args, **kwargs):
        type = kwargs.get("type", None)
        if not type:
            return HttpResponseBadRequest("No type specified")
        if type not in self.allowed_types:
            return HttpResponseBadRequest()
        self.template_name = f"wm/{type}.html"
        unique = str(uuid.uuid4())[:8]

        detail_url_args = {}
        for arg in self.detail_url_args:
            if arg in locals():
                detail_url_args[arg] = locals()[arg]
            elif arg in kwargs:
                detail_url_args[arg] = kwargs[arg]

        self.request = request
        self.object = self.get_object(**kwargs)
        if isinstance(self.object, HttpResponse):
            return self.object

        orig_type = type
        if type == "page":
            type = "modal"

        context = self.get_context_data()

        context["title"] = self.title + f" ({type})"
        context["title_singular"] = self.title_singular
        context["unique"] = unique
        context["window_content"] = self.window_content
        context["detail_template"] = self.detail_template
        if self.page_title:
            context["page_title"] = self.page_title
        if self.page_subtitle:
            context["page_subtitle"] = self.page_subtitle
        context["type"] = type
        context["context_object_name"] = self.context_object_name
        context["context_object_name_singular"] = self.context_object_name_singular

        if self.detail_url_name is not None:
            context["detail_url"] = reverse(
                self.detail_url_name, kwargs=detail_url_args
            )

        # Return partials for HTMX
        if self.request.htmx:
            if request.headers["HX-Target"] == self.context_object_name + "-info":
                self.template_name = self.detail_template
            elif orig_type == "page":
                self.template_name = self.detail_template
            else:
                context["window_content"] = self.detail_template

        return self.render_to_response(context)

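# Editor's sketch: the locals()-based resolution flagged by the WARNING above,
# reproduced standalone. Each name in detail_url_args is looked up in get()'s
# local variables first, then in the URL kwargs.
def resolve_url_args(arg_names, local_vars, kwargs):
    resolved = {}
    for arg in arg_names:
        if arg in local_vars:
            resolved[arg] = local_vars[arg]
        elif arg in kwargs:
            resolved[arg] = kwargs[arg]
    return resolved


# With detail_url_args = ["type", "pk"]: "type" comes from a local variable,
# "pk" falls through to the URL kwargs.
assert resolve_url_args(["type", "pk"], {"type": "modal"}, {"pk": 7}) == {
    "type": "modal",
    "pk": 7,
}
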
class ObjectUpdate(RestrictedViewMixin, ObjectNameMixin, UpdateView):
    allowed_types = ["modal", "widget", "window", "page"]
    window_content = "window-content/object-form.html"
    parser_classes = [FormParser]

    page_title = None
    page_subtitle = None

    model = None
    submit_url_name = None
    submit_url_args = ["type", "pk"]

    request = None

    # Whether pk is required in the get request
    pk_required = True

    # Whether to hide the cancel button in the form
    hide_cancel = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.title = "Update " + self.context_object_name_singular

    def post_save(self, obj):
        pass

    def form_valid(self, form):
        obj = form.save(commit=False)
        if self.request is None:
            raise Exception("Request is None")
        obj.save()
        form.save_m2m()
        self.post_save(obj)
        context = {"message": "Object updated", "class": "success"}
        response = self.render_to_response(context)
        response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
        return response

    def form_invalid(self, form):
        """If the form is invalid, render the invalid form."""
        return self.get(self.request, **self.kwargs, form=form)

    def get(self, request, *args, **kwargs):
        self.request = request
        type = kwargs.get("type", None)
        pk = kwargs.get("pk", None)
        if not type:
            return HttpResponseBadRequest("No type specified")
        if not pk:
            if self.pk_required:
                return HttpResponseBadRequest("No pk specified")
        if type not in self.allowed_types:
            return HttpResponseBadRequest("Invalid type specified")
        self.template_name = f"wm/{type}.html"
        unique = str(uuid.uuid4())[:8]
        if type == "widget":
            self.hide_cancel = True

        if type == "page":
            type = "modal"

        self.object = self.get_object()

        submit_url_args = {}
        for arg in self.submit_url_args:
            if arg in locals():
                submit_url_args[arg] = locals()[arg]
            elif arg in kwargs:
                submit_url_args[arg] = kwargs[arg]
        submit_url = reverse(self.submit_url_name, kwargs=submit_url_args)

        context = self.get_context_data()
        form = kwargs.get("form", None)
        if form:
            context["form"] = form
        context["title"] = self.title + f" ({type})"
        context["title_singular"] = self.title_singular
        context["unique"] = unique
        context["window_content"] = self.window_content
        context["context_object_name"] = self.context_object_name
        context["context_object_name_singular"] = self.context_object_name_singular
        context["submit_url"] = submit_url
        context["type"] = type
        context["hide_cancel"] = self.hide_cancel
        if self.page_title:
            context["page_title"] = self.page_title
        if self.page_subtitle:
            context["page_subtitle"] = self.page_subtitle
        response = self.render_to_response(context)
        # response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
        return response

    def post(self, request, *args, **kwargs):
        self.request = request
        self.template_name = "partials/notify.html"
        return super().post(request, *args, **kwargs)

class ObjectDelete(RestrictedViewMixin, ObjectNameMixin, DeleteView):
    model = None
    template_name = "partials/notify.html"

    # Overridden to prevent the success URL from being used
    def delete(self, request, *args, **kwargs):
        """
        Call the delete() method on the fetched object and return a
        notification partial instead of redirecting to a success URL.
        """
        self.object = self.get_object()
        # success_url = self.get_success_url()
        self.object.delete()
        context = {"message": "Object deleted", "class": "success"}
        response = self.render_to_response(context)
        response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
        return response

    # This will be used in newer Django versions, until then we get a warning
    def form_valid(self, form):
        """
        Call the delete() method on the fetched object.
        """
        self.object = self.get_object()
        self.object.delete()
        context = {"message": "Object deleted", "class": "success"}
        response = self.render_to_response(context)
        response["HX-Trigger"] = f"{self.context_object_name_singular}Event"
        return response

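# Editor's sketch: these mixins assume URL patterns that carry a "type"
# segment (plus "pk" for update/delete), mirroring the rule_* URL names used
# elsewhere in this changeset. The bookmark paths and views are hypothetical.
from django.urls import path

urlpatterns = [
    path("bookmarks/<str:type>/", BookmarkList.as_view(), name="bookmarks"),
    path(
        "bookmarks/<str:type>/create/",
        BookmarkCreate.as_view(),
        name="bookmark_create",
    ),
    path(
        "bookmarks/<str:type>/update/<int:pk>/",
        BookmarkUpdate.as_view(),  # hypothetical ObjectUpdate subclass
        name="bookmark_update",
    ),
    path(
        "bookmarks/<str:type>/delete/<int:pk>/",
        BookmarkDelete.as_view(),  # hypothetical ObjectDelete subclass
        name="bookmark_delete",
    ),
]
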
# from random import randint
# from timeit import timeit
# entries = 10000
# a = [
#     {'ts': "sss", 'msg': randint(1, 2), str(randint(1, 2)): \
#         randint(1, 2)} for x in range(entries)
# ]
# kk = ["msg", "nick"]
# call = lambda: dedup_list(a, kk)
# #print(timeit(call, number=10))
# print(dedup_list(a, kk))

# # sh-5.1$ python helpers.py
# # 1.0805372429895215


# def base36encode(number, alphabet="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
#     """Converts an integer to a base36 string."""
#     if not isinstance(number, (int)):
#         raise TypeError("number must be an integer")

#     base36 = ""
#     sign = ""

#     if number < 0:
#         sign = "-"
#         number = -number

#     if 0 <= number < len(alphabet):
#         return sign + alphabet[number]

#     while number != 0:
#         number, i = divmod(number, len(alphabet))
#         base36 = alphabet[i] + base36

#     return sign + base36


# def base36decode(number):
#     return int(number, 36)


# def randomise_list(user, data):
#     """
#     Randomise data in a list of dictionaries.
#     """
#     if user.has_perm("core.bypass_randomisation"):
#         return
#     if isinstance(data, list):
#         for index, item in enumerate(data):
#             for key, value in item.items():
#                 if key in settings.RANDOMISE_FIELDS:
#                     if isinstance(value, int):
#                         min_val = value - (value * settings.RANDOMISE_RATIO)
#                         max_val = value + (value * settings.RANDOMISE_RATIO)
#                         new_val = randint(int(min_val), int(max_val))
#                         data[index][key] = new_val
#     elif isinstance(data, dict):
#         for key, value in data.items():
#             # if key in settings.RANDOMISE_FIELDS:
#             if isinstance(value, int):
#                 min_val = value - (value * settings.RANDOMISE_RATIO)
#                 max_val = value + (value * settings.RANDOMISE_RATIO)
#                 new_val = randint(int(min_val), int(max_val))
#                 data[key] = new_val


# def obfuscate_list(user, data):
#     """
#     Obfuscate data in a list of dictionaries.
#     """
#     if user.has_perm("core.bypass_obfuscation"):
#         return
#     for index, item in enumerate(data):
#         for key, value in item.items():
#             # Obfuscate a ratio of the field
#             if key in settings.OBFUSCATE_FIELDS:
#                 length = len(value) - 1
#                 split = int(length * settings.OBFUSCATE_KEEP_RATIO)
#                 first_part = value[:split]
#                 second_part = value[split:]
#                 second_len = len(second_part)
#                 second_part = "*" * second_len
#                 data[index][key] = first_part + second_part
#             # Obfuscate value based on fields
#             # Example: 2022-02-02 -> 2022-02-**
#             #          14:11:12 -> 14:11:**
#             elif key in settings.OBFUSCATE_FIELDS_SEP:
#                 if "-" in value:
#                     sep = "-"
#                     value_spl = value.split("-")
#                     hide_num = settings.OBFUSCATE_DASH_NUM
#                 elif ":" in value:
#                     sep = ":"
#                     value_spl = value.split(":")
#                     hide_num = settings.OBFUSCATE_COLON_NUM

#                 first_part = value_spl[:hide_num]
#                 second_part = value_spl[hide_num:]
#                 for index_x, x in enumerate(second_part):
#                     x_len = len(x)
#                     second_part[index_x] = "*" * x_len
#                 result = sep.join([*first_part, *second_part])
#                 data[index][key] = result
#     for key in settings.COMBINE_FIELDS:
#         for index, item in enumerate(data):
#             if key in item:
#                 k1, k2 = settings.COMBINE_FIELDS[key]
#                 if k1 in item and k2 in item:
#                     data[index][key] = item[k1] + item[k2]


# def hash_list(user, data, hash_keys=False):
#     """
#     Hash a list of dicts or a list with SipHash42.
#     """
#     if user.has_perm("core.bypass_hashing"):
#         return
#     cache = "cache.hash"
#     hash_table = {}
#     if isinstance(data, dict):
#         data_copy = [{x: data[x]} for x in data]
#     else:
#         data_copy = type(data)((data))
#     for index, item in enumerate(data_copy):
#         if "src" in item:
#             if item["src"] in settings.SAFE_SOURCES:
#                 continue
#         if isinstance(item, dict):
#             for key, value in list(item.items()):
#                 if (
#                     key not in settings.WHITELIST_FIELDS
#                     and key not in settings.NO_OBFUSCATE_PARAMS
#                 ):
#                     if isinstance(value, int):
#                         value = str(value)
#                     if isinstance(value, bool):
#                         continue
#                     if value is None:
#                         continue
#                     if hash_keys:
#                         hashed = siphash(settings.HASHING_KEY, key)
#                     else:
#                         hashed = siphash(settings.HASHING_KEY, value)
#                     encoded = base36encode(hashed)
#                     if encoded not in hash_table:
#                         if hash_keys:
#                             hash_table[encoded] = key
#                         else:
#                             hash_table[encoded] = value
#                     if hash_keys:
#                         # Rename the dict key
#                         data[encoded] = data.pop(key)
#                     else:
#                         data[index][key] = encoded
#         elif isinstance(item, str):
#             hashed = siphash(settings.HASHING_KEY, item)
#             encoded = base36encode(hashed)
#             if encoded not in hash_table:
#                 hash_table[encoded] = item
#             data[index] = encoded
#     if hash_table:
#         r.hmset(cache, hash_table)


# def hash_lookup(user, data_dict, supplementary_data=None):
#     cache = "cache.hash"
#     hash_list = SortedSet()
#     denied = []
#     for key, value in list(data_dict.items()):
#         if "source" in data_dict:
#             if data_dict["source"] in settings.SAFE_SOURCES:
#                 continue
#         if "src" in data_dict:
#             if data_dict["src"] in settings.SAFE_SOURCES:
#                 continue
#         if supplementary_data:
#             if "source" in supplementary_data:
#                 if supplementary_data["source"] in settings.SAFE_SOURCES:
#                     continue
#         if key in settings.SEARCH_FIELDS_DENY:
#             if not user.has_perm("core.bypass_hashing"):
#                 data_dict[key] = SearchDenied(key=key, value=data_dict[key])
#                 denied.append(data_dict[key])
#         if (
#             key not in settings.WHITELIST_FIELDS
#             and key not in settings.NO_OBFUSCATE_PARAMS
#         ):
#             if not value:
#                 continue
#             # hashes = re.findall("\|([^\|]*)\|", value)  # noqa
#             if isinstance(value, str):
#                 hashes = re.findall("[A-Z0-9]{12,13}", value)
#             elif isinstance(value, dict):
#                 hashes = []
#                 for key, value in value.items():
#                     if not value:
#                         continue
#                     hashes_iter = re.findall("[A-Z0-9]{12,13}", value)
#                     for h in hashes_iter:
#                         hashes.append(h)
#             if not hashes:
#                 # Otherwise the user could inject plaintext search queries
#                 if not user.has_perm("core.bypass_hashing"):
#                     data_dict[key] = SearchDenied(key=key, value=data_dict[key])
#                     denied.append(data_dict[key])
#                     continue
#             else:
#                 # There are hashes here but there shouldn't be!
#                 if key in settings.TAG_SEARCH_DENY:
#                     data_dict[key] = LookupDenied(key=key, value=data_dict[key])
#                     denied.append(data_dict[key])
#                     continue

#             for hash in hashes:
#                 hash_list.add(hash)

#     if hash_list:
#         values = r.hmget(cache, *hash_list)
#         if not values:
#             return
#         for index, val in enumerate(values):
#             if val is None:
#                 values[index] = b"ERR"
#         values = [x.decode() for x in values]
#         total = dict(zip(hash_list, values))
#         for key in data_dict.keys():
#             for hash in total:
#                 if data_dict[key]:
#                     if isinstance(data_dict[key], str):
#                         if hash in data_dict[key]:
#                             data_dict[key] = data_dict[key].replace(
#                                 f"{hash}", total[hash]
#                             )
#                     elif isinstance(data_dict[key], dict):
#                         for k2, v2 in data_dict[key].items():
#                             if hash in v2:
#                                 data_dict[key][k2] = v2.replace(f"{hash}", total[hash])
#     return denied


# def encrypt_list(user, data, secret):
#     if user.has_perm("core.bypass_encryption"):
#         return
#     cipher = Cipher(algorithms.AES(secret), ECB())
#     for index, item in enumerate(data):
#         for key, value in item.items():
#             if key not in settings.WHITELIST_FIELDS:
#                 encryptor = cipher.encryptor()
#                 if isinstance(value, int):
#                     value = str(value)
#                 if isinstance(value, bool):
#                     continue
#                 if value is None:
#                     continue
#                 decoded = value.encode("utf8", "replace")
#                 length = 16 - (len(decoded) % 16)
#                 decoded += bytes([length]) * length
#                 ct = encryptor.update(decoded) + encryptor.finalize()
#                 final_str = b64encode(ct)
#                 data[index][key] = final_str.decode("utf-8", "replace")
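# Editor's sketch: if the base36 helpers above were ever revived, a quick
# round-trip check shows the intended encoding behaviour. Reproduced here
# standalone (pure Python, no project imports) so it runs on its own.
def base36encode(number, alphabet="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
    if not isinstance(number, int):
        raise TypeError("number must be an integer")
    base36, sign = "", ""
    if number < 0:
        sign, number = "-", -number
    if 0 <= number < len(alphabet):
        return sign + alphabet[number]
    while number != 0:
        number, i = divmod(number, len(alphabet))
        base36 = alphabet[i] + base36
    return sign + base36


def base36decode(number):
    return int(number, 36)


assert base36decode(base36encode(1234567890)) == 1234567890
assert base36encode(0) == "0"
assert base36encode(-36) == "-10"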
core/views/manage/monolith/__init__.py (new file, 0 lines)

core/views/manage/monolith/stats.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from django.shortcuts import render
from django.views import View
from rest_framework.parsers import FormParser
from rest_framework.views import APIView
from core.db.storage import db
from mixins.views import ObjectRead

from core.views.manage.permissions import SuperUserRequiredMixin


class MonolithStats(SuperUserRequiredMixin, View):
    template_name = "manage/monolith/stats/index.html"

    def get(self, request):
        return render(request, self.template_name)


class MonolithDBStats(SuperUserRequiredMixin, ObjectRead):
    detail_template = "manage/monolith/stats/overview.html"

    context_object_name_singular = "Status"
    context_object_name = "Status"

    detail_url_name = "monolith_stats_db"
    detail_url_args = ["type"]

    def get_object(self, **kwargs):
        search_query = "SHOW TABLE main STATUS"

        stats = db.run_query(
            self.request.user,
            search_query=search_query,
            path="sql?mode=raw",
            raw=True,
            # method="get",
        )

        return stats
@@ -121,7 +121,6 @@ class ThresholdIRCNetworkRelayDel(SuperUserRequiredMixin, APIView):
         """
         deleted = threshold.del_relay(net, num)
         if deleted["success"]:
-
             message = f"Deleted relay {num}"
             message_class = "success"
         else:
@@ -150,7 +149,6 @@ class ThresholdIRCNetworkRelayProvision(SuperUserRequiredMixin, APIView):
         """
        provisioned = threshold.irc_provision_relay(net, num)
         if provisioned["success"]:
-
             message = f"Provisioned relay {num}"
             message_class = "success"
         else:
@@ -179,7 +177,6 @@ class ThresholdIRCNetworkRelayAuth(SuperUserRequiredMixin, APIView):
         """
         provisioned = threshold.irc_enable_auth(net, num)
         if provisioned["success"]:
-
             message = f"Enabled authentication on relay {num}"
             message_class = "success"
         else:
@@ -1,8 +1,12 @@
-from django.contrib.auth.mixins import LoginRequiredMixin
+from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
+from django.shortcuts import render
+from mixins.views import ObjectCreate, ObjectDelete, ObjectList, ObjectUpdate
+from rest_framework.views import APIView

+from core.db.storage import db
 from core.forms import NotificationRuleForm, NotificationSettingsForm
+from core.lib.rules import NotificationRuleData
 from core.models import NotificationRule, NotificationSettings
-from core.views.helpers import ObjectCreate, ObjectDelete, ObjectList, ObjectUpdate


 # Notifications - we create a new notification settings object if there isn't one
@@ -33,7 +37,7 @@ class NotificationsUpdate(LoginRequiredMixin, ObjectUpdate):
 class RuleList(LoginRequiredMixin, ObjectList):
     list_template = "partials/rule-list.html"
     model = NotificationRule
-    page_title = "List of notification rules."
+    page_title = "List of notification rules"

     list_url_name = "rules"
     list_url_args = ["type"]
@@ -41,19 +45,46 @@ class RuleList(LoginRequiredMixin, ObjectList):
     submit_url_name = "rule_create"


-class RuleCreate(LoginRequiredMixin, ObjectCreate):
+class RuleCreate(LoginRequiredMixin, PermissionRequiredMixin, ObjectCreate):
+    permission_required = "use_rules"
     model = NotificationRule
     form_class = NotificationRuleForm

     submit_url_name = "rule_create"


-class RuleUpdate(LoginRequiredMixin, ObjectUpdate):
+class RuleUpdate(LoginRequiredMixin, PermissionRequiredMixin, ObjectUpdate):
+    permission_required = "use_rules"
     model = NotificationRule
     form_class = NotificationRuleForm

     submit_url_name = "rule_update"


-class RuleDelete(LoginRequiredMixin, ObjectDelete):
+class RuleDelete(LoginRequiredMixin, PermissionRequiredMixin, ObjectDelete):
+    permission_required = "use_rules"
     model = NotificationRule
+
+
+class RuleClear(LoginRequiredMixin, PermissionRequiredMixin, APIView):
+    permission_required = "use_rules"
+
+    def post(self, request, type, pk):
+        template_name = "mixins/partials/notify.html"
+        rule = NotificationRule.objects.get(pk=pk, user=request.user)
+        if isinstance(rule.match, dict):
+            for index in rule.match:
+                rule.match[index] = None
+            rule.save()
+
+        rule_data = NotificationRuleData(rule.user, rule, db=db)
+        rule_data.clear_database_matches()
+
+        cleared_indices = ", ".join(rule.match)
+        context = {
+            "message": f"Cleared match status for indices: {cleared_indices}",
+            "class": "success",
+        }
+        response = render(request, template_name, context)
+        response["HX-Trigger"] = "notificationruleEvent"
+        return response
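# Editor's sketch: RuleClear resets every per-index match flag to None before
# wiping the stored matches. In isolation the dict manipulation looks like
# this; the sample match dict is illustrative.
match = {"main": True, "meta": False}

for index in match:
    match[index] = None

print(match)             # {'main': None, 'meta': None}
print(", ".join(match))  # "main, meta" - the indices reported back to the user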
@@ -81,15 +81,21 @@ def make_graph(results):
     graph = []
     for index, item in enumerate(results):
         date = str(index)
+        sentiment = None
+        if "meta" in item:
+            if "aggs" in item["meta"]:
+                if "avg_sentiment" in item["meta"]["aggs"]:
+                    sentiment = item["meta"]["aggs"]["avg_sentiment"]["value"]
+        else:
+            if "sentiment" in item:
+                sentiment = item["sentiment"]
         graph.append(
             {
-                "text": item.get("words_noun", None)
-                or item.get("msg", None)
-                or item.get("id"),
+                "text": item.get("msg", None) or item.get("id"),
                 "nick": item.get("nick", None),
                 "channel": item.get("channel", None),
                 "net": item.get("net", None),
-                "value": item.get("sentiment", None) or None,
+                "value": sentiment,
                 "date": date,
             }
         )
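# Editor's sketch: the new branch prefers a pre-aggregated average sentiment
# under meta.aggs and only falls back to a per-item "sentiment" field. A
# standalone check of that selection logic; the sample items are illustrative.
def pick_sentiment(item):
    sentiment = None
    if "meta" in item:
        if "aggs" in item["meta"]:
            if "avg_sentiment" in item["meta"]["aggs"]:
                sentiment = item["meta"]["aggs"]["avg_sentiment"]["value"]
    else:
        if "sentiment" in item:
            sentiment = item["sentiment"]
    return sentiment


print(pick_sentiment({"meta": {"aggs": {"avg_sentiment": {"value": 0.42}}}}))  # 0.42
print(pick_sentiment({"sentiment": -0.1}))  # -0.1
print(pick_sentiment({"meta": {}}))  # None (meta present but no aggregation)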
@@ -98,10 +104,11 @@ def make_graph(results):

 class DrilldownTableView(SingleTableView):
     table_class = DrilldownTable
-    template_name = "wm/widget.html"
+    template_name = "mixins/wm/widget.html"
     window_content = "window-content/results.html"
     # htmx_partial = "partials/"
     paginate_by = settings.DRILLDOWN_RESULTS_PER_PAGE
+    widget_options = 'gs-w="10" gs-h="1" gs-y="10" gs-x="1"'

     def common_request(self, request, **kwargs):
         extra_params = {}
@@ -112,16 +119,20 @@ class DrilldownTableView(SingleTableView):
         sizes = settings.MAIN_SIZES

         if request.GET:
+            print("GET")
             self.template_name = "index.html"
             # GET arguments in URL like ?query=xyz
             query_params = request.GET.dict()
+            print("QUERY_PARAMS GET", query_params)
             if request.htmx:
                 if request.resolver_match.url_name == "search_partial":
                     self.template_name = "partials/results_table.html"
         elif request.POST:
+            print("POST")
             query_params = request.POST.dict()
         else:
             self.template_name = "index.html"
+            print("FRESH")
             # No query, this is a fresh page load
             # Don't try to search, since there's clearly nothing to do
             params_with_defaults = {}
@@ -130,6 +141,7 @@ class DrilldownTableView(SingleTableView):
             "sizes": sizes,
             "params": params_with_defaults,
             "unique": "results",
+            "widget_options": self.widget_options,
             "window_content": self.window_content,
             "title": "Results",
         }
@@ -187,6 +199,7 @@ class DrilldownTableView(SingleTableView):
         # We don't want a random one since we only want one results pane.
         context["unique"] = "results"
         context["window_content"] = self.window_content
+        context["widget_options"] = self.widget_options
         context["title"] = "Results"

         # Valid sizes
@@ -209,9 +222,9 @@ class DrilldownTableView(SingleTableView):
         # Still push the URL so they can share it to get assistance
         if request.GET:
             if request.htmx:
-                response["HX-Push"] = reverse("home") + "?" + url_params
+                response["HX-Replace-Url"] = reverse("home") + "?" + url_params
         elif request.POST:
-            response["HX-Push"] = reverse("home") + "?" + url_params
+            response["HX-Replace-Url"] = reverse("home") + "?" + url_params
         return response

         # Create data for chart.js sentiment graph
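# Editor's note: the header rename tracks htmx 1.8, which deprecated HX-Push
# in favour of HX-Push-Url and added HX-Replace-Url to replace the current
# history entry instead of pushing a new one. A minimal sketch of carrying
# the header on a Django response; the URL is illustrative.
from django.http import HttpResponse


def htmx_view(request):
    response = HttpResponse("ok")
    # Replace the browser's current history entry rather than pushing one
    response["HX-Replace-Url"] = "/?query=example"
    return response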
@@ -265,7 +278,7 @@ class DrilldownTableView(SingleTableView):
         response = self.render_to_response(context)
         # if not request.method == "GET":
         if "client_uri" in context:
-            response["HX-Push"] = reverse("home") + "?" + context["client_uri"]
+            response["HX-Replace-Url"] = reverse("home") + "?" + context["client_uri"]
         return response

     def post(self, request, *args, **kwargs):
@@ -443,7 +456,7 @@ class ThresholdInfoModal(APIView):
         channel = request.data["channel"]

         channels = get_chans(net, [nick])
-        users = get_users(net, [nick])
+        users = get_users(net, [channel])
         num_users = annotate_num_users(net, channels)
         num_chans = annotate_num_chans(net, users)
         if channels:
@@ -7,7 +7,7 @@ from django.views import View
 from rest_framework.parsers import FormParser
 from rest_framework.views import APIView

-from core.db.druid import query_single_result
+from core.db.storage import db
 from core.lib.meta import get_meta
 from core.lib.nicktrace import get_nicks
 from core.lib.threshold import (
@@ -23,8 +23,9 @@ class Insights(LoginRequiredMixin, PermissionRequiredMixin, View):
     template_name = "ui/insights/insights.html"
     permission_required = "use_insights"

-    def get(self, request):
-        return render(request, self.template_name)
+    def get(self, request, index):
+        context = {"index": index}
+        return render(request, self.template_name, context)


 class InsightsSearch(LoginRequiredMixin, PermissionRequiredMixin, View):
@@ -32,13 +33,16 @@ class InsightsSearch(LoginRequiredMixin, PermissionRequiredMixin, View):
     template_name = "ui/insights/info.html"
     permission_required = "use_insights"

-    def post(self, request):
+    def post(self, request, index):
         query_params = request.POST.dict()
-        if "query_full" in query_params:
-            query_params["query_full"] = "nick: " + query_params["query_full"]
-        context = query_single_result(request, query_params)
+        if "query" in query_params:
+            query_params["query"] = "nick: " + query_params["query"]
+        query_params["source"] = "all"
+        query_params["index"] = index
+        context = db.query_single_result(request, query_params)
         if not context:
             return HttpResponseForbidden()
+        context["index"] = index
         return render(request, self.template_name, context)

@@ -47,7 +51,7 @@ class InsightsChannels(LoginRequiredMixin, PermissionRequiredMixin, APIView):
     template_name = "ui/insights/channels.html"
     permission_required = "use_insights"

-    def post(self, request):
+    def post(self, request, index):
         if "net" not in request.data:
             return HttpResponse("No net")
         if "nick" not in request.data:
@@ -58,7 +62,13 @@ class InsightsChannels(LoginRequiredMixin, PermissionRequiredMixin, APIView):
         num_users = annotate_num_users(net, chans)
         if not chans:
             return HttpResponseForbidden()
-        context = {"net": net, "nick": nick, "chans": chans, "num_users": num_users}
+        context = {
+            "net": net,
+            "nick": nick,
+            "chans": chans,
+            "num_users": num_users,
+            "index": index,
+        }
         return render(request, self.template_name, context)

@@ -67,7 +77,7 @@ class InsightsNicks(LoginRequiredMixin, PermissionRequiredMixin, APIView):
     template_name = "ui/insights/nicks.html"
     permission_required = "use_insights"

-    def post(self, request):
+    def post(self, request, index):
         if "net" not in request.data:
             return HttpResponse("No net")
         if "nick" not in request.data:
@@ -82,7 +92,13 @@ class InsightsNicks(LoginRequiredMixin, PermissionRequiredMixin, APIView):
         online = annotate_online(net, nicks)
         if not nicks:
             return HttpResponseForbidden()
-        context = {"net": net, "nick": nick, "nicks": nicks, "online": online}
+        context = {
+            "net": net,
+            "nick": nick,
+            "nicks": nicks,
+            "online": online,
+            "index": index,
+        }
         return render(request, self.template_name, context)

@@ -91,7 +107,7 @@ class InsightsMeta(LoginRequiredMixin, PermissionRequiredMixin, APIView):
     template_name = "ui/insights/meta.html"
     permission_required = "use_insights"

-    def post(self, request):
+    def post(self, request, index):
         if "net" not in request.data:
             return HttpResponse("No net")
         if "nicks" not in request.data:
@@ -99,6 +115,10 @@ class InsightsMeta(LoginRequiredMixin, PermissionRequiredMixin, APIView):
         net = request.data["net"]
         nicks = request.data["nicks"]
         nicks = literal_eval(nicks)
+
+        # Check the user has permissions to use the meta index
+        if not request.user.has_perm("core.index_meta"):
+            return HttpResponseForbidden()
         meta = get_meta(request, net, nicks)
         unique_values = {}
         # Create a map of unique values for each key for each nick
@@ -122,7 +142,7 @@ class InsightsMeta(LoginRequiredMixin, PermissionRequiredMixin, APIView):
                 meta_dedup[k].add(v)
                 unique_values[nick][k].remove(v)

-        context = {"net": net, "nicks": nicks, "meta": meta_dedup}
+        context = {"net": net, "nicks": nicks, "meta": meta_dedup, "index": index}
         return render(request, self.template_name, context)

@@ -131,7 +151,7 @@ class InsightsInfoModal(LoginRequiredMixin, PermissionRequiredMixin, APIView):
     template_name = "modals/drilldown.html"
     permission_required = "use_insights"

-    def post(self, request):
+    def post(self, request, index):
         if "net" not in request.data:
             return JsonResponse({"success": False})
         if "nick" not in request.data:
@@ -163,5 +183,6 @@ class InsightsInfoModal(LoginRequiredMixin, PermissionRequiredMixin, APIView):
             "inter_users": inter_users,
             "num_users": num_users,
             "num_chans": num_chans,
+            "index": index,
         }
         return render(request, self.template_name, context)
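# Editor's sketch: every insights view now receives an `index` argument, which
# implies URL patterns carrying an index segment. The exact paths below are
# hypothetical; only the <str:index> converter is the point.
from django.urls import path

urlpatterns = [
    path("insights/<str:index>/", Insights.as_view(), name="insights"),
    path(
        "insights/<str:index>/search/",
        InsightsSearch.as_view(),
        name="insights_search",
    ),
]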
@@ -16,6 +16,8 @@ def format_header(self):
     header = header.replace("id", "ID")
     if header == "Ts":
         header = "TS"
+    if header == "Match Ts":
+        header = "Match TS"
     header = header.replace("Nsfw", "NSFW")

     return header
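# Editor's sketch: a quick check of the header normalisation with the new
# special case, re-implemented standalone for illustration (the original is
# a method that picks up `header` from the surrounding table machinery).
def format_header(header):
    header = header.replace("id", "ID")
    if header == "Ts":
        header = "TS"
    if header == "Match Ts":
        header = "Match TS"
    header = header.replace("Nsfw", "NSFW")
    return header


print(format_header("Match Ts"))  # Match TS
print(format_header("Rule id"))   # Rule ID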
@@ -76,6 +78,12 @@ class DrilldownTable(Table):
     file_md5 = Column()
     file_ext = Column()
     file_size = Column()
+    rule_id = Column()
+    batch_id = Column()
+    index = Column()
+    meta = Column()
+    match_ts = Column()
+    mode = Column()

     template_name = "ui/drilldown/table_results.html"
     paginate_by = settings.DRILLDOWN_RESULTS_PER_PAGE
docker-compose.prod.yml (new file, 579 lines)
@@ -0,0 +1,579 @@
version: "2.2"

services:
  app:
    image: xf/neptune:latest
    container_name: neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
      #- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/conf/db.sqlite3
      - type: bind
        source: /code/run
        target: /var/run
    # env_file:
    #   - stack.env
    environment:
      # General application settings
      APP_PORT: "${APP_PORT}"
      PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
      APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
      APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
      STATIC_ROOT: "${STATIC_ROOT}"
      OPERATION: "${OPERATION}"
      # Elasticsearch settings
      ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
      ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
      ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
      ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
      ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
      # Manticore settings
      MANTICORE_URL: "${MANTICORE_URL}"
      # Database settings
      DB_BACKEND: "${DB_BACKEND}"
      INDEX_MAIN: "${INDEX_MAIN}"
      INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
      INDEX_META: "${INDEX_META}"
      INDEX_INT: "${INDEX_INT}"
      INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
      MAIN_SIZES: "${MAIN_SIZES}"
      MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
      MAIN_SOURCES: "${MAIN_SOURCES}"
      SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
      CACHE: "${CACHE}"
      CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
      # Drilldown settings
      DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
      DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
      DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
      DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
      DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
      # URLs
      DOMAIN: "${DOMAIN}"
      URL: "${URL}"
      # Access control
      ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
      # CSRF
      CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
      # Stripe settings
      BILLING_ENABLED: "${BILLING_ENABLED}"
      STRIPE_TEST: "${STRIPE_TEST}"
      STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
      STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
      STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
      STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
      STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
      STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
      # Threshold settings
      THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
      THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
      THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
      THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
      # NickTrace settings
      NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
      NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
      NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
      # Meta settings
      META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
      META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
      META_QUERY_SIZE: "${META_QUERY_SIZE}"
      # Debugging and profiling
      DEBUG: "${DEBUG}"
      PROFILER: "${PROFILER}"
      # Redis settings
      REDIS_HOST: "${REDIS_HOST}"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"
      REDIS_DB: "${REDIS_DB}"
      REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
      REDIS_PORT: "${REDIS_PORT}"
    depends_on:
      redis:
        condition: service_healthy
      migration:
        condition: service_started
      collectstatic:
        condition: service_started
    # networks:
    #   - default
    #   - xf
    #   - db
    network_mode: host

  processing:
    image: xf/neptune:latest
    container_name: processing_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py processing'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
      #- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/conf/db.sqlite3
      - type: bind
        source: /code/run
        target: /var/run
    environment:
      # General application settings
      APP_PORT: "${APP_PORT}"
      PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
      APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
      APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
      STATIC_ROOT: "${STATIC_ROOT}"
      OPERATION: "${OPERATION}"
      # Elasticsearch settings
      ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
      ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
      ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
      ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
      ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
      # Manticore settings
      MANTICORE_URL: "${MANTICORE_URL}"
      # Database settings
      DB_BACKEND: "${DB_BACKEND}"
      INDEX_MAIN: "${INDEX_MAIN}"
      INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
      INDEX_META: "${INDEX_META}"
      INDEX_INT: "${INDEX_INT}"
      INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
      MAIN_SIZES: "${MAIN_SIZES}"
      MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
      MAIN_SOURCES: "${MAIN_SOURCES}"
      SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
      CACHE: "${CACHE}"
      CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
      # Drilldown settings
      DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
      DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
      DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
      DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
      DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
      # URLs
      DOMAIN: "${DOMAIN}"
      URL: "${URL}"
      # Access control
      ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
      # CSRF
      CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
      # Stripe settings
      BILLING_ENABLED: "${BILLING_ENABLED}"
      STRIPE_TEST: "${STRIPE_TEST}"
      STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
      STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
      STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
      STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
      STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
      STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
      # Threshold settings
      THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
      THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
      THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
      THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
      # NickTrace settings
      NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
      NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
      NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
      # Meta settings
      META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
      META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
      META_QUERY_SIZE: "${META_QUERY_SIZE}"
      # Debugging and profiling
      DEBUG: "${DEBUG}"
      PROFILER: "${PROFILER}"
      # Redis settings
      REDIS_HOST: "${REDIS_HOST}"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"
      REDIS_DB: "${REDIS_DB}"
      REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
      REDIS_PORT: "${REDIS_PORT}"
    # volumes_from:
    #   - tmp
    depends_on:
      redis:
        condition: service_healthy
      migration:
        condition: service_started
      collectstatic:
        condition: service_started
    # networks:
    #   - default
    #   - xf
    #   - db
    network_mode: host

  scheduling:
    image: xf/neptune:latest
    container_name: scheduling_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py scheduling'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
      #- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/conf/db.sqlite3
      - type: bind
        source: /code/run
        target: /var/run
    environment:
      # General application settings
      APP_PORT: "${APP_PORT}"
      PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
      APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
      APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
      STATIC_ROOT: "${STATIC_ROOT}"
      OPERATION: "${OPERATION}"
      # Elasticsearch settings
      ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
      ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
      ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
      ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
      ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
      # Manticore settings
      MANTICORE_URL: "${MANTICORE_URL}"
      # Database settings
      DB_BACKEND: "${DB_BACKEND}"
      INDEX_MAIN: "${INDEX_MAIN}"
      INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
      INDEX_META: "${INDEX_META}"
      INDEX_INT: "${INDEX_INT}"
      INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
      MAIN_SIZES: "${MAIN_SIZES}"
      MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
      MAIN_SOURCES: "${MAIN_SOURCES}"
      SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
      CACHE: "${CACHE}"
      CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
      # Drilldown settings
      DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
      DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
      DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
      DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
      DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
      # URLs
      DOMAIN: "${DOMAIN}"
      URL: "${URL}"
      # Access control
      ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
      # CSRF
      CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
      # Stripe settings
      BILLING_ENABLED: "${BILLING_ENABLED}"
      STRIPE_TEST: "${STRIPE_TEST}"
      STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
      STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
      STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
      STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
      STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
      STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
      # Threshold settings
      THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
      THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
      THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
      THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
      # NickTrace settings
      NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
      NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
      NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
      # Meta settings
      META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
      META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
      META_QUERY_SIZE: "${META_QUERY_SIZE}"
      # Debugging and profiling
      DEBUG: "${DEBUG}"
      PROFILER: "${PROFILER}"
      # Redis settings
      REDIS_HOST: "${REDIS_HOST}"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"
      REDIS_DB: "${REDIS_DB}"
      REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
      REDIS_PORT: "${REDIS_PORT}"
    # volumes_from:
    #   - tmp
    depends_on:
      redis:
        condition: service_healthy
      migration:
        condition: service_started
      collectstatic:
        condition: service_started
    # networks:
    #   - default
    #   - xf
    #   - db
    network_mode: host

  migration:
    image: xf/neptune:latest
    container_name: migration_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      #- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/conf/db.sqlite3
      - type: bind
        source: /code/run
        target: /var/run
    environment:
      # General application settings
      APP_PORT: "${APP_PORT}"
      PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
      APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
      APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
      STATIC_ROOT: "${STATIC_ROOT}"
      OPERATION: "${OPERATION}"
      # Elasticsearch settings
      ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
      ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
      ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
      ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
      ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
      # Manticore settings
      MANTICORE_URL: "${MANTICORE_URL}"
      # Database settings
      DB_BACKEND: "${DB_BACKEND}"
      INDEX_MAIN: "${INDEX_MAIN}"
      INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
      INDEX_META: "${INDEX_META}"
      INDEX_INT: "${INDEX_INT}"
      INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
      MAIN_SIZES: "${MAIN_SIZES}"
      MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
      MAIN_SOURCES: "${MAIN_SOURCES}"
      SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
      CACHE: "${CACHE}"
      CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
      # Drilldown settings
      DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
      DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
      DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
      DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
      DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
      # URLs
      DOMAIN: "${DOMAIN}"
      URL: "${URL}"
      # Access control
      ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
      # CSRF
      CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
      # Stripe settings
      BILLING_ENABLED: "${BILLING_ENABLED}"
      STRIPE_TEST: "${STRIPE_TEST}"
      STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
      STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
      STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
      STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
      STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
      STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
      # Threshold settings
      THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
      THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
      THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
      THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
      # NickTrace settings
      NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
      NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
      NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
      # Meta settings
      META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
      META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
      META_QUERY_SIZE: "${META_QUERY_SIZE}"
      # Debugging and profiling
      DEBUG: "${DEBUG}"
      PROFILER: "${PROFILER}"
      # Redis settings
      REDIS_HOST: "${REDIS_HOST}"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"
      REDIS_DB: "${REDIS_DB}"
      REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
      REDIS_PORT: "${REDIS_PORT}"
    # volumes_from:
    #   - tmp
    depends_on:
      redis:
        condition: service_healthy
    # networks:
    #   - default
    #   - xf
    #   - db
    network_mode: host

  collectstatic:
    image: xf/neptune:latest
    container_name: collectstatic_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      #- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/conf/db.sqlite3
      - type: bind
        source: /code/run
        target: /var/run
    # volumes_from:
    #   - tmp
    environment:
      # General application settings
      APP_PORT: "${APP_PORT}"
      PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
      APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
      APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
      STATIC_ROOT: "${STATIC_ROOT}"
      OPERATION: "${OPERATION}"
      # Elasticsearch settings
      ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
      ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
      ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
      ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
      ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
      # Manticore settings
      MANTICORE_URL: "${MANTICORE_URL}"
      # Database settings
      DB_BACKEND: "${DB_BACKEND}"
      INDEX_MAIN: "${INDEX_MAIN}"
      INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
      INDEX_META: "${INDEX_META}"
      INDEX_INT: "${INDEX_INT}"
      INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
|
||||||
|
MAIN_SIZES: "${MAIN_SIZES}"
|
||||||
|
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
|
||||||
|
MAIN_SOURCES: "${MAIN_SOURCES}"
|
||||||
|
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
|
||||||
|
CACHE: "${CACHE}"
|
||||||
|
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
|
||||||
|
# Drilldown settings
|
||||||
|
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
|
||||||
|
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
|
||||||
|
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
|
||||||
|
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
|
||||||
|
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
|
||||||
|
# URLs: "${# URLs}"
|
||||||
|
DOMAIN: "${DOMAIN}"
|
||||||
|
URL: "${URL}"
|
||||||
|
# Access control
|
||||||
|
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
|
||||||
|
# CSRF
|
||||||
|
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
|
||||||
|
# Stripe settings
|
||||||
|
BILLING_ENABLED: "${BILLING_ENABLED}"
|
||||||
|
STRIPE_TEST: "${STRIPE_TEST}"
|
||||||
|
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
|
||||||
|
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
|
||||||
|
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
|
||||||
|
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
|
||||||
|
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
|
||||||
|
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
|
||||||
|
# Threshold settings
|
||||||
|
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
|
||||||
|
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
|
||||||
|
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
|
||||||
|
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
|
||||||
|
# NickTrace settings
|
||||||
|
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
|
||||||
|
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
|
||||||
|
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
|
||||||
|
# Meta settings
|
||||||
|
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
|
||||||
|
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
|
||||||
|
META_QUERY_SIZE: "${META_QUERY_SIZE}"
|
||||||
|
# Debugging and profiling
|
||||||
|
DEBUG: "${DEBUG}"
|
||||||
|
PROFILER: "${PROFILER}"
|
||||||
|
# Redis settings
|
||||||
|
REDIS_HOST: "${REDIS_HOST}"
|
||||||
|
REDIS_PASSWORD: "${REDIS_PASSWORD}"
|
||||||
|
REDIS_DB: "${REDIS_DB}"
|
||||||
|
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
|
||||||
|
REDIS_PORT: "${REDIS_PORT}"
|
||||||
|
depends_on:
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
# networks:
|
||||||
|
# - default
|
||||||
|
# - xf
|
||||||
|
# - db
|
||||||
|
network_mode: host
|
||||||
|
|
||||||
|
# nginx:
|
||||||
|
# image: nginx:latest
|
||||||
|
# container_name: nginx_neptune
|
||||||
|
# ports:
|
||||||
|
# - ${APP_PORT}:9999
|
||||||
|
# ulimits:
|
||||||
|
# nproc: 65535
|
||||||
|
# nofile:
|
||||||
|
# soft: 65535
|
||||||
|
# hard: 65535
|
||||||
|
# volumes:
|
||||||
|
# - ${PORTAINER_GIT_DIR}:/code
|
||||||
|
# - ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
|
||||||
|
# - neptune_static:${STATIC_ROOT}
|
||||||
|
# # volumes_from:
|
||||||
|
# # - tmp
|
||||||
|
# networks:
|
||||||
|
# - default
|
||||||
|
# - xf
|
||||||
|
# depends_on:
|
||||||
|
# app:
|
||||||
|
# condition: service_started
|
||||||
|
|
||||||
|
# tmp:
|
||||||
|
# image: busybox
|
||||||
|
# container_name: tmp_neptune
|
||||||
|
# command: chmod -R 777 /var/run/socks
|
||||||
|
# volumes:
|
||||||
|
# - /var/run/socks
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: redis
|
||||||
|
container_name: redis_neptune
|
||||||
|
command: redis-server /etc/redis.conf
|
||||||
|
# ulimits:
|
||||||
|
# nproc: 65535
|
||||||
|
# nofile:
|
||||||
|
# soft: 65535
|
||||||
|
# hard: 65535
|
||||||
|
volumes:
|
||||||
|
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
|
||||||
|
- neptune_redis_data:/data
|
||||||
|
- type: bind
|
||||||
|
source: /code/run
|
||||||
|
target: /var/run
|
||||||
|
# volumes_from:
|
||||||
|
# - tmp
|
||||||
|
healthcheck:
|
||||||
|
test: "redis-cli ping"
|
||||||
|
interval: 2s
|
||||||
|
timeout: 2s
|
||||||
|
retries: 15
|
||||||
|
# networks:
|
||||||
|
# - default
|
||||||
|
# - xf
|
||||||
|
|
||||||
|
# networks:
|
||||||
|
# default:
|
||||||
|
# driver: bridge
|
||||||
|
# xf:
|
||||||
|
# external: true
|
||||||
|
# db:
|
||||||
|
# external: true
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
# neptune_static: {}
|
||||||
|
neptune_redis_data: {}
|
||||||
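With every setting surfaced through the environment blocks above, bringing the stack up by hand reduces to pointing Compose at the env file. A minimal sketch, assuming Docker Compose v2 and the stack.env that ships in this repo (under Portainer the env file is injected for you):

    docker compose --env-file stack.env up -d --build
    # one-off jobs can also be re-run to completion on their own:
    docker compose --env-file stack.env run --rm migration

The depends_on conditions above gate the main service on a healthy redis and on the migration and collectstatic jobs having started.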
@@ -1,162 +0,0 @@
version: "2.2"

services:
  app:
    image: pathogen/neptune:latest
    container_name: neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
      - ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/code/db.sqlite3
      - neptune_static:${STATIC_ROOT}
    env_file:
      - stack.env
    volumes_from:
      - tmp
    depends_on:
      redis:
        condition: service_healthy
      migration:
        condition: service_started
      collectstatic:
        condition: service_started
    networks:
      - default
      - pathogen
      - elastic

  processing:
    image: pathogen/neptune:latest
    container_name: processing_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py processing'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
      - ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/code/db.sqlite3
      - neptune_static:${STATIC_ROOT}
    env_file:
      - stack.env
    volumes_from:
      - tmp
    depends_on:
      redis:
        condition: service_healthy
      migration:
        condition: service_started
      collectstatic:
        condition: service_started
    networks:
      - default

  migration:
    image: pathogen/neptune:latest
    container_name: migration_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/code/db.sqlite3
      - neptune_static:${STATIC_ROOT}
    volumes_from:
      - tmp
    depends_on:
      redis:
        condition: service_healthy

  collectstatic:
    image: pathogen/neptune:latest
    container_name: collectstatic_neptune
    build:
      context: .
      args:
        OPERATION: ${OPERATION}
    command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
      - ${APP_DATABASE_FILE}:/code/db.sqlite3
      - neptune_static:${STATIC_ROOT}
    volumes_from:
      - tmp
    env_file:
      - stack.env
    depends_on:
      redis:
        condition: service_healthy

  nginx:
    image: nginx:latest
    container_name: nginx_neptune
    ports:
      - ${APP_PORT}:9999
    ulimits:
      nproc: 65535
      nofile:
        soft: 65535
        hard: 65535
    volumes:
      - ${PORTAINER_GIT_DIR}:/code
      - ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
      - neptune_static:${STATIC_ROOT}
    volumes_from:
      - tmp
    networks:
      - default
      - pathogen
    depends_on:
      app:
        condition: service_started

  tmp:
    image: busybox
    container_name: tmp_neptune
    command: chmod -R 777 /var/run/socks
    volumes:
      - /var/run/socks

  redis:
    image: redis
    container_name: redis_neptune
    command: redis-server /etc/redis.conf
    ulimits:
      nproc: 65535
      nofile:
        soft: 65535
        hard: 65535
    volumes:
      - ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
    volumes_from:
      - tmp
    healthcheck:
      test: "redis-cli -s /var/run/socks/redis.sock ping"
      interval: 2s
      timeout: 2s
      retries: 15
    networks:
      - default
      - pathogen

networks:
  default:
    driver: bridge
  pathogen:
    external: true
  elastic:
    external: true

volumes:
  neptune_static: {}
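The deleted file above is the pre-rename stack: sockets were shared through a throwaway busybox container (tmp) plus volumes_from, which only exists in Compose file format 2.x, and services joined the external pathogen/elastic networks. The replacement file mounts /code/run into /var/run directly and uses network_mode: host instead. Either way, the redis health gate can be checked the same way once the stack is up (sketch; container name taken from these files):

    docker inspect --format '{{.State.Health.Status}}' redis_neptune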
@@ -1,5 +1,5 @@
-unixsocket /var/run/socks/redis.sock
+unixsocket /var/run/neptune-redis.sock
 unixsocketperm 777
-# For Monolith PubSub
-port 6379
+port 0
+# port 6379
 # requirepass changeme
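With port 0 the rewritten config leaves Redis listening only on its unix socket, so ad-hoc checks have to name the socket explicitly. A quick sketch (socket path from the new redis.conf):

    redis-cli -s /var/run/neptune-redis.sock ping

or, from Python with redis-py (the db number mirrors REDIS_DB=1 in stack.env):

    import redis
    # connect over the socket-only listener
    r = redis.Redis(unix_socket_path="/var/run/neptune-redis.sock", db=1)
    print(r.ping())  # True once the server is up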
@@ -4,9 +4,19 @@ module=app.wsgi:application
 env=DJANGO_SETTINGS_MODULE=app.settings
 master=1
 pidfile=/tmp/project-master.pid
-socket=0.0.0.0:8000
+#socket=0.0.0.0:8000
+socket=/var/run/uwsgi-neptune.sock
+# socket 777
+chmod-socket=777
 harakiri=20
-max-requests=100000
+#max-requests=100000
+# Set a lower value for max-requests to prevent memory leaks from building up over time
+max-requests=1000
+# Ensure old worker processes are cleaned up properly
+reload-on-as=512
+reload-on-rss=256
 vacuum=1
 home=/venv
-processes=12
+processes=4
+threads=2
+log-level=debug
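Switching uWSGI from TCP 8000 to a unix socket assumes the fronting web server speaks the uwsgi protocol over that socket; the matching nginx config lives in docker/nginx/conf.d and is not shown in this diff, but a hypothetical location block would look like:

    # hypothetical sketch, not taken from this repo's nginx config
    location / {
        include uwsgi_params;
        uwsgi_pass unix:/var/run/uwsgi-neptune.sock;
    }

The new reload-on-as=512 and reload-on-rss=256 lines recycle any worker whose address space or resident set grows past those limits (in megabytes), backing up the much lower max-requests.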
@@ -4,7 +4,7 @@ django
 pre-commit
 django-crispy-forms
 crispy-bulma
-elasticsearch
+elasticsearch[async]
 stripe
 django-rest-framework
 numpy
@@ -19,3 +19,12 @@ django-debug-toolbar
 django-debug-toolbar-template-profiler
 orjson
 msgpack
+apscheduler
+django-prettyjson
+git+https://git.zm.is/XF/django-crud-mixins
+# For caching
+redis
+hiredis
+django-cachalot
+django_redis
+httpx
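The caching additions (redis, hiredis, django-cachalot, django_redis) line up with the REDIS_DB_CACHE=10 database split out in stack.env. A minimal sketch of the Django side, assuming django-redis as the cache backend; the real wiring sits in app/settings.py, which this diff does not touch:

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            # socket path from docker/redis.conf; db 10 mirrors REDIS_DB_CACHE
            "LOCATION": "unix:///var/run/neptune-redis.sock?db=10",
            "OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
        }
    }

django-cachalot then caches ORM queries through whichever cache CACHES points at, with hiredis speeding up protocol parsing.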
84 stack.env
@@ -1,6 +1,86 @@
+# General application settings
 APP_PORT=5000
 PORTAINER_GIT_DIR=.
 APP_LOCAL_SETTINGS=./app/local_settings.py
 APP_DATABASE_FILE=./db.sqlite3
-STATIC_ROOT=/conf/static
-OPERATION=dev
+STATIC_ROOT=/code/static
+OPERATION=uwsgi
+
+# Elasticsearch settings
+ELASTICSEARCH_URL=10.1.0.1
+ELASTICSEARCH_PORT=9200
+ELASTICSEARCH_TLS=True
+ELASTICSEARCH_USERNAME=admin
+ELASTICSEARCH_PASSWORD=secret
+
+# Manticore settings
+MANTICORE_URL=http://127.0.0.1:9308
+
+# Database settings
+DB_BACKEND=MANTICORE
+INDEX_MAIN=main
+INDEX_RESTRICTED=restricted
+INDEX_META=meta
+INDEX_INT=internal
+INDEX_RULE_STORAGE=rule_storage
+
+MAIN_SIZES=1,5,15,30,50,100,250,500,1000
+MAIN_SIZES_ANON=1,5,15,30,50,100
+MAIN_SOURCES=dis,4ch,all
+SOURCES_RESTRICTED=irc
+CACHE=True
+CACHE_TIMEOUT=2
+
+# Drilldown settings
+DRILLDOWN_RESULTS_PER_PAGE=15
+DRILLDOWN_DEFAULT_SIZE=15
+DRILLDOWN_DEFAULT_INDEX=main
+DRILLDOWN_DEFAULT_SORTING=desc
+DRILLDOWN_DEFAULT_SOURCE=all
+
+# URLs
+DOMAIN=spy.zm.is
+URL=https://spy.zm.is
+
+# Access control
+ALLOWED_HOSTS=spy.zm.is
+
+# CSRF
+CSRF_TRUSTED_ORIGINS=https://spy.zm.is
+
+# Stripe settings
+BILLING_ENABLED=False
+STRIPE_TEST=True
+STRIPE_API_KEY_TEST=
+STRIPE_PUBLIC_API_KEY_TEST=
+STRIPE_API_KEY_PROD=
+STRIPE_PUBLIC_API_KEY_PROD=
+STRIPE_ENDPOINT_SECRET=
+STRIPE_ADMIN_COUPON=
+
+# Threshold settings
+THRESHOLD_ENDPOINT=http://threshold:13869
+THRESHOLD_API_KEY=api_1
+THRESHOLD_API_TOKEN=
+THRESHOLD_API_COUNTER=
+
+# NickTrace settings
+NICKTRACE_MAX_ITERATIONS=4
+NICKTRACE_MAX_CHUNK_SIZE=500
+NICKTRACE_QUERY_SIZE=10000
+
+# Meta settings
+META_MAX_ITERATIONS=4
+META_MAX_CHUNK_SIZE=500
+META_QUERY_SIZE=10000
+
+# Debugging and profiling
+DEBUG=n
+PROFILER=False
+
+# Redis settings
+REDIS_HOST=redis_neptune
+REDIS_PASSWORD=changeme
+REDIS_DB=1
+REDIS_DB_CACHE=10
+REDIS_PORT=6379
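Everything in this file reaches Django as a plain string, so the settings code has to coerce types itself. A sketch of the usual pattern, assuming the parsing lives in app/settings.py (not shown in this diff):

    import os

    # "n" stays falsy, which is why DEBUG=n above disables debug mode
    DEBUG = os.getenv("DEBUG", "n").lower() in ("1", "true", "y", "yes")
    # comma-separated values such as MAIN_SIZES are split and cast by hand
    MAIN_SIZES = [int(s) for s in os.getenv("MAIN_SIZES", "15").split(",")]
    ALLOWED_HOSTS = os.getenv("ALLOWED_HOSTS", "").split(",")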