Update to run with Podman

commit 86fb2ac593 (parent a519a4ce5e)
2024-12-29 17:37:23 +00:00
27 changed files with 1594 additions and 278 deletions

View File

@@ -1,20 +1,20 @@
 run:
-	docker-compose --env-file=stack.env up -d
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env up -d
 build:
-	docker-compose --env-file=stack.env build
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env build
 stop:
-	docker-compose --env-file=stack.env down
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env down
 log:
-	docker-compose --env-file=stack.env logs -f
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env logs -f
 migrate:
-	docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
 makemigrations:
-	docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
 auth:
-	docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"
+	docker-compose -f docker-compose.prod.yml --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"

View File

@@ -1,3 +1,5 @@
+from os import getenv
+
 # Elasticsearch settings
 ELASTICSEARCH_URL = "10.1.0.1"
 ELASTICSEARCH_PORT = 9200
@@ -104,3 +106,8 @@ META_QUERY_SIZE = 10000
 DEBUG = True
 PROFILER = False
+
+REDIS_HOST = getenv("REDIS_HOST", "redis_fisk_dev")
+REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
+REDIS_DB = int(getenv("REDIS_DB", "10"))
+REDIS_PORT = int(getenv("REDIS_PORT", "6379"))

app/local_settings.py (new file, 87 lines)

@@ -0,0 +1,87 @@
from os import getenv
trues = ("t", "true", "yes", "y", "1")
# Elasticsearch settings
ELASTICSEARCH_URL = getenv("ELASTICSEARCH_URL", "10.1.0.1")
ELASTICSEARCH_PORT = int(getenv("ELASTICSEARCH_PORT", "9200"))
ELASTICSEARCH_TLS = getenv("ELASTICSEARCH_TLS", "True").lower() in trues
ELASTICSEARCH_USERNAME = getenv("ELASTICSEARCH_USERNAME", "admin")
ELASTICSEARCH_PASSWORD = getenv("ELASTICSEARCH_PASSWORD", "secret")
# Manticore settings
MANTICORE_URL = getenv("MANTICORE_URL", "http://example-db-1:9308")
DB_BACKEND = getenv("DB_BACKEND", "MANTICORE")
# Common DB settings
INDEX_MAIN = getenv("INDEX_MAIN", "main")
INDEX_RESTRICTED = getenv("INDEX_RESTRICTED", "restricted")
INDEX_META = getenv("INDEX_META", "meta")
INDEX_INT = getenv("INDEX_INT", "internal")
INDEX_RULE_STORAGE = getenv("INDEX_RULE_STORAGE", "rule_storage")
MAIN_SIZES = getenv("MAIN_SIZES", "1,5,15,30,50,100,250,500,1000").split(",")
MAIN_SIZES_ANON = getenv("MAIN_SIZES_ANON", "1,5,15,30,50,100").split(",")
MAIN_SOURCES = getenv("MAIN_SOURCES", "dis,4ch,all").split(",")
SOURCES_RESTRICTED = getenv("SOURCES_RESTRICTED", "irc").split(",")
CACHE = getenv("CACHE", "False").lower() in trues
CACHE_TIMEOUT = int(getenv("CACHE_TIMEOUT", "2"))
DRILLDOWN_RESULTS_PER_PAGE = int(getenv("DRILLDOWN_RESULTS_PER_PAGE", "15"))
DRILLDOWN_DEFAULT_PARAMS = {
    "size": getenv("DRILLDOWN_DEFAULT_SIZE", "15"),
    "index": getenv("DRILLDOWN_DEFAULT_INDEX", "main"),
    "sorting": getenv("DRILLDOWN_DEFAULT_SORTING", "desc"),
    "source": getenv("DRILLDOWN_DEFAULT_SOURCE", "all"),
}
# URLs
DOMAIN = getenv("DOMAIN", "example.com")
URL = getenv("URL", f"https://{DOMAIN}")
# Access control
ALLOWED_HOSTS = getenv("ALLOWED_HOSTS", f"127.0.0.1,{DOMAIN}").split(",")
# CSRF
CSRF_TRUSTED_ORIGINS = getenv("CSRF_TRUSTED_ORIGINS", URL).split(",")
# Stripe
BILLING_ENABLED = getenv("BILLING_ENABLED", "false").lower() in trues
STRIPE_TEST = getenv("STRIPE_TEST", "True").lower() in trues
STRIPE_API_KEY_TEST = getenv("STRIPE_API_KEY_TEST", "")
STRIPE_PUBLIC_API_KEY_TEST = getenv("STRIPE_PUBLIC_API_KEY_TEST", "")
STRIPE_API_KEY_PROD = getenv("STRIPE_API_KEY_PROD", "")
STRIPE_PUBLIC_API_KEY_PROD = getenv("STRIPE_PUBLIC_API_KEY_PROD", "")
STRIPE_ENDPOINT_SECRET = getenv("STRIPE_ENDPOINT_SECRET", "")
STATIC_ROOT = getenv("STATIC_ROOT", "")
SECRET_KEY = getenv("SECRET_KEY", "a")
STRIPE_ADMIN_COUPON = getenv("STRIPE_ADMIN_COUPON", "")
# Threshold
THRESHOLD_ENDPOINT = getenv("THRESHOLD_ENDPOINT", "http://threshold:13869")
THRESHOLD_API_KEY = getenv("THRESHOLD_API_KEY", "api_1")
THRESHOLD_API_TOKEN = getenv("THRESHOLD_API_TOKEN", "")
THRESHOLD_API_COUNTER = getenv("THRESHOLD_API_COUNTER", "")
# NickTrace
NICKTRACE_MAX_ITERATIONS = int(getenv("NICKTRACE_MAX_ITERATIONS", "4"))
NICKTRACE_MAX_CHUNK_SIZE = int(getenv("NICKTRACE_MAX_CHUNK_SIZE", "500"))
NICKTRACE_QUERY_SIZE = int(getenv("NICKTRACE_QUERY_SIZE", "10000"))
# Meta
META_MAX_ITERATIONS = int(getenv("META_MAX_ITERATIONS", "4"))
META_MAX_CHUNK_SIZE = int(getenv("META_MAX_CHUNK_SIZE", "500"))
META_QUERY_SIZE = int(getenv("META_QUERY_SIZE", "10000"))
DEBUG = getenv("DEBUG", "True").lower() in trues
PROFILER = getenv("PROFILER", "False").lower() in trues
REDIS_HOST = getenv("REDIS_HOST", "redis_neptune_dev")
REDIS_PASSWORD = getenv("REDIS_PASSWORD", "changeme")
REDIS_DB = int(getenv("REDIS_DB", "1"))
REDIS_DB_CACHE = int(getenv("REDIS_DB_CACHE", "10"))
REDIS_PORT = int(getenv("REDIS_PORT", "6379"))
# Elasticsearch blacklist
ELASTICSEARCH_BLACKLISTED = {}
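Every value in the new local_settings.py is derived from environment variables, with booleans parsed by membership in the trues tuple. A minimal sketch of the same pattern, using a hypothetical FEATURE_ENABLED variable for illustration:

from os import getenv

trues = ("t", "true", "yes", "y", "1")

# "t", "True", "YES" and "1" all parse as True; anything else is False.
FEATURE_ENABLED = getenv("FEATURE_ENABLED", "false").lower() in trues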

View File

@@ -47,19 +47,6 @@ INSTALLED_APPS = [
"cachalot", "cachalot",
] ]
# Performance optimisations
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "unix:///var/run/socks/redis.sock",
"OPTIONS": {
"db": "10",
# "parser_class": "django_redis.cache.RedisCache",
"pool_class": "redis.BlockingConnectionPool",
},
}
}
CRISPY_TEMPLATE_PACK = "bulma" CRISPY_TEMPLATE_PACK = "bulma"
CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",) CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",)
DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html" DJANGO_TABLES2_TEMPLATE = "django-tables2/bulma.html"
@@ -163,7 +150,7 @@ REST_FRAMEWORK = {
 INTERNAL_IPS = [
     "127.0.0.1",
-    "10.1.10.11",
+    # "10.1.10.11",
 ]
 DEBUG_TOOLBAR_PANELS = [
@@ -187,6 +174,21 @@ DEBUG_TOOLBAR_PANELS = [
 from app.local_settings import *  # noqa

+# Performance optimisations
+CACHES = {
+    "default": {
+        "BACKEND": "django_redis.cache.RedisCache",
+        # "LOCATION": "unix:///var/run/socks/redis.sock",
+        "LOCATION": f"redis://{REDIS_HOST}:{REDIS_PORT}",
+        "OPTIONS": {
+            "db": REDIS_DB_CACHE,
+            # "parser_class": "django_redis.cache.RedisCache",
+            "PASSWORD": REDIS_PASSWORD,
+            "pool_class": "redis.BlockingConnectionPool",
+        },
+    }
+}
+
 if PROFILER:  # noqa - trust me its there
     import pyroscope
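Since the cache now reaches Redis over TCP with a password instead of a unix socket, a quick connectivity check can catch bad credentials early. A sketch, assuming the same REDIS_* variables are exported in the environment:

from os import getenv
from redis import StrictRedis

r = StrictRedis(
    host=getenv("REDIS_HOST", "localhost"),
    port=int(getenv("REDIS_PORT", "6379")),
    password=getenv("REDIS_PASSWORD", ""),
    db=int(getenv("REDIS_DB_CACHE", "10")),
)
r.ping()  # raises AuthenticationError/ConnectionError if the settings are wrong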

View File

@@ -58,6 +58,9 @@ from core.views.manage.threshold.threshold import (
     ThresholdIRCOverview,
 )

+# Stats
+from core.views.manage.monolith import stats
+
 # Main tool pages
 from core.views.ui.drilldown import (  # DrilldownTableView,; Drilldown,
     DrilldownContextModal,
@@ -311,4 +314,14 @@ urlpatterns = [
         notifications.RuleClear.as_view(),
         name="rule_clear",
     ),
+    path(
+        "manage/monolith/stats/",
+        stats.MonolithStats.as_view(),
+        name="monolith_stats",
+    ),
+    path(
+        "manage/monolith/stats_db/<str:type>/",
+        stats.MonolithDBStats.as_view(),
+        name="monolith_stats_db",
+    )
 ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)

View File

@@ -7,7 +7,12 @@ from redis import StrictRedis
 os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"

-r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
+r = StrictRedis(
+    host=settings.REDIS_HOST,
+    port=settings.REDIS_PORT,
+    password=settings.REDIS_PASSWORD,
+    db=settings.REDIS_DB
+)

 if settings.STRIPE_TEST:
     stripe.api_key = settings.STRIPE_API_KEY_TEST

View File

@@ -168,6 +168,71 @@ class StorageBackend(ABC):
         # Actually get rid of all the things we set to None
         response["hits"]["hits"] = [hit for hit in response["hits"]["hits"] if hit]

+    def add_bool(self, search_query, add_bool):
+        """
+        Add the specified boolean matches to search query.
+        """
+        if not add_bool:
+            return
+        for item in add_bool:
+            search_query["query"]["bool"]["must"].append({"match_phrase": item})
+
+    def add_top(self, search_query, add_top, negative=False):
+        """
+        Merge add_top with the base of the search_query.
+        """
+        if not add_top:
+            return
+        if negative:
+            for item in add_top:
+                if "must_not" in search_query["query"]["bool"]:
+                    search_query["query"]["bool"]["must_not"].append(item)
+                else:
+                    search_query["query"]["bool"]["must_not"] = [item]
+        else:
+            for item in add_top:
+                if "query" not in search_query:
+                    search_query["query"] = {"bool": {"must": []}}
+                search_query["query"]["bool"]["must"].append(item)
+
+    def schedule_check_aggregations(self, rule_object, result_map):
+        """
+        Check the results of a scheduled query for aggregations.
+        """
+        if rule_object.aggs is None:
+            return result_map
+        for index, (meta, result) in result_map.items():
+            # Default to true, if no aggs are found, we still want to match
+            match = True
+            for agg_name, (operator, number) in rule_object.aggs.items():
+                if agg_name in meta["aggs"]:
+                    agg_value = meta["aggs"][agg_name]["value"]
+                    # TODO: simplify this, match is default to True
+                    if operator == ">":
+                        if agg_value > number:
+                            match = True
+                        else:
+                            match = False
+                    elif operator == "<":
+                        if agg_value < number:
+                            match = True
+                        else:
+                            match = False
+                    elif operator == "=":
+                        if agg_value == number:
+                            match = True
+                        else:
+                            match = False
+                    else:
+                        match = False
+                else:
+                    # No aggregation found, but it is required
+                    match = False
+                result_map[index][0]["aggs"][agg_name]["match"] = match
+        return result_map
+
     def query(self, user, search_query, **kwargs):
         # For time tracking
         start = time.process_time()
@@ -188,6 +253,7 @@ class StorageBackend(ABC):
"took": time_took_rounded, "took": time_took_rounded,
"cache": True, "cache": True,
} }
print("S2", search_query)
response = self.run_query(user, search_query, **kwargs) response = self.run_query(user, search_query, **kwargs)
# For Elasticsearch # For Elasticsearch
@@ -198,7 +264,20 @@ class StorageBackend(ABC):
if "took" in response: if "took" in response:
if response["took"] is None: if response["took"] is None:
return None return None
if len(response["hits"]["hits"]) == 0: if "error" in response:
message = f"Error: {response['error']}"
message_class = "danger"
time_took = (time.process_time() - start) * 1000
# Round to 3 significant figures
time_took_rounded = round(
time_took, 3 - int(floor(log10(abs(time_took)))) - 1
)
return {
"message": message,
"class": message_class,
"took": time_took_rounded,
}
elif len(response["hits"]["hits"]) == 0:
message = "No results." message = "No results."
message_class = "danger" message_class = "danger"
time_took = (time.process_time() - start) * 1000 time_took = (time.process_time() - start) * 1000
@@ -213,7 +292,7 @@ class StorageBackend(ABC):
             }
         # For Druid
-        if "error" in response:
+        elif "error" in response:
             if "errorMessage" in response:
                 context = {
                     "message": response["errorMessage"],
@@ -240,6 +319,106 @@ class StorageBackend(ABC):
         time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1)
         return {"object_list": response_parsed, "took": time_took_rounded}

+    def construct_context_query(
+        self, index, net, channel, src, num, size, type=None, nicks=None
+    ):
+        # Get the initial query
+        query = self.construct_query(None, size, blank=True)
+
+        extra_must = []
+        extra_should = []
+        extra_should2 = []
+        if num:
+            extra_must.append({"match_phrase": {"num": num}})
+        if net:
+            extra_must.append({"match_phrase": {"net": net}})
+        if channel:
+            extra_must.append({"match": {"channel": channel}})
+        if nicks:
+            for nick in nicks:
+                extra_should2.append({"match": {"nick": nick}})
+
+        types = ["msg", "notice", "action", "kick", "topic", "mode"]
+        fields = [
+            "nick",
+            "ident",
+            "host",
+            "channel",
+            "ts",
+            "msg",
+            "type",
+            "net",
+            "src",
+            "tokens",
+        ]
+        query["fields"] = fields
+
+        if index == "internal":
+            fields.append("mtype")
+            if channel == "*status" or type == "znc":
+                if {"match": {"channel": channel}} in extra_must:
+                    extra_must.remove({"match": {"channel": channel}})
+                extra_should2 = []
+                # Type is one of msg or notice
+                # extra_should.append({"match": {"mtype": "msg"}})
+                # extra_should.append({"match": {"mtype": "notice"}})
+                extra_should.append({"match": {"type": "znc"}})
+                extra_should.append({"match": {"type": "self"}})
+
+                extra_should2.append({"match": {"type": "znc"}})
+                extra_should2.append({"match": {"nick": channel}})
+            elif type == "auth":
+                if {"match": {"channel": channel}} in extra_must:
+                    extra_must.remove({"match": {"channel": channel}})
+                extra_should2 = []
+                extra_should2.append({"match": {"nick": channel}})
+                # extra_should2.append({"match": {"mtype": "msg"}})
+                # extra_should2.append({"match": {"mtype": "notice"}})
+
+                extra_should.append({"match": {"type": "query"}})
+                extra_should2.append({"match": {"type": "self"}})
+                extra_should.append({"match": {"nick": channel}})
+            else:
+                for ctype in types:
+                    extra_should.append({"match": {"mtype": ctype}})
+        else:
+            for ctype in types:
+                extra_should.append({"match": {"type": ctype}})
+
+        # query = {
+        #     "index": index,
+        #     "limit": size,
+        #     "query": {
+        #         "bool": {
+        #             "must": [
+        #                 # {"equals": {"src": src}},
+        #                 # {
+        #                 #     "bool": {
+        #                 #         "should": [*extra_should],
+        #                 #     }
+        #                 # },
+        #                 # {
+        #                 #     "bool": {
+        #                 #         "should": [*extra_should2],
+        #                 #     }
+        #                 # },
+        #                 *extra_must,
+        #             ]
+        #         }
+        #     },
+        #     "fields": fields,
+        #     # "_source": False,
+        # }
+        if extra_must:
+            for x in extra_must:
+                query["query"]["bool"]["must"].append(x)
+        if extra_should:
+            query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
+        if extra_should2:
+            query["query"]["bool"]["must"].append(
+                {"bool": {"should": [*extra_should2]}}
+            )
+        return query
+
     @abstractmethod
     def query_results(self, **kwargs):
         pass
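schedule_check_aggregations walks rule_object.aggs, a mapping of aggregation name to an (operator, number) pair, and stamps a match flag onto each result's metadata. A standalone sketch of the comparison logic, with hypothetical data:

# Hypothetical rule: the avg_sentiment aggregation must exceed 0.5.
aggs = {"avg_sentiment": (">", 0.5)}
meta = {"aggs": {"avg_sentiment": {"value": 0.7}}}

for agg_name, (operator, number) in aggs.items():
    value = meta["aggs"][agg_name]["value"]
    # Unknown operators and missing aggregations count as no match.
    match = (
        (operator == ">" and value > number)
        or (operator == "<" and value < number)
        or (operator == "=" and value == number)
    )
    meta["aggs"][agg_name]["match"] = match

print(meta["aggs"]["avg_sentiment"]["match"])  # True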

View File

@@ -374,44 +374,6 @@ class ElasticsearchBackend(StorageBackend):
         return search_query

-    def schedule_check_aggregations(self, rule_object, result_map):
-        """
-        Check the results of a scheduled query for aggregations.
-        """
-        if rule_object.aggs is None:
-            return result_map
-        for index, (meta, result) in result_map.items():
-            # Default to true, if no aggs are found, we still want to match
-            match = True
-            for agg_name, (operator, number) in rule_object.aggs.items():
-                if agg_name in meta["aggs"]:
-                    agg_value = meta["aggs"][agg_name]["value"]
-                    # TODO: simplify this, match is default to True
-                    if operator == ">":
-                        if agg_value > number:
-                            match = True
-                        else:
-                            match = False
-                    elif operator == "<":
-                        if agg_value < number:
-                            match = True
-                        else:
-                            match = False
-                    elif operator == "=":
-                        if agg_value == number:
-                            match = True
-                        else:
-                            match = False
-                    else:
-                        match = False
-                else:
-                    # No aggregation found, but it is required
-                    match = False
-                result_map[index][0]["aggs"][agg_name]["match"] = match
-        return result_map
-
     def schedule_query_results_test_sync(self, rule_object):
         """
         Helper to run a scheduled query test with reduced functionality.

View File

@@ -1,12 +1,24 @@
 import logging
 from datetime import datetime
 from pprint import pprint

+import httpx
+import orjson
 import requests
 from django.conf import settings

 from core.db import StorageBackend, add_defaults, dedup_list
-from core.db.processing import annotate_results, parse_results
+from core.db.processing import parse_results
+from core.lib.parsing import (
+    QueryError,
+    parse_date_time,
+    parse_index,
+    parse_rule,
+    parse_sentiment,
+    parse_size,
+    parse_sort,
+    parse_source,
+)

 logger = logging.getLogger(__name__)
@@ -21,17 +33,31 @@ class ManticoreBackend(StorageBackend):
""" """
pass # we use requests pass # we use requests
def construct_query(self, query, size, index, blank=False): async def async_initialise(self, **kwargs):
"""
Initialise the Manticore client in async mode
"""
pass # we use requests
def delete_rule_entries(self, rule_id):
"""
Delete all entries for a given rule.
:param rule_id: The rule ID to delete.
"""
# TODO
def construct_query(self, query, size=None, blank=False, **kwargs):
""" """
Accept some query parameters and construct an OpenSearch query. Accept some query parameters and construct an OpenSearch query.
""" """
if not size: if not size:
size = 5 size = 5
query_base = { query_base = {
"index": index, "index": kwargs.get("index"),
"limit": size, "limit": size,
"query": {"bool": {"must": []}}, "query": {"bool": {"must": []}},
} }
print("BASE", query_base)
query_string = { query_string = {
"query_string": query, "query_string": query,
} }
@@ -39,11 +65,81 @@ class ManticoreBackend(StorageBackend):
query_base["query"]["bool"]["must"].append(query_string) query_base["query"]["bool"]["must"].append(query_string)
return query_base return query_base
def run_query(self, client, user, search_query): def parse(self, response, **kwargs):
parsed = parse_results(response, **kwargs)
return parsed
def run_query(self, user, search_query, **kwargs):
"""
Low level helper to run Manticore query.
"""
index = kwargs.get("index")
raw = kwargs.get("raw")
if search_query and not raw:
search_query["index"] = index
pprint(search_query)
path = kwargs.get("path", "json/search")
if raw:
response = requests.post( response = requests.post(
f"{settings.MANTICORE_URL}/{path}", search_query
)
else:
response = requests.post(
f"{settings.MANTICORE_URL}/{path}", json=search_query
)
return orjson.loads(response.text)
async def async_run_query(self, user, search_query, **kwargs):
"""
Low level helper to run Manticore query asynchronously.
"""
index = kwargs.get("index")
search_query["index"] = index
pprint(search_query)
async with httpx.AsyncClient() as client:
response = await client.post(
f"{settings.MANTICORE_URL}/json/search", json=search_query f"{settings.MANTICORE_URL}/json/search", json=search_query
) )
return response return orjson.loads(response.text)
async def async_store_matches(self, matches):
"""
Store a list of matches in Manticore.
:param index: The index to store the matches in.
:param matches: A list of matches to store.
"""
# TODO
def store_matches(self, matches):
"""
Store a list of matches in Manticore.
:param index: The index to store the matches in.
:param matches: A list of matches to store.
"""
# TODO
def prepare_schedule_query(self, rule_object):
"""
Helper to run a scheduled query with reduced functionality.
"""
# TODO
def schedule_query_results_test_sync(self, rule_object):
"""
Helper to run a scheduled query test with reduced functionality.
Sync version for running from Django forms.
Does not return results.
"""
# TODO
async def schedule_query_results(self, rule_object):
"""
Helper to run a scheduled query with reduced functionality and async.
"""
# TODO
def query_results( def query_results(
self, self,
@@ -67,117 +163,77 @@ class ManticoreBackend(StorageBackend):
         query_created = False
         source = None
         add_defaults(query_params)
-        # Check size
+        # Now, run the helpers for SIQTSRSS/ADR
+        # S - Size
+        # I - Index
+        # Q - Query
+        # T - Tags
+        # S - Source
+        # R - Ranges
+        # S - Sort
+        # S - Sentiment
+        # A - Annotate
+        # D - Dedup
+        # R - Reverse
+        # S - Size
         if request.user.is_anonymous:
-            sizes = settings.MANTICORE_MAIN_SIZES_ANON
+            sizes = settings.MAIN_SIZES_ANON
         else:
-            sizes = settings.MANTICORE_MAIN_SIZES
+            sizes = settings.MAIN_SIZES
         if not size:
-            if "size" in query_params:
-                size = query_params["size"]
-                if size not in sizes:
-                    message = "Size is not permitted"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-                size = int(size)
-            else:
-                size = 20
+            size = parse_size(query_params, sizes)
+            if isinstance(size, dict):
+                return size
+
+        rule_object = parse_rule(request.user, query_params)
+        if isinstance(rule_object, dict):
+            return rule_object
+
+        if rule_object is not None:
+            index = settings.INDEX_RULE_STORAGE
+            add_bool.append({"rule_id": str(rule_object.id)})
+        else:
+            # I - Index
+            index = parse_index(request.user, query_params)
+            if isinstance(index, dict):
+                return index

-        # Check index
-        if "index" in query_params:
-            index = query_params["index"]
-            if index == "main":
-                index = settings.MANTICORE_INDEX_MAIN
-            else:
-                if not request.user.has_perm(f"core.index_{index}"):
-                    message = "Not permitted to search by this index"
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-                if index == "meta":
-                    index = settings.MANTICORE_INDEX_META
-                elif index == "internal":
-                    index = settings.MANTICORE_INDEX_INT
-                else:
-                    message = "Index is not valid."
-                    message_class = "danger"
-                    return {
-                        "message": message,
-                        "class": message_class,
-                    }
-        else:
-            index = settings.MANTICORE_INDEX_MAIN
+        # Q/T - Query/Tags
+        search_query = self.parse_query(
+            query_params, tags, size, custom_query, add_bool
+        )
+        # Query should be a dict, so check if it contains message here
+        if "message" in search_query:
+            return search_query

-        # Create the search query
-        if "query" in query_params:
-            query = query_params["query"]
-            search_query = self.construct_query(query, size, index)
-            query_created = True
-        else:
-            if custom_query:
-                search_query = custom_query
-        if tags:
-            # Get a blank search query
-            if not query_created:
-                search_query = self.construct_query(None, size, index, blank=True)
-                query_created = True
-            for tagname, tagvalue in tags.items():
-                add_bool.append({tagname: tagvalue})
-        required_any = ["query_full", "query", "tags"]
-        if not any([field in query_params.keys() for field in required_any]):
-            if not custom_query:
-                message = "Empty query!"
-                message_class = "warning"
-                return {"message": message, "class": message_class}
-        # Check for a source
-        if "source" in query_params:
-            source = query_params["source"]
-            if source in settings.SOURCES_RESTRICTED:
-                if not request.user.has_perm("core.restricted_sources"):
-                    message = "Access denied"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            elif source not in settings.MAIN_SOURCES:
-                message = "Invalid source"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if source == "all":
-                source = None  # the next block will populate it
-        if source:
-            sources = [source]
-        else:
-            sources = list(settings.MAIN_SOURCES)
-            if request.user.has_perm("core.restricted_sources"):
-                for source_iter in settings.SOURCES_RESTRICTED:
-                    sources.append(source_iter)
+        # S - Sources
+        sources = parse_source(request.user, query_params)
+        if isinstance(sources, dict):
+            return sources
+        total_count = len(sources)
+        # Total is -1 due to the "all" source
+        total_sources = (
+            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
+        )

-        add_top_tmp = {"bool": {"should": []}}
-        total_count = 0
-        for source_iter in sources:
-            add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
-            total_count += 1
-        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
-        if not total_count == total_sources:
-            add_top.append(add_top_tmp)
+        # If the sources the user has access to are equal to all
+        # possible sources, then we don't need to add the source
+        # filter to the query.
+        if total_count != total_sources:
+            add_top_tmp = {"bool": {"should": []}}
+            for source_iter in sources:
+                add_top_tmp["bool"]["should"].append(
+                    {"match_phrase": {"src": source_iter}}
+                )
+            if query_params["source"] != "all":
+                add_top.append(add_top_tmp)

-        # Date/time range
-        if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
-            query_params.keys()
-        ):
-            from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
-            to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
-            from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
-            to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
-            from_ts = int(from_ts.timestamp())
-            to_ts = int(to_ts.timestamp())
+        # R - Ranges
+        # date_query = False
+        from_ts, to_ts = parse_date_time(query_params)
+        if from_ts:
             range_query = {
                 "range": {
                     "ts": {
@@ -188,115 +244,87 @@ class ManticoreBackend(StorageBackend):
                 }
             }
             add_top.append(range_query)

-        # Sorting
-        if "sorting" in query_params:
-            sorting = query_params["sorting"]
-            if sorting not in ("asc", "desc", "none"):
-                message = "Invalid sort"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if sorting in ("asc", "desc"):
-                sort = [
-                    {
-                        "ts": {
-                            "order": sorting,
-                        }
-                    }
-                ]
+        # S - Sort
+        sort = parse_sort(query_params)
+        if isinstance(sort, dict):
+            return sort
+
+        if rule_object is not None:
+            field = "match_ts"
+        else:
+            field = "ts"
+        if sort:
+            # For Druid compatibility
+            sort_map = {"ascending": "asc", "descending": "desc"}
+            sorting = [
+                {
+                    field: {
+                        "order": sort_map[sort],
+                    }
+                }
+            ]
+            search_query["sort"] = sorting

-        # Sentiment handling
-        if "check_sentiment" in query_params:
-            if "sentiment_method" not in query_params:
-                message = "No sentiment method"
-                message_class = "danger"
-                return {"message": message, "class": message_class}
-            if "sentiment" in query_params:
-                sentiment = query_params["sentiment"]
-                try:
-                    sentiment = float(sentiment)
-                except ValueError:
-                    message = "Sentiment is not a float"
-                    message_class = "danger"
-                    return {"message": message, "class": message_class}
-            sentiment_method = query_params["sentiment_method"]
-            range_query_compare = {"range": {"sentiment": {}}}
+        # S - Sentiment
+        sentiment_r = parse_sentiment(query_params)
+        if isinstance(sentiment_r, dict):
+            return sentiment_r
+        if sentiment_r:
+            if rule_object is not None:
+                sentiment_index = "meta.aggs.avg_sentiment.value"
+            else:
+                sentiment_index = "sentiment"
+            sentiment_method, sentiment = sentiment_r
+            range_query_compare = {"range": {sentiment_index: {}}}
             range_query_precise = {
                 "match": {
-                    "sentiment": None,
+                    sentiment_index: None,
                 }
             }
             if sentiment_method == "below":
-                range_query_compare["range"]["sentiment"]["lt"] = sentiment
+                range_query_compare["range"][sentiment_index]["lt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "above":
-                range_query_compare["range"]["sentiment"]["gt"] = sentiment
+                range_query_compare["range"][sentiment_index]["gt"] = sentiment
                 add_top.append(range_query_compare)
             elif sentiment_method == "exact":
-                range_query_precise["match"]["sentiment"] = sentiment
+                range_query_precise["match"][sentiment_index] = sentiment
                 add_top.append(range_query_precise)
             elif sentiment_method == "nonzero":
-                range_query_precise["match"]["sentiment"] = 0
+                range_query_precise["match"][sentiment_index] = 0
                 add_top_negative.append(range_query_precise)

-        if add_bool:
-            # if "bool" not in search_query["query"]:
-            #     search_query["query"]["bool"] = {}
-            # if "must" not in search_query["query"]["bool"]:
-            #     search_query["query"]["bool"] = {"must": []}
-            for item in add_bool:
-                search_query["query"]["bool"]["must"].append({"match": item})
-        if add_top:
-            for item in add_top:
-                search_query["query"]["bool"]["must"].append(item)
-        if add_top_negative:
-            for item in add_top_negative:
-                if "must_not" in search_query["query"]["bool"]:
-                    search_query["query"]["bool"]["must_not"].append(item)
-                else:
-                    search_query["query"]["bool"]["must_not"] = [item]
-        if sort:
-            search_query["sort"] = sort
-        pprint(search_query)
-        results = self.run_query(
-            self.client,
-            request.user,  # passed through run_main_query to filter_blacklisted
-            search_query,
-        )
-        if not results:
+        # Add in the additional information we already populated
+        self.add_bool(search_query, add_bool)
+        self.add_top(search_query, add_top)
+        self.add_top(search_query, add_top_negative, negative=True)
+
+        response = self.query(
+            request.user,
+            search_query,
+            index=index,
+        )
+        if not response:
             message = "Error running query"
             message_class = "danger"
             return {"message": message, "class": message_class}
         # results = results.to_dict()
-        if "error" in results:
-            message = results["error"]
+        if "error" in response:
+            message = response["error"]
             message_class = "danger"
             return {"message": message, "class": message_class}
-        results_parsed = parse_results(results)
-        if annotate:
-            annotate_results(results_parsed)
-        if "dedup" in query_params:
-            if query_params["dedup"] == "on":
-                dedup = True
-            else:
-                dedup = False
-        else:
-            dedup = False
-        if reverse:
-            results_parsed = results_parsed[::-1]
-        if dedup:
-            if not dedup_fields:
-                dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
-            results_parsed = dedup_list(results_parsed, dedup_fields)
-        context = {
-            "object_list": results_parsed,
-            "card": results["hits"]["total"],
-            "took": results["took"],
-        }
-        if "cache" in results:
-            context["cache"] = results["cache"]
+        if "message" in response:
+            return response
+
+        # A/D/R - Annotate/Dedup/Reverse
+        response["object_list"] = self.process_results(
+            response["object_list"],
+            annotate=annotate,
+            dedup=dedup,
+            dedup_fields=dedup_fields,
+            reverse=reverse,
+        )
+
+        context = response
         return context
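The rewritten backend talks to Manticore's HTTP JSON API directly rather than going through a client object. A minimal standalone sketch of the same search call, assuming a Manticore instance on localhost:9308 with a main index:

import requests

search_query = {
    "index": "main",
    "limit": 5,
    "query": {"bool": {"must": [{"query_string": "hello"}]}},
}
# Mirrors run_query's non-raw path: POST the query dict as JSON.
response = requests.post(
    "http://localhost:9308/json/search", json=search_query
)
print(response.json()["hits"]["total"])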

core/db/manticore_orig.py (new file, 302 lines)

@@ -0,0 +1,302 @@
import logging
from datetime import datetime
from pprint import pprint

import requests
from django.conf import settings

from core.db import StorageBackend, add_defaults, dedup_list
from core.db.processing import annotate_results, parse_results

logger = logging.getLogger(__name__)


class ManticoreBackend(StorageBackend):
    def __init__(self):
        super().__init__("manticore")

    def initialise(self, **kwargs):
        """
        Initialise the Manticore client
        """
        pass  # we use requests

    def construct_query(self, query, size, index, blank=False):
        """
        Accept some query parameters and construct an OpenSearch query.
        """
        if not size:
            size = 5
        query_base = {
            "index": index,
            "limit": size,
            "query": {"bool": {"must": []}},
        }
        query_string = {
            "query_string": query,
        }
        if not blank:
            query_base["query"]["bool"]["must"].append(query_string)
        return query_base

    def run_query(self, client, user, search_query):
        response = requests.post(
            f"{settings.MANTICORE_URL}/json/search", json=search_query
        )
        return response

    def query_results(
        self,
        request,
        query_params,
        size=None,
        annotate=True,
        custom_query=False,
        reverse=False,
        dedup=False,
        dedup_fields=None,
        tags=None,
    ):
        query = None
        message = None
        message_class = None
        add_bool = []
        add_top = []
        add_top_negative = []
        sort = None
        query_created = False
        source = None
        add_defaults(query_params)
        # Check size
        if request.user.is_anonymous:
            sizes = settings.MANTICORE_MAIN_SIZES_ANON
        else:
            sizes = settings.MANTICORE_MAIN_SIZES
        if not size:
            if "size" in query_params:
                size = query_params["size"]
                if size not in sizes:
                    message = "Size is not permitted"
                    message_class = "danger"
                    return {"message": message, "class": message_class}
                size = int(size)
            else:
                size = 20
        # Check index
        if "index" in query_params:
            index = query_params["index"]
            if index == "main":
                index = settings.MANTICORE_INDEX_MAIN
            else:
                if not request.user.has_perm(f"core.index_{index}"):
                    message = "Not permitted to search by this index"
                    message_class = "danger"
                    return {
                        "message": message,
                        "class": message_class,
                    }
                if index == "meta":
                    index = settings.MANTICORE_INDEX_META
                elif index == "internal":
                    index = settings.MANTICORE_INDEX_INT
                else:
                    message = "Index is not valid."
                    message_class = "danger"
                    return {
                        "message": message,
                        "class": message_class,
                    }
        else:
            index = settings.MANTICORE_INDEX_MAIN
        # Create the search query
        if "query" in query_params:
            query = query_params["query"]
            search_query = self.construct_query(query, size, index)
            query_created = True
        else:
            if custom_query:
                search_query = custom_query
        if tags:
            # Get a blank search query
            if not query_created:
                search_query = self.construct_query(None, size, index, blank=True)
                query_created = True
            for tagname, tagvalue in tags.items():
                add_bool.append({tagname: tagvalue})
        required_any = ["query_full", "query", "tags"]
        if not any([field in query_params.keys() for field in required_any]):
            if not custom_query:
                message = "Empty query!"
                message_class = "warning"
                return {"message": message, "class": message_class}
        # Check for a source
        if "source" in query_params:
            source = query_params["source"]
            if source in settings.SOURCES_RESTRICTED:
                if not request.user.has_perm("core.restricted_sources"):
                    message = "Access denied"
                    message_class = "danger"
                    return {"message": message, "class": message_class}
            elif source not in settings.MAIN_SOURCES:
                message = "Invalid source"
                message_class = "danger"
                return {"message": message, "class": message_class}
            if source == "all":
                source = None  # the next block will populate it
        if source:
            sources = [source]
        else:
            sources = list(settings.MAIN_SOURCES)
            if request.user.has_perm("core.restricted_sources"):
                for source_iter in settings.SOURCES_RESTRICTED:
                    sources.append(source_iter)
        add_top_tmp = {"bool": {"should": []}}
        total_count = 0
        for source_iter in sources:
            add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
            total_count += 1
        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
        if not total_count == total_sources:
            add_top.append(add_top_tmp)
        # Date/time range
        if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
            query_params.keys()
        ):
            from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
            to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
            from_ts = datetime.strptime(from_ts, "%Y-%m-%dT%H:%MZ")
            to_ts = datetime.strptime(to_ts, "%Y-%m-%dT%H:%MZ")
            from_ts = int(from_ts.timestamp())
            to_ts = int(to_ts.timestamp())
            range_query = {
                "range": {
                    "ts": {
                        "gt": from_ts,
                        "lt": to_ts,
                    }
                }
            }
            add_top.append(range_query)
        # Sorting
        if "sorting" in query_params:
            sorting = query_params["sorting"]
            if sorting not in ("asc", "desc", "none"):
                message = "Invalid sort"
                message_class = "danger"
                return {"message": message, "class": message_class}
            if sorting in ("asc", "desc"):
                sort = [
                    {
                        "ts": {
                            "order": sorting,
                        }
                    }
                ]
        # Sentiment handling
        if "check_sentiment" in query_params:
            if "sentiment_method" not in query_params:
                message = "No sentiment method"
                message_class = "danger"
                return {"message": message, "class": message_class}
            if "sentiment" in query_params:
                sentiment = query_params["sentiment"]
                try:
                    sentiment = float(sentiment)
                except ValueError:
                    message = "Sentiment is not a float"
                    message_class = "danger"
                    return {"message": message, "class": message_class}
            sentiment_method = query_params["sentiment_method"]
            range_query_compare = {"range": {"sentiment": {}}}
            range_query_precise = {
                "match": {
                    "sentiment": None,
                }
            }
            if sentiment_method == "below":
                range_query_compare["range"]["sentiment"]["lt"] = sentiment
                add_top.append(range_query_compare)
            elif sentiment_method == "above":
                range_query_compare["range"]["sentiment"]["gt"] = sentiment
                add_top.append(range_query_compare)
            elif sentiment_method == "exact":
                range_query_precise["match"]["sentiment"] = sentiment
                add_top.append(range_query_precise)
            elif sentiment_method == "nonzero":
                range_query_precise["match"]["sentiment"] = 0
                add_top_negative.append(range_query_precise)
        if add_bool:
            # if "bool" not in search_query["query"]:
            #     search_query["query"]["bool"] = {}
            # if "must" not in search_query["query"]["bool"]:
            #     search_query["query"]["bool"] = {"must": []}
            for item in add_bool:
                search_query["query"]["bool"]["must"].append({"match": item})
        if add_top:
            for item in add_top:
                search_query["query"]["bool"]["must"].append(item)
        if add_top_negative:
            for item in add_top_negative:
                if "must_not" in search_query["query"]["bool"]:
                    search_query["query"]["bool"]["must_not"].append(item)
                else:
                    search_query["query"]["bool"]["must_not"] = [item]
        if sort:
            search_query["sort"] = sort
        pprint(search_query)
        results = self.run_query(
            self.client,
            request.user,  # passed through run_main_query to filter_blacklisted
            search_query,
        )
        if not results:
            message = "Error running query"
            message_class = "danger"
            return {"message": message, "class": message_class}
        # results = results.to_dict()
        if "error" in results:
            message = results["error"]
            message_class = "danger"
            return {"message": message, "class": message_class}
        results_parsed = parse_results(results)
        if annotate:
            annotate_results(results_parsed)
        if "dedup" in query_params:
            if query_params["dedup"] == "on":
                dedup = True
            else:
                dedup = False
        else:
            dedup = False
        if reverse:
            results_parsed = results_parsed[::-1]
        if dedup:
            if not dedup_fields:
                dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
            results_parsed = dedup_list(results_parsed, dedup_fields)
        context = {
            "object_list": results_parsed,
            "card": results["hits"]["total"],
            "took": results["took"],
        }
        if "cache" in results:
            context["cache"] = results["cache"]
        return context

View File

@@ -1,5 +1,5 @@
 from datetime import datetime
+import ast

 from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online
@@ -92,6 +92,11 @@ def parse_results(results, meta=None):
             for field in list(element.keys()):
                 if element[field] == "":
                     del element[field]
+
+            # Unfold the tokens
+            if "tokens" in element:
+                if element["tokens"].startswith('["'):
+                    tokens_parsed = ast.literal_eval(element["tokens"])
+                    element["tokens"] = tokens_parsed

             # Split the timestamp into date and time
             if "ts" not in element:

View File

@@ -4,7 +4,7 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
     extra_should = []
     extra_should2 = []
     if num:
-        extra_must.append({"match_phrase": {"num": num}})
+        extra_must.append({"equals": {"num": num}})
     if net:
         extra_must.append({"match_phrase": {"net": net}})
     if channel:
@@ -52,7 +52,7 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
extra_should.append({"match": {"nick": channel}}) extra_should.append({"match": {"nick": channel}})
else: else:
for ctype in types: for ctype in types:
extra_should.append({"match": {"mtype": ctype}}) extra_should.append({"equals": {"mtype": ctype}})
else: else:
for ctype in types: for ctype in types:
extra_should.append({"match": {"type": ctype}}) extra_should.append({"match": {"type": ctype}})
@@ -84,4 +84,6 @@ def construct_query(index, net, channel, src, num, size, type=None, nicks=None):
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}}) query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
if extra_should2: if extra_should2:
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should2]}}) query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should2]}})
print("CONTEXT QUERY", query)
return query return query

View File

@@ -90,6 +90,8 @@ def parse_index(user, query_params, raise_error=False):
             }
     else:
         index = settings.INDEX_MAIN
+    print("GOT INDEX", index)
+
     return index

View File

@@ -335,7 +335,8 @@ class NotificationRuleData(object):
         if not isinstance(matches, list):
             matches = [matches]
         matches_copy = matches.copy()
-        match_ts = datetime.utcnow().isoformat()
+        # match_ts = datetime.utcnow().isoformat()
+        match_ts = int(datetime.utcnow().timestamp())
         batch_id = uuid.uuid4()

         # Filter empty fields in meta
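The match timestamp switches from an ISO-8601 string to an integer epoch, which fits an integer column and plain range comparisons. For illustration:

from datetime import datetime

now = datetime.utcnow()
print(now.isoformat())       # e.g. 2024-12-29T17:37:23.123456  (old format)
print(int(now.timestamp()))  # e.g. 1735493843                  (new format)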

View File

@@ -1,5 +1,6 @@
 import msgpack
 from django.core.management.base import BaseCommand
+from django.conf import settings
 from redis import StrictRedis

 from core.db.storage import db
@@ -93,7 +94,12 @@ def process_rules(data):
 class Command(BaseCommand):
     def handle(self, *args, **options):
-        r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
+        r = StrictRedis(
+            host=settings.REDIS_HOST,
+            port=settings.REDIS_PORT,
+            password=settings.REDIS_PASSWORD,
+            db=settings.REDIS_DB
+        )
         p = r.pubsub()
         p.psubscribe("messages")
         for message in p.listen():
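The handler blocks on a pattern subscription, so anything published to the messages channel drives rule processing. A minimal publisher sketch for local testing, assuming the same Redis settings (the real payload schema is defined by the ingest side):

import msgpack
from redis import StrictRedis

r = StrictRedis(host="localhost", port=6379, db=0)
# Illustrative payload only; the producer defines the actual schema.
r.publish("messages", msgpack.packb([{"msg": "hello", "src": "irc"}]))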

View File

@@ -78,6 +78,7 @@ class User(AbstractUser):
""" """
Override the save function to create a Stripe customer. Override the save function to create a Stripe customer.
""" """
if settings.BILLING_ENABLED:
if not self.stripe_id: # stripe ID not stored if not self.stripe_id: # stripe ID not stored
self.stripe_id = get_or_create(self.email, self.first_name, self.last_name) self.stripe_id = get_or_create(self.email, self.first_name, self.last_name)
@@ -89,11 +90,13 @@ class User(AbstractUser):
             if self.last_name != self._original.last_name:
                 to_update["last_name"] = self.last_name
+            if settings.BILLING_ENABLED:
                 update_customer_fields(self.stripe_id, **to_update)
         super().save(*args, **kwargs)

     def delete(self, *args, **kwargs):
+        if settings.BILLING_ENABLED:
             if self.stripe_id:
                 stripe.Customer.delete(self.stripe_id)
                 logger.info(f"Deleted Stripe customer {self.stripe_id}")

View File

@@ -280,7 +280,7 @@
 {% if user.is_superuser %}
 <div class="navbar-item has-dropdown is-hoverable">
     <a class="navbar-link">
-        Threshold
+        Manage
     </a>
     <div class="navbar-dropdown">
@@ -290,6 +290,9 @@
<a class="navbar-item" href="#"> <a class="navbar-item" href="#">
Discord Discord
</a> </a>
<a class="navbar-item" href="{% url 'monolith_stats' %}">
Stats
</a>
</div> </div>
</div> </div>
{% endif %} {% endif %}

View File

@@ -0,0 +1,15 @@
{% extends "base.html" %}
{% block content %}
<div
    style="display: none;"
    hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
    hx-get="{% url 'monolith_stats_db' type='page' %}"
    hx-trigger="load, every 5s"
    hx-target="#stats"
    hx-swap="innerHTML">
</div>
<div class="box">
    <div id="stats">
    </div>
</div>
{% endblock %}

View File

@@ -0,0 +1,14 @@
{% extends 'mixins/partials/generic-detail.html' %}
{% block tbody %}
{% for item in object %}
    {% if item.data %}
        {% for row in item.data %}
        <tr>
            <th>{{ row.Variable_name }}</th>
            <td>{{ row.Value }}</td>
        </tr>
        {% endfor %}
    {% endif %}
{% endfor %}
{% endblock %}

View File

@@ -174,10 +174,11 @@
             </td>
             {% elif column.name == 'match_ts' %}
             <td class="{{ column.name }}">
-                {% with match_ts=cell|splitstr:'T' %}
+                <!-- {# with match_ts=cell|splitstr:'T' %}
                 <p>{{ match_ts.0 }}</p>
                 <p>{{ match_ts.1 }}</p>
-                {% endwith %}
+                {% endwith #} -->
+                <p>{{ match_ts }}</p>
             </td>
             {% elif column.name == 'type' or column.name == 'mtype' %}
             <td class="{{ column.name }}">

View File

@@ -5,4 +5,6 @@ register = template.Library()
 @register.filter
 def splitstr(value, arg):
+    if type(value) == int:
+        raise Exception(f"Attempt to split {value} with separator {arg}")
     return value.split(arg)

View File

@@ -0,0 +1,36 @@
from django.shortcuts import render
from django.views import View
from rest_framework.parsers import FormParser
from rest_framework.views import APIView

from core.db.storage import db
from core.views.manage.permissions import SuperUserRequiredMixin
from mixins.views import ObjectRead


class MonolithStats(SuperUserRequiredMixin, View):
    template_name = "manage/monolith/stats/index.html"

    def get(self, request):
        return render(request, self.template_name)


class MonolithDBStats(SuperUserRequiredMixin, ObjectRead):
    detail_template = "manage/monolith/stats/overview.html"
    context_object_name_singular = "Status"
    context_object_name = "Status"
    detail_url_name = "monolith_stats_db"
    detail_url_args = ["type"]

    def get_object(self, **kwargs):
        search_query = "SHOW TABLE main STATUS"
        stats = db.run_query(
            self.request.user,
            search_query=search_query,
            path="sql?mode=raw",
            raw=True,
            # method="get",
        )
        return stats
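MonolithDBStats drives the backend's raw mode to reach Manticore's SQL endpoint instead of the JSON search API. The equivalent direct call, as a sketch assuming Manticore on localhost:9308 (mode=raw returns rows of Variable_name/Value pairs, which the overview template iterates):

import requests

# The statement is posted as the request body, mirroring run_query's raw path.
response = requests.post(
    "http://localhost:9308/sql?mode=raw",
    data="SHOW TABLE main STATUS",
)
print(response.json())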

docker-compose.prod.yml (new file, 561 lines)

@@ -0,0 +1,561 @@
version: "2.2"
services:
app:
image: pathogen/neptune:latest
container_name: neptune
build:
context: .
args:
OPERATION: ${OPERATION}
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- neptune_static:${STATIC_ROOT}
# env_file:
# - stack.env
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- xf
- db
processing:
image: pathogen/neptune:latest
container_name: processing_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py processing'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- neptune_static:${STATIC_ROOT}
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- xf
- db
scheduling:
image: pathogen/neptune:latest
container_name: scheduling_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py scheduling'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- neptune_static:${STATIC_ROOT}
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
collectstatic:
condition: service_started
networks:
- default
- xf
- db
migration:
image: pathogen/neptune:latest
container_name: migration_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- neptune_static:${STATIC_ROOT}
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs: "${# URLs}"
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
# volumes_from:
# - tmp
depends_on:
redis:
condition: service_healthy
networks:
- default
- xf
- db
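# Editor's note: "condition: service_started" only waits for the migration
# and collectstatic containers to launch, not to finish. Newer Compose
# implementations also accept the following, which would hold the app back
# until these one-shot jobs exit cleanly (support under Podman would need
# verifying):
#
#   depends_on:
#     migration:
#       condition: service_completed_successfully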
collectstatic:
image: pathogen/neptune:latest
container_name: collectstatic_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/conf/db.sqlite3
- neptune_static:${STATIC_ROOT}
# volumes_from:
# - tmp
environment:
# General application settings
APP_PORT: "${APP_PORT}"
PORTAINER_GIT_DIR: "${PORTAINER_GIT_DIR}"
APP_LOCAL_SETTINGS: "${APP_LOCAL_SETTINGS}"
APP_DATABASE_FILE: "${APP_DATABASE_FILE}"
STATIC_ROOT: "${STATIC_ROOT}"
OPERATION: "${OPERATION}"
# Elasticsearch settings
ELASTICSEARCH_URL: "${ELASTICSEARCH_URL}"
ELASTICSEARCH_PORT: "${ELASTICSEARCH_PORT}"
ELASTICSEARCH_TLS: "${ELASTICSEARCH_TLS}"
ELASTICSEARCH_USERNAME: "${ELASTICSEARCH_USERNAME}"
ELASTICSEARCH_PASSWORD: "${ELASTICSEARCH_PASSWORD}"
# Manticore settings
MANTICORE_URL: "${MANTICORE_URL}"
# Database settings
DB_BACKEND: "${DB_BACKEND}"
INDEX_MAIN: "${INDEX_MAIN}"
INDEX_RESTRICTED: "${INDEX_RESTRICTED}"
INDEX_META: "${INDEX_META}"
INDEX_INT: "${INDEX_INT}"
INDEX_RULE_STORAGE: "${INDEX_RULE_STORAGE}"
MAIN_SIZES: "${MAIN_SIZES}"
MAIN_SIZES_ANON: "${MAIN_SIZES_ANON}"
MAIN_SOURCES: "${MAIN_SOURCES}"
SOURCES_RESTRICTED: "${SOURCES_RESTRICTED}"
CACHE: "${CACHE}"
CACHE_TIMEOUT: "${CACHE_TIMEOUT}"
# Drilldown settings
DRILLDOWN_RESULTS_PER_PAGE: "${DRILLDOWN_RESULTS_PER_PAGE}"
DRILLDOWN_DEFAULT_SIZE: "${DRILLDOWN_DEFAULT_SIZE}"
DRILLDOWN_DEFAULT_INDEX: "${DRILLDOWN_DEFAULT_INDEX}"
DRILLDOWN_DEFAULT_SORTING: "${DRILLDOWN_DEFAULT_SORTING}"
DRILLDOWN_DEFAULT_SOURCE: "${DRILLDOWN_DEFAULT_SOURCE}"
# URLs
DOMAIN: "${DOMAIN}"
URL: "${URL}"
# Access control
ALLOWED_HOSTS: "${ALLOWED_HOSTS}"
# CSRF
CSRF_TRUSTED_ORIGINS: "${CSRF_TRUSTED_ORIGINS}"
# Stripe settings
BILLING_ENABLED: "${BILLING_ENABLED}"
STRIPE_TEST: "${STRIPE_TEST}"
STRIPE_API_KEY_TEST: "${STRIPE_API_KEY_TEST}"
STRIPE_PUBLIC_API_KEY_TEST: "${STRIPE_PUBLIC_API_KEY_TEST}"
STRIPE_API_KEY_PROD: "${STRIPE_API_KEY_PROD}"
STRIPE_PUBLIC_API_KEY_PROD: "${STRIPE_PUBLIC_API_KEY_PROD}"
STRIPE_ENDPOINT_SECRET: "${STRIPE_ENDPOINT_SECRET}"
STRIPE_ADMIN_COUPON: "${STRIPE_ADMIN_COUPON}"
# Threshold settings
THRESHOLD_ENDPOINT: "${THRESHOLD_ENDPOINT}"
THRESHOLD_API_KEY: "${THRESHOLD_API_KEY}"
THRESHOLD_API_TOKEN: "${THRESHOLD_API_TOKEN}"
THRESHOLD_API_COUNTER: "${THRESHOLD_API_COUNTER}"
# NickTrace settings
NICKTRACE_MAX_ITERATIONS: "${NICKTRACE_MAX_ITERATIONS}"
NICKTRACE_MAX_CHUNK_SIZE: "${NICKTRACE_MAX_CHUNK_SIZE}"
NICKTRACE_QUERY_SIZE: "${NICKTRACE_QUERY_SIZE}"
# Meta settings
META_MAX_ITERATIONS: "${META_MAX_ITERATIONS}"
META_MAX_CHUNK_SIZE: "${META_MAX_CHUNK_SIZE}"
META_QUERY_SIZE: "${META_QUERY_SIZE}"
# Debugging and profiling
DEBUG: "${DEBUG}"
PROFILER: "${PROFILER}"
# Redis settings
REDIS_HOST: "${REDIS_HOST}"
REDIS_PASSWORD: "${REDIS_PASSWORD}"
REDIS_DB: "${REDIS_DB}"
REDIS_DB_CACHE: "${REDIS_DB_CACHE}"
REDIS_PORT: "${REDIS_PORT}"
depends_on:
redis:
condition: service_healthy
networks:
- default
- xf
- db
nginx:
image: nginx:latest
container_name: nginx_neptune
ports:
- ${APP_PORT}:9999
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
- neptune_static:${STATIC_ROOT}
# volumes_from:
# - tmp
networks:
- default
- xf
depends_on:
app:
condition: service_started
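# Editor's note: the server block is selected per environment by mounting
# docker/nginx/conf.d/${OPERATION}.conf over /etc/nginx/conf.d/default.conf
# (with OPERATION=dev this resolves to dev.conf; a matching file per
# environment is assumed to exist). nginx listens on 9999 inside the
# container and is published on ${APP_PORT}, serving collected static files
# from the shared neptune_static volume.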
# tmp:
# image: busybox
# container_name: tmp_neptune
# command: chmod -R 777 /var/run/socks
# volumes:
# - /var/run/socks
redis:
image: redis
container_name: redis_neptune
command: redis-server /etc/redis.conf
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
- neptune_redis_data:/data
# volumes_from:
# - tmp
healthcheck:
test: "redis-cli ping"
interval: 2s
timeout: 2s
retries: 15
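# Editor's note: redis.conf sets "requirepass", so a bare "redis-cli ping"
# receives a NOAUTH error; whether that fails the healthcheck depends on the
# redis-cli version. An authenticated probe is one alternative (sketch only;
# it assumes REDIS_PASSWORD is also passed into this container's
# environment):
#
#   test: ["CMD-SHELL", "redis-cli -a $$REDIS_PASSWORD ping | grep -q PONG"]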
networks:
- default
- xf
networks:
default:
driver: bridge
xf:
external: true
db:
external: true
volumes:
neptune_static: {}
neptune_redis_data: {}
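# Editor's note: REDIS_DB and REDIS_DB_CACHE, passed to every service above,
# point at two separate Redis logical databases, presumably one for
# application state and one for the django_redis cache so that a cache flush
# cannot touch live keys. The split is inferred from the variable names
# rather than stated anywhere in the stack.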

View File

@@ -1,5 +1,4 @@
-unixsocket /var/run/socks/redis.sock
-unixsocketperm 777
-# For Monolith PubSub
+# unixsocket /var/run/socks/redis.sock
+# unixsocketperm 777
 port 6379
 requirepass changeme

View File

@@ -27,3 +27,4 @@ redis
 hiredis
 django-cachalot
 django_redis
+httpx

View File

@@ -1,6 +1,86 @@
+# General application settings
 APP_PORT=5000
 PORTAINER_GIT_DIR=.
 APP_LOCAL_SETTINGS=./app/local_settings.py
 APP_DATABASE_FILE=./db.sqlite3
 STATIC_ROOT=/conf/static
 OPERATION=dev
+# Elasticsearch settings
+ELASTICSEARCH_URL=10.1.0.1
+ELASTICSEARCH_PORT=9200
+ELASTICSEARCH_TLS=True
+ELASTICSEARCH_USERNAME=admin
+ELASTICSEARCH_PASSWORD=secret
+# Manticore settings
+MANTICORE_URL=http://monolith_db:9308
+# Database settings
+DB_BACKEND=MANTICORE
+INDEX_MAIN=main
+INDEX_RESTRICTED=restricted
+INDEX_META=meta
+INDEX_INT=internal
+INDEX_RULE_STORAGE=rule_storage
+MAIN_SIZES=1,5,15,30,50,100,250,500,1000
+MAIN_SIZES_ANON=1,5,15,30,50,100
+MAIN_SOURCES=dis,4ch,all
+SOURCES_RESTRICTED=irc
+CACHE=True
+CACHE_TIMEOUT=2
+# Drilldown settings
+DRILLDOWN_RESULTS_PER_PAGE=15
+DRILLDOWN_DEFAULT_SIZE=15
+DRILLDOWN_DEFAULT_INDEX=main
+DRILLDOWN_DEFAULT_SORTING=desc
+DRILLDOWN_DEFAULT_SOURCE=all
+# URLs
+DOMAIN=qi
+URL=http://10.0.0.10:5000
+# Access control
+ALLOWED_HOSTS=127.0.0.1,localhost,10.0.0.10,qi
+# CSRF
+CSRF_TRUSTED_ORIGINS=http://127.0.0.1:5000,http://localhost:5000,http://qi:5000,http://10.0.0.10:5000
+# Stripe settings
+BILLING_ENABLED=False
+STRIPE_TEST=True
+STRIPE_API_KEY_TEST=
+STRIPE_PUBLIC_API_KEY_TEST=
+STRIPE_API_KEY_PROD=
+STRIPE_PUBLIC_API_KEY_PROD=
+STRIPE_ENDPOINT_SECRET=
+STRIPE_ADMIN_COUPON=
+# Threshold settings
+THRESHOLD_ENDPOINT=http://threshold:13869
+THRESHOLD_API_KEY=api_1
+THRESHOLD_API_TOKEN=
+THRESHOLD_API_COUNTER=
+# NickTrace settings
+NICKTRACE_MAX_ITERATIONS=4
+NICKTRACE_MAX_CHUNK_SIZE=500
+NICKTRACE_QUERY_SIZE=10000
+# Meta settings
+META_MAX_ITERATIONS=4
+META_MAX_CHUNK_SIZE=500
+META_QUERY_SIZE=10000
+# Debugging and profiling
+DEBUG=True
+PROFILER=False
+# Redis settings
+REDIS_HOST=redis_neptune
+REDIS_PASSWORD=changeme
+REDIS_DB=1
+REDIS_DB_CACHE=10
+REDIS_PORT=6379