Compare commits

...

15 Commits

47 changed files with 1039 additions and 1359 deletions

View File

@@ -24,8 +24,7 @@ repos:
exclude : ^core/static/css # slow exclude : ^core/static/css # slow
- id: djjs - id: djjs
exclude: ^core/static/js # slow exclude: ^core/static/js # slow
# - repo: https://github.com/thibaudcolas/curlylint - repo: https://github.com/sirwart/ripsecrets.git
# rev: v0.13.1 rev: v0.1.5
# hooks: hooks:
# - id: curlylint - id: ripsecrets
# files: \.(html|sls)$

28
Dockerfile Normal file
View File

@@ -0,0 +1,28 @@
# syntax=docker/dockerfile:1
FROM python:3
ARG OPERATION
RUN useradd -d /code pathogen
RUN mkdir -p /code
RUN chown -R pathogen:pathogen /code
RUN mkdir -p /conf/static
RUN chown -R pathogen:pathogen /conf
RUN mkdir /venv
RUN chown pathogen:pathogen /venv
USER pathogen
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
WORKDIR /code
COPY requirements.txt /code/
RUN python -m venv /venv
RUN . /venv/bin/activate && pip install -r requirements.txt
# CMD . /venv/bin/activate && uwsgi --ini /conf/uwsgi.ini
CMD if [ "$OPERATION" = "uwsgi" ] ; then . /venv/bin/activate && uwsgi --ini /conf/uwsgi.ini ; else . /venv/bin/activate && exec python manage.py runserver 0.0.0.0:8000; fi
# CMD . /venv/bin/activate && uvicorn --reload --reload-include *.html --workers 2 --uds /var/run/socks/app.sock app.asgi:application
# CMD . /venv/bin/activate && gunicorn -b 0.0.0.0:8000 --reload app.asgi:application -k uvicorn.workers.UvicornWorker

20
Makefile Normal file
View File

@@ -0,0 +1,20 @@
run:
docker-compose --env-file=stack.env up -d
build:
docker-compose --env-file=stack.env build
stop:
docker-compose --env-file=stack.env down
log:
docker-compose --env-file=stack.env logs -f
migrate:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py migrate"
makemigrations:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py makemigrations"
auth:
docker-compose --env-file=stack.env run --rm app sh -c ". /venv/bin/activate && python manage.py createsuperuser"

View File

@@ -1,40 +1,37 @@
# OpenSearch settings # Elasticsearch settings
OPENSEARCH_URL = "127.0.0.1" ELASTICSEARCH_URL = "10.1.0.1"
OPENSEARCH_PORT = 9200 ELASTICSEARCH_PORT = 9200
OPENSEARCH_TLS = True ELASTICSEARCH_TLS = True
OPENSEARCH_USERNAME = "admin" ELASTICSEARCH_USERNAME = "admin"
OPENSEARCH_PASSWORD = "" ELASTICSEARCH_PASSWORD = "secret"
OPENSEARCH_INDEX_MAIN = "pathogen-main"
OPENSEARCH_INDEX_META = "pathogen-meta"
OPENSEARCH_INDEX_INT = "pathogen-int"
OPENSEARCH_MAIN_SIZES = ["20", "50", "100", "200", "400", "800"]
OPENSEARCH_MAIN_SIZES_ANON = ["20", "50", "100"]
OPENSEARCH_MAIN_SOURCES = ["dis", "4ch", "all"]
OPENSEARCH_SOURCES_RESTRICTED = ["irc"]
# Manticore settings # Manticore settings
MANTICORE_URL = "http://monolith-db-1:9308" MANTICORE_URL = "http://example-db-1:9308"
MANTICORE_INDEX_MAIN = "main"
MANTICORE_INDEX_META = "meta"
MANTICORE_INDEX_INT = "internal"
MANTICORE_MAIN_SIZES = ["20", "50", "100", "200", "400", "800"] DB_BACKEND = "ELASTICSEARCH"
MANTICORE_MAIN_SIZES_ANON = ["20", "50", "100"]
MANTICORE_MAIN_SOURCES = ["dis", "4ch", "all"] # Common DB settings
MANTICORE_SOURCES_RESTRICTED = ["irc"] INDEX_MAIN = "main"
MANTICORE_CACHE = True INDEX_RESTRICTED = "restricted"
MANTICORE_CACHE_TIMEOUT = 60 INDEX_META = "meta"
INDEX_INT = "internal"
MAIN_SIZES = ["1", "5", "15", "30", "50", "100", "250", "500", "1000"]
MAIN_SIZES_ANON = ["1", "5", "15", "30", "50", "100"]
MAIN_SOURCES = ["dis", "4ch", "all"]
SOURCES_RESTRICTED = ["irc"]
CACHE = False
CACHE_TIMEOUT = 2
DRILLDOWN_RESULTS_PER_PAGE = 15 DRILLDOWN_RESULTS_PER_PAGE = 15
DRILLDOWN_DEFAULT_PARAMS = { DRILLDOWN_DEFAULT_PARAMS = {
"size": "20", "size": "15",
"index": "main", "index": "main",
"sorting": "desc", "sorting": "desc",
"source": "4ch", "source": "4ch",
} }
# Encryption # Encryption
# ENCRYPTION = False # ENCRYPTION = False
# ENCRYPTION_KEY = b"" # ENCRYPTION_KEY = b""
@@ -61,7 +58,7 @@ DRILLDOWN_DEFAULT_PARAMS = {
# # Delay results by this many days # # Delay results by this many days
# DELAY_DURATION = 10 # DELAY_DURATION = 10
OPENSEARCH_BLACKLISTED = {} ELASTICSEARCH_BLACKLISTED = {}
# URLs\ # URLs\
@@ -89,8 +86,8 @@ SECRET_KEY = "a"
STRIPE_ADMIN_COUPON = "" STRIPE_ADMIN_COUPON = ""
# Threshold # Threshold
THRESHOLD_ENDPOINT = "http://threshold-app-1:13869" THRESHOLD_ENDPOINT = "http://threshold:13869"
THRESHOLD_API_KEY = "" THRESHOLD_API_KEY = "api_1"
THRESHOLD_API_TOKEN = "" THRESHOLD_API_TOKEN = ""
THRESHOLD_API_COUNTER = "" THRESHOLD_API_COUNTER = ""
@@ -106,12 +103,3 @@ META_QUERY_SIZE = 10000
DEBUG = True DEBUG = True
PROFILER = False PROFILER = False
if DEBUG:
import socket # only if you haven't already imported this
hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + [
"127.0.0.1",
"10.0.2.2",
]
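The backend-specific OPENSEARCH_*/MANTICORE_* names collapse into shared settings (INDEX_*, MAIN_SIZES*, CACHE, ...), so calling code no longer needs to know which backend is active. A minimal sketch of that pattern, assuming the usual Django settings import (the helper name is illustrative, not part of this change):

from django.conf import settings

def allowed_sizes(user):
    # Mirrors the size check in query_results(): anonymous users only get
    # the smaller MAIN_SIZES_ANON list.
    return settings.MAIN_SIZES_ANON if user.is_anonymous else settings.MAIN_SIZES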

View File

@@ -2,7 +2,7 @@ import stripe
from django.conf import settings from django.conf import settings
from redis import StrictRedis from redis import StrictRedis
r = StrictRedis(unix_socket_path="/var/run/redis/redis.sock", db=0) r = StrictRedis(unix_socket_path="/var/run/socks/redis.sock", db=0)
if settings.STRIPE_TEST: if settings.STRIPE_TEST:
stripe.api_key = settings.STRIPE_API_KEY_TEST stripe.api_key = settings.STRIPE_API_KEY_TEST

View File

@@ -1,6 +1,7 @@
import random import random
import string import string
import time import time
from abc import ABC, abstractmethod
from datetime import datetime from datetime import datetime
from math import floor, log10 from math import floor, log10
@@ -14,7 +15,7 @@ from core.util import logs
from core.views import helpers from core.views import helpers
class StorageBackend(object): class StorageBackend(ABC):
def __init__(self, name): def __init__(self, name):
self.log = logs.get_logger(name) self.log = logs.get_logger(name)
self.log.info(f"Initialising storage backend {name}") self.log.info(f"Initialising storage backend {name}")
@@ -22,8 +23,9 @@ class StorageBackend(object):
self.initialise_caching() self.initialise_caching()
self.initialise() self.initialise()
@abstractmethod
def initialise(self, **kwargs): def initialise(self, **kwargs):
raise NotImplementedError pass
def initialise_caching(self): def initialise_caching(self):
hash_key = r.get("cache_hash_key") hash_key = r.get("cache_hash_key")
@@ -37,11 +39,13 @@ class StorageBackend(object):
self.log.debug(f"Decoded hash key: {hash_key}") self.log.debug(f"Decoded hash key: {hash_key}")
self.hash_key = hash_key self.hash_key = hash_key
@abstractmethod
def construct_query(self, **kwargs): def construct_query(self, **kwargs):
raise NotImplementedError pass
@abstractmethod
def run_query(self, **kwargs): def run_query(self, **kwargs):
raise NotImplementedError pass
def parse_size(self, query_params, sizes): def parse_size(self, query_params, sizes):
if "size" in query_params: if "size" in query_params:
@@ -93,22 +97,22 @@ class StorageBackend(object):
index = settings.INDEX_MAIN index = settings.INDEX_MAIN
return index return index
def parse_query(self, query_params, tags, size, index, custom_query, add_bool): def parse_query(self, query_params, tags, size, custom_query, add_bool, **kwargs):
query_created = False query_created = False
if "query" in query_params: if "query" in query_params:
query = query_params["query"] query = query_params["query"]
search_query = self.construct_query(query, size, index) search_query = self.construct_query(query, size, **kwargs)
query_created = True query_created = True
else: else:
if custom_query: if custom_query:
search_query = custom_query search_query = custom_query
else: else:
search_query = self.construct_query(None, size, index, blank=True) search_query = self.construct_query(None, size, blank=True, **kwargs)
if tags: if tags:
# Get a blank search query # Get a blank search query
if not query_created: if not query_created:
search_query = self.construct_query(None, size, index, blank=True) search_query = self.construct_query(None, size, blank=True, **kwargs)
query_created = True query_created = True
for item in tags: for item in tags:
for tagname, tagvalue in item.items(): for tagname, tagvalue in item.items():
@@ -217,7 +221,7 @@ class StorageBackend(object):
# For every hit from ES # For every hit from ES
for index, item in enumerate(list(response["hits"]["hits"])): for index, item in enumerate(list(response["hits"]["hits"])):
# For every blacklisted type # For every blacklisted type
for blacklisted_type in settings.OPENSEARCH_BLACKLISTED.keys(): for blacklisted_type in settings.ELASTICSEARCH_BLACKLISTED.keys():
# Check this field we are matching exists # Check this field we are matching exists
if "_source" in item.keys(): if "_source" in item.keys():
data_index = "_source" data_index = "_source"
@@ -228,9 +232,7 @@ class StorageBackend(object):
if blacklisted_type in item[data_index].keys(): if blacklisted_type in item[data_index].keys():
content = item[data_index][blacklisted_type] content = item[data_index][blacklisted_type]
# For every item in the blacklisted array for the type # For every item in the blacklisted array for the type
for blacklisted_item in settings.OPENSEARCH_BLACKLISTED[ for blacklisted_item in settings.ELASTICSEARCH_BLACKLISTED[blacklisted_type]:
blacklisted_type
]:
if blacklisted_item == str(content): if blacklisted_item == str(content):
# Remove the item # Remove the item
if item in response["hits"]["hits"]: if item in response["hits"]["hits"]:
@@ -255,7 +257,7 @@ class StorageBackend(object):
# Actually get rid of all the things we set to None # Actually get rid of all the things we set to None
response["hits"]["hits"] = [hit for hit in response["hits"]["hits"] if hit] response["hits"]["hits"] = [hit for hit in response["hits"]["hits"] if hit]
def query(self, user, search_query): def query(self, user, search_query, **kwargs):
# For time tracking # For time tracking
start = time.process_time() start = time.process_time()
if settings.CACHE: if settings.CACHE:
@@ -265,8 +267,6 @@ class StorageBackend(object):
cache_hit = r.get(f"query_cache.{user.id}.{hash}") cache_hit = r.get(f"query_cache.{user.id}.{hash}")
if cache_hit: if cache_hit:
response = orjson.loads(cache_hit) response = orjson.loads(cache_hit)
print("CACHE HIT", response)
time_took = (time.process_time() - start) * 1000 time_took = (time.process_time() - start) * 1000
# Round to 3 significant figures # Round to 3 significant figures
time_took_rounded = round( time_took_rounded = round(
@@ -277,7 +277,19 @@ class StorageBackend(object):
"took": time_took_rounded, "took": time_took_rounded,
"cache": True, "cache": True,
} }
response = self.run_query(user, search_query) response = self.run_query(user, search_query, **kwargs)
# For Elasticsearch
if isinstance(response, Exception):
message = f"Error: {response.info['error']['root_cause'][0]['type']}"
message_class = "danger"
return {"message": message, "class": message_class}
if len(response["hits"]["hits"]) == 0:
message = "No results."
message_class = "danger"
return {"message": message, "class": message_class}
# For Druid
if "error" in response: if "error" in response:
if "errorMessage" in response: if "errorMessage" in response:
context = { context = {
@@ -287,12 +299,12 @@ class StorageBackend(object):
return context return context
else: else:
return response return response
# response = response.to_dict()
# print("RESP", response)
if "took" in response: if "took" in response:
if response["took"] is None: if response["took"] is None:
return None return None
self.filter_blacklisted(user, response)
# Removed for now, no point given we have restricted indexes
# self.filter_blacklisted(user, response)
# Parse the response # Parse the response
response_parsed = self.parse(response) response_parsed = self.parse(response)
@@ -308,18 +320,20 @@ class StorageBackend(object):
time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1) time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1)
return {"object_list": response_parsed, "took": time_took_rounded} return {"object_list": response_parsed, "took": time_took_rounded}
@abstractmethod
def query_results(self, **kwargs): def query_results(self, **kwargs):
raise NotImplementedError pass
def process_results(self, response, **kwargs): def process_results(self, response, **kwargs):
if kwargs.get("annotate"): if kwargs.get("annotate"):
annotate_results(response) annotate_results(response)
if kwargs.get("dedup"): if kwargs.get("reverse"):
response = response[::-1] response.reverse()
if kwargs.get("dedup"): if kwargs.get("dedup"):
if not kwargs.get("dedup_fields"): if not kwargs.get("dedup_fields"):
dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"] dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
response = helpers.dedup_list(response, dedup_fields) response = helpers.dedup_list(response, dedup_fields)
@abstractmethod
def parse(self, response): def parse(self, response):
raise NotImplementedError pass
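With StorageBackend now an ABC whose abstract hooks are no-ops, a concrete backend only has to override those hooks. A minimal sketch of the required surface (the class, its return shapes and the whole backend are illustrative, not part of this change; instantiation still needs the Redis socket used by initialise_caching()):

from core.db import StorageBackend

class DummyBackend(StorageBackend):
    """Illustrative backend showing the abstract methods a subclass must provide."""

    def __init__(self):
        super().__init__("Dummy")

    def initialise(self, **kwargs):
        # Connect to the real datastore here.
        self.client = None

    def construct_query(self, query, size, blank=False, **kwargs):
        # Whatever structure run_query() understands.
        return {"query": query, "size": size, "blank": blank, "index": kwargs.get("index")}

    def run_query(self, user, search_query, **kwargs):
        # Shape loosely follows the Elasticsearch-style responses parse() expects.
        return {"took": 0, "hits": {"hits": []}}

    def query_results(self, request, query_params, **kwargs):
        search_query = self.construct_query(None, 10, blank=True)
        return self.query(request.user, search_query)

    def parse(self, response):
        return response["hits"]["hits"]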

View File

@@ -77,7 +77,8 @@ class DruidBackend(StorageBackend):
self.add_type("or", search_query, extra_should2) self.add_type("or", search_query, extra_should2)
return search_query return search_query
def construct_query(self, query, size, index, blank=False): def construct_query(self, query, size, blank=False, **kwargs):
index = kwargs.get("index")
search_query = { search_query = {
"limit": size, "limit": size,
"queryType": "scan", "queryType": "scan",
@@ -107,19 +108,13 @@ class DruidBackend(StorageBackend):
def parse(self, response): def parse(self, response):
parsed = parse_druid(response) parsed = parse_druid(response)
print("PARSE LEN", len(parsed))
return parsed return parsed
def run_query(self, user, search_query): def run_query(self, user, search_query):
ss = orjson.dumps(search_query, option=orjson.OPT_INDENT_2) ss = orjson.dumps(search_query, option=orjson.OPT_INDENT_2)
ss = ss.decode() ss = ss.decode()
print(ss) response = requests.post("http://druid:8082/druid/v2", json=search_query)
response = requests.post("http://broker:8082/druid/v2", json=search_query)
response = orjson.loads(response.text) response = orjson.loads(response.text)
print("RESPONSE LEN", len(response))
# ss = orjson.dumps(response, option=orjson.OPT_INDENT_2)
# ss = ss.decode()
# print(ss)
return response return response
def filter_blacklisted(self, user, response): def filter_blacklisted(self, user, response):
@@ -172,7 +167,7 @@ class DruidBackend(StorageBackend):
# Q/T - Query/Tags # Q/T - Query/Tags
search_query = self.parse_query( search_query = self.parse_query(
query_params, tags, size, index, custom_query, add_bool query_params, tags, size, custom_query, add_bool, index=index
) )
# Query should be a dict, so check if it contains message here # Query should be a dict, so check if it contains message here
if "message" in search_query: if "message" in search_query:
@@ -239,11 +234,6 @@ class DruidBackend(StorageBackend):
dedup_fields=dedup_fields, dedup_fields=dedup_fields,
reverse=reverse, reverse=reverse,
) )
# ss = orjson.dumps(list(response), option=orjson.OPT_INDENT_2)
# ss = ss.decode()
# print(ss)
# print("PARSED", results_parsed)
# return results_parsed
context = response context = response
return context return context
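Both backends now receive the index through keyword arguments rather than a positional parameter, and parse_query() forwards **kwargs on to construct_query(). A small sketch of the shared calling convention (the wrapper function is illustrative):

def build_search_query(backend, query_params, tags, size, custom_query, add_bool, index):
    # parse_query() passes **kwargs through to construct_query(), so the index
    # travels as a keyword argument instead of a positional one.
    return backend.parse_query(
        query_params, tags, size, custom_query, add_bool, index=index
    )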

375
core/db/elastic.py Normal file
View File

@@ -0,0 +1,375 @@
# from copy import deepcopy
# from datetime import datetime, timedelta
from django.conf import settings
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import NotFoundError, RequestError
from core.db import StorageBackend
# from json import dumps
# pp = lambda x: print(dumps(x, indent=2))
from core.db.processing import parse_results
from core.views import helpers
class ElasticsearchBackend(StorageBackend):
def __init__(self):
super().__init__("Elasticsearch")
def initialise(self, **kwargs):
"""
Initialise the Elasticsearch API endpoint.
"""
auth = (settings.ELASTICSEARCH_USERNAME, settings.ELASTICSEARCH_PASSWORD)
client = Elasticsearch(
settings.ELASTICSEARCH_URL, http_auth=auth, verify_certs=False
)
self.client = client
def construct_context_query(
self, index, net, channel, src, num, size, type=None, nicks=None
):
# Get the initial query
query = self.construct_query(None, size, blank=True)
extra_must = []
extra_should = []
extra_should2 = []
if num:
extra_must.append({"match_phrase": {"num": num}})
if net:
extra_must.append({"match_phrase": {"net": net}})
if channel:
extra_must.append({"match": {"channel": channel}})
if nicks:
for nick in nicks:
extra_should2.append({"match": {"nick": nick}})
types = ["msg", "notice", "action", "kick", "topic", "mode"]
fields = [
"nick",
"ident",
"host",
"channel",
"ts",
"msg",
"type",
"net",
"src",
"tokens",
]
query["fields"] = fields
if index == "internal":
fields.append("mtype")
if channel == "*status" or type == "znc":
if {"match": {"channel": channel}} in extra_must:
extra_must.remove({"match": {"channel": channel}})
extra_should2 = []
# Type is one of msg or notice
# extra_should.append({"match": {"mtype": "msg"}})
# extra_should.append({"match": {"mtype": "notice"}})
extra_should.append({"match": {"type": "znc"}})
extra_should.append({"match": {"type": "self"}})
extra_should2.append({"match": {"type": "znc"}})
extra_should2.append({"match": {"nick": channel}})
elif type == "auth":
if {"match": {"channel": channel}} in extra_must:
extra_must.remove({"match": {"channel": channel}})
extra_should2 = []
extra_should2.append({"match": {"nick": channel}})
# extra_should2.append({"match": {"mtype": "msg"}})
# extra_should2.append({"match": {"mtype": "notice"}})
extra_should.append({"match": {"type": "query"}})
extra_should2.append({"match": {"type": "self"}})
extra_should.append({"match": {"nick": channel}})
else:
for ctype in types:
extra_should.append({"equals": {"mtype": ctype}})
else:
for ctype in types:
extra_should.append({"match": {"type": ctype}})
# query = {
# "index": index,
# "limit": size,
# "query": {
# "bool": {
# "must": [
# # {"equals": {"src": src}},
# # {
# # "bool": {
# # "should": [*extra_should],
# # }
# # },
# # {
# # "bool": {
# # "should": [*extra_should2],
# # }
# # },
# *extra_must,
# ]
# }
# },
# "fields": fields,
# # "_source": False,
# }
if extra_must:
for x in extra_must:
query["query"]["bool"]["must"].append(x)
if extra_should:
query["query"]["bool"]["must"].append({"bool": {"should": [*extra_should]}})
if extra_should2:
query["query"]["bool"]["must"].append(
{"bool": {"should": [*extra_should2]}}
)
return query
def construct_query(self, query, size, blank=False):
"""
Accept some query parameters and construct an Elasticsearch query.
"""
query_base = {
"size": size,
"query": {"bool": {"must": []}},
}
query_string = {
"query_string": {
"query": query,
# "fields": fields,
# "default_field": "msg",
# "type": "best_fields",
"fuzziness": "AUTO",
"fuzzy_transpositions": True,
"fuzzy_max_expansions": 50,
"fuzzy_prefix_length": 0,
# "minimum_should_match": 1,
"default_operator": "or",
"analyzer": "standard",
"lenient": True,
"boost": 1,
"allow_leading_wildcard": True,
# "enable_position_increments": False,
"phrase_slop": 3,
# "max_determinized_states": 10000,
"quote_field_suffix": "",
"quote_analyzer": "standard",
"analyze_wildcard": False,
"auto_generate_synonyms_phrase_query": True,
}
}
if not blank:
query_base["query"]["bool"]["must"].append(query_string)
return query_base
def parse(self, response):
parsed = parse_results(response)
return parsed
def run_query(self, user, search_query, **kwargs):
"""
Low level helper to run an ES query.
Accept a user to pass it to the filter, so we can
avoid filtering for superusers.
Accept fields and size, for the fields we want to match and the
number of results to return.
"""
index = kwargs.get("index")
try:
response = self.client.search(body=search_query, index=index)
except RequestError as err:
print("Elasticsearch error", err)
return err
except NotFoundError as err:
print("Elasticsearch error", err)
return err
return response
def query_results(
self,
request,
query_params,
size=None,
annotate=True,
custom_query=False,
reverse=False,
dedup=False,
dedup_fields=None,
tags=None,
):
add_bool = []
add_top = []
add_top_negative = []
helpers.add_defaults(query_params)
# Now, run the helpers for SIQTSRSS/ADR
# S - Size
# I - Index
# Q - Query
# T - Tags
# S - Source
# R - Ranges
# S - Sort
# S - Sentiment
# A - Annotate
# D - Dedup
# R - Reverse
# S - Size
if request.user.is_anonymous:
sizes = settings.MAIN_SIZES_ANON
else:
sizes = settings.MAIN_SIZES
if not size:
size = self.parse_size(query_params, sizes)
if isinstance(size, dict):
return size
# I - Index
index = self.parse_index(request.user, query_params)
if isinstance(index, dict):
return index
# Q/T - Query/Tags
search_query = self.parse_query(
query_params, tags, size, custom_query, add_bool
)
# Query should be a dict, so check if it contains message here
if "message" in search_query:
return search_query
# S - Sources
sources = self.parse_source(request.user, query_params)
if isinstance(sources, dict):
return sources
total_count = len(sources)
total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
if total_count != total_sources:
add_top_tmp = {"bool": {"should": []}}
for source_iter in sources:
add_top_tmp["bool"]["should"].append(
{"match_phrase": {"src": source_iter}}
)
add_top.append(add_top_tmp)
# R - Ranges
# date_query = False
from_ts, to_ts = self.parse_date_time(query_params)
if from_ts:
range_query = {
"range": {
"ts": {
"gt": from_ts,
"lt": to_ts,
}
}
}
add_top.append(range_query)
# S - Sort
sort = self.parse_sort(query_params)
if isinstance(sort, dict):
return sort
if sort:
# For Druid compatibility
sort_map = {"ascending": "asc", "descending": "desc"}
sorting = [
{
"ts": {
"order": sort_map[sort],
}
}
]
search_query["sort"] = sorting
# S - Sentiment
sentiment_r = self.parse_sentiment(query_params)
if isinstance(sentiment_r, dict):
return sentiment_r
if sentiment_r:
sentiment_method, sentiment = sentiment_r
range_query_compare = {"range": {"sentiment": {}}}
range_query_precise = {
"match": {
"sentiment": None,
}
}
if sentiment_method == "below":
range_query_compare["range"]["sentiment"]["lt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "above":
range_query_compare["range"]["sentiment"]["gt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "exact":
range_query_precise["match"]["sentiment"] = sentiment
add_top.append(range_query_precise)
elif sentiment_method == "nonzero":
range_query_precise["match"]["sentiment"] = 0
add_top_negative.append(range_query_precise)
# Add in the additional information we already populated
self.add_bool(search_query, add_bool)
self.add_top(search_query, add_top)
self.add_top(search_query, add_top_negative, negative=True)
response = self.query(
request.user,
search_query,
index=index,
)
if "message" in response:
return response
# A/D/R - Annotate/Dedup/Reverse
self.process_results(
response["object_list"],
annotate=annotate,
dedup=dedup,
dedup_fields=dedup_fields,
reverse=reverse,
)
context = response
return context
def query_single_result(self, request, query_params):
context = self.query_results(request, query_params, size=100)
if not context:
return {"message": "Failed to run query", "message_class": "danger"}
if "message" in context:
return context
dedup_set = {item["nick"] for item in context["object_list"]}
if dedup_set:
context["item"] = context["object_list"][0]
return context
def add_bool(self, search_query, add_bool):
"""
Add the specified boolean matches to search query.
"""
if not add_bool:
return
for item in add_bool:
search_query["query"]["bool"]["must"].append({"match_phrase": item})
def add_top(self, search_query, add_top, negative=False):
"""
Merge add_top with the base of the search_query.
"""
if not add_top:
return
if negative:
for item in add_top:
if "must_not" in search_query["query"]["bool"]:
search_query["query"]["bool"]["must_not"].append(item)
else:
search_query["query"]["bool"]["must_not"] = [item]
else:
for item in add_top:
search_query["query"]["bool"]["must"].append(item)
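As a usage sketch, the new backend is driven entirely through query_results(); a hypothetical view wiring (the get_db import path and the view itself are assumptions, not part of this diff):

from core.db import get_db  # assumed import path for the factory changed further down

db = get_db()  # ElasticsearchBackend when DB_BACKEND = "ELASTICSEARCH"

def drilldown_search(request):
    # query_params normally comes from the submitted search form.
    query_params = request.GET.dict()
    context = db.query_results(request, query_params, annotate=True, dedup=True)
    # Error paths return {"message": ..., "class": "danger"}; success returns
    # an {"object_list": [...], "took": ...} style dictionary.
    return context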

View File

@@ -1,485 +0,0 @@
# from copy import deepcopy
# from datetime import datetime, timedelta
from django.conf import settings
from opensearchpy import OpenSearch
from opensearchpy.exceptions import NotFoundError, RequestError
from core.db import StorageBackend
# from json import dumps
# pp = lambda x: print(dumps(x, indent=2))
from core.db.processing import annotate_results, parse_results
from core.views.helpers import dedup_list
class OpensearchBackend(StorageBackend):
def __init__(self):
super().__init__("Opensearch")
def initialise(self, **kwargs):
"""
Initialise the OpenSearch API endpoint.
"""
auth = (settings.OPENSEARCH_USERNAME, settings.OPENSEARCH_PASSWORD)
client = OpenSearch(
# fmt: off
hosts=[{"host": settings.OPENSEARCH_URL,
"port": settings.OPENSEARCH_PORT}],
http_compress=False, # gzip compression for request bodies (disabled here)
http_auth=auth,
# client_cert = client_cert_path,
# client_key = client_key_path,
use_ssl=settings.OPENSEARCH_TLS,
verify_certs=False,
ssl_assert_hostname=False,
ssl_show_warn=False,
# a_certs=ca_certs_path,
)
self.client = client
def construct_query(self, query, size, use_query_string=True, tokens=False):
"""
Accept some query parameters and construct an OpenSearch query.
"""
if not size:
size = 5
query_base = {
"size": size,
"query": {"bool": {"must": []}},
}
query_string = {
"query_string": {
"query": query,
# "fields": fields,
# "default_field": "msg",
# "type": "best_fields",
"fuzziness": "AUTO",
"fuzzy_transpositions": True,
"fuzzy_max_expansions": 50,
"fuzzy_prefix_length": 0,
# "minimum_should_match": 1,
"default_operator": "or",
"analyzer": "standard",
"lenient": True,
"boost": 1,
"allow_leading_wildcard": True,
# "enable_position_increments": False,
"phrase_slop": 3,
# "max_determinized_states": 10000,
"quote_field_suffix": "",
"quote_analyzer": "standard",
"analyze_wildcard": False,
"auto_generate_synonyms_phrase_query": True,
}
}
query_tokens = {
"simple_query_string": {
# "tokens": query,
"query": query,
"fields": ["tokens"],
"flags": "ALL",
"fuzzy_transpositions": True,
"fuzzy_max_expansions": 50,
"fuzzy_prefix_length": 0,
"default_operator": "and",
"analyzer": "standard",
"lenient": True,
"boost": 1,
"quote_field_suffix": "",
"analyze_wildcard": False,
"auto_generate_synonyms_phrase_query": False,
}
}
if tokens:
query_base["query"]["bool"]["must"].append(query_tokens)
# query["query"]["bool"]["must"].append(query_string)
# query["query"]["bool"]["must"][0]["query_string"]["fields"] = ["tokens"]
elif use_query_string:
query_base["query"]["bool"]["must"].append(query_string)
return query_base
def run_query(self, client, user, query, custom_query=False, index=None, size=None):
"""
Low level helper to run an ES query.
Accept a user to pass it to the filter, so we can
avoid filtering for superusers.
Accept fields and size, for the fields we want to match and the
number of results to return.
"""
if not index:
index = settings.INDEX_MAIN
if custom_query:
search_query = query
else:
search_query = self.construct_query(query, size)
try:
response = client.search(body=search_query, index=index)
except RequestError as err:
print("OpenSearch error", err)
return err
except NotFoundError as err:
print("OpenSearch error", err)
return err
return response
def query_results(
self,
request,
query_params,
size=None,
annotate=True,
custom_query=False,
reverse=False,
dedup=False,
dedup_fields=None,
lookup_hashes=True,
tags=None,
):
"""
API helper to alter the OpenSearch return format into something
a bit better to parse.
Accept a HTTP request object. Run the query, and annotate the
results with the other data we have.
"""
# is_anonymous = isinstance(request.user, AnonymousUser)
query = None
message = None
message_class = None
add_bool = []
add_top = []
add_top_negative = []
sort = None
query_created = False
# Lookup the hash values but don't disclose them to the user
# denied = []
# if lookup_hashes:
# if settings.HASHING:
# query_params = deepcopy(query_params)
# denied_q = hash_lookup(request.user, query_params)
# denied.extend(denied_q)
# if tags:
# denied_t = hash_lookup(request.user, tags, query_params)
# denied.extend(denied_t)
# message = "Permission denied: "
# for x in denied:
# if isinstance(x, SearchDenied):
# message += f"Search({x.key}: {x.value}) "
# elif isinstance(x, LookupDenied):
# message += f"Lookup({x.key}: {x.value}) "
# if denied:
# # message = [f"{i}" for i in message]
# # message = "\n".join(message)
# message_class = "danger"
# return {"message": message, "class": message_class}
if request.user.is_anonymous:
sizes = settings.MAIN_SIZES_ANON
else:
sizes = settings.MAIN_SIZES
if not size:
if "size" in query_params:
size = query_params["size"]
if size not in sizes:
message = "Size is not permitted"
message_class = "danger"
return {"message": message, "class": message_class}
else:
size = 20
source = None
if "source" in query_params:
source = query_params["source"]
if source in settings.SOURCES_RESTRICTED:
if not request.user.has_perm("core.restricted_sources"):
message = "Access denied"
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.MAIN_SOURCES:
message = "Invalid source"
message_class = "danger"
return {"message": message, "class": message_class}
if source == "all":
source = None # the next block will populate it
if source:
sources = [source]
else:
sources = settings.MAIN_SOURCES
if request.user.has_perm("core.restricted_sources"):
for source_iter in settings.SOURCES_RESTRICTED:
sources.append(source_iter)
add_top_tmp = {"bool": {"should": []}}
for source_iter in sources:
add_top_tmp["bool"]["should"].append({"match_phrase": {"src": source_iter}})
add_top.append(add_top_tmp)
# date_query = False
if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
query_params.keys()
):
from_ts = f"{query_params['from_date']}T{query_params['from_time']}Z"
to_ts = f"{query_params['to_date']}T{query_params['to_time']}Z"
range_query = {
"range": {
"ts": {
"gt": from_ts,
"lt": to_ts,
}
}
}
add_top.append(range_query)
# if date_query:
# if settings.DELAY_RESULTS:
# if source not in settings.SAFE_SOURCES:
# if request.user.has_perm("core.bypass_delay"):
# add_top.append(range_query)
# else:
# delay_as_ts = datetime.now() - timedelta(
# days=settings.DELAY_DURATION
# )
# lt_as_ts = datetime.strptime(
# range_query["range"]["ts"]["lt"], "%Y-%m-%dT%H:%MZ"
# )
# if lt_as_ts > delay_as_ts:
# range_query["range"]["ts"][
# "lt"
# ] = f"now-{settings.DELAY_DURATION}d"
# add_top.append(range_query)
# else:
# add_top.append(range_query)
# else:
# if settings.DELAY_RESULTS:
# if source not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_delay"):
# range_query = {
# "range": {
# "ts": {
# # "gt": ,
# "lt": f"now-{settings.DELAY_DURATION}d",
# }
# }
# }
# add_top.append(range_query)
if "sorting" in query_params:
sorting = query_params["sorting"]
if sorting not in ("asc", "desc", "none"):
message = "Invalid sort"
message_class = "danger"
return {"message": message, "class": message_class}
if sorting in ("asc", "desc"):
sort = [
{
"ts": {
"order": sorting,
}
}
]
if "check_sentiment" in query_params:
if "sentiment_method" not in query_params:
message = "No sentiment method"
message_class = "danger"
return {"message": message, "class": message_class}
if "sentiment" in query_params:
sentiment = query_params["sentiment"]
try:
sentiment = float(sentiment)
except ValueError:
message = "Sentiment is not a float"
message_class = "danger"
return {"message": message, "class": message_class}
sentiment_method = query_params["sentiment_method"]
range_query_compare = {"range": {"sentiment": {}}}
range_query_precise = {
"match": {
"sentiment": None,
}
}
if sentiment_method == "below":
range_query_compare["range"]["sentiment"]["lt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "above":
range_query_compare["range"]["sentiment"]["gt"] = sentiment
add_top.append(range_query_compare)
elif sentiment_method == "exact":
range_query_precise["match"]["sentiment"] = sentiment
add_top.append(range_query_precise)
elif sentiment_method == "nonzero":
range_query_precise["match"]["sentiment"] = 0
add_top_negative.append(range_query_precise)
# Only one of query or query_full can be active at once
# We prefer query because it's simpler
if "query" in query_params:
query = query_params["query"]
search_query = self.construct_query(query, size, tokens=True)
query_created = True
elif "query_full" in query_params:
query_full = query_params["query_full"]
# if request.user.has_perm("core.query_search"):
search_query = self.construct_query(query_full, size)
query_created = True
# else:
# message = "You cannot search by query string"
# message_class = "danger"
# return {"message": message, "class": message_class}
else:
if custom_query:
search_query = custom_query
if tags:
# Get a blank search query
if not query_created:
search_query = self.construct_query(None, size, use_query_string=False)
query_created = True
for tagname, tagvalue in tags.items():
add_bool.append({tagname: tagvalue})
required_any = ["query_full", "query", "tags"]
if not any([field in query_params.keys() for field in required_any]):
if not custom_query:
message = "Empty query!"
message_class = "warning"
return {"message": message, "class": message_class}
if add_bool:
# if "bool" not in search_query["query"]:
# search_query["query"]["bool"] = {}
# if "must" not in search_query["query"]["bool"]:
# search_query["query"]["bool"] = {"must": []}
for item in add_bool:
search_query["query"]["bool"]["must"].append({"match_phrase": item})
if add_top:
for item in add_top:
search_query["query"]["bool"]["must"].append(item)
if add_top_negative:
for item in add_top_negative:
if "must_not" in search_query["query"]["bool"]:
search_query["query"]["bool"]["must_not"].append(item)
else:
search_query["query"]["bool"]["must_not"] = [item]
if sort:
search_query["sort"] = sort
if "index" in query_params:
index = query_params["index"]
if index == "main":
index = settings.INDEX_MAIN
else:
if not request.user.has_perm(f"core.index_{index}"):
message = "Not permitted to search by this index"
message_class = "danger"
return {
"message": message,
"class": message_class,
}
if index == "meta":
index = settings.INDEX_META
elif index == "internal":
index = settings.INDEX_INT
else:
message = "Index is not valid."
message_class = "danger"
return {
"message": message,
"class": message_class,
}
else:
index = settings.INDEX_MAIN
results = self.query(
request.user, # passed through run_main_query to filter_blacklisted
search_query,
custom_query=True,
index=index,
size=size,
)
if not results:
return False
if isinstance(results, Exception):
message = f"Error: {results.info['error']['root_cause'][0]['type']}"
message_class = "danger"
return {"message": message, "class": message_class}
if len(results["hits"]["hits"]) == 0:
message = "No results."
message_class = "danger"
return {"message": message, "class": message_class}
results_parsed = parse_results(results)
if annotate:
annotate_results(results_parsed)
if "dedup" in query_params:
if query_params["dedup"] == "on":
dedup = True
else:
dedup = False
else:
dedup = False
if reverse:
results_parsed = results_parsed[::-1]
if dedup:
if not dedup_fields:
dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
results_parsed = dedup_list(results_parsed, dedup_fields)
# if source not in settings.SAFE_SOURCES:
# if settings.ENCRYPTION:
# encrypt_list(request.user, results_parsed, settings.ENCRYPTION_KEY)
# if settings.HASHING:
# hash_list(request.user, results_parsed)
# if settings.OBFUSCATION:
# obfuscate_list(request.user, results_parsed)
# if settings.RANDOMISATION:
# randomise_list(request.user, results_parsed)
# process_list(results)
# IMPORTANT! - DO NOT PASS query_params to the user!
context = {
"object_list": results_parsed,
"card": results["hits"]["total"]["value"],
"took": results["took"],
}
if "redacted" in results:
context["redacted"] = results["redacted"]
if "exemption" in results:
context["exemption"] = results["exemption"]
if query:
context["query"] = query
# if settings.DELAY_RESULTS:
# if source not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_delay"):
# context["delay"] = settings.DELAY_DURATION
# if settings.RANDOMISATION:
# if source not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_randomisation"):
# context["randomised"] = True
return context
def query_single_result(self, request, query_params):
context = self.query_results(request, query_params, size=100)
if not context:
return {"message": "Failed to run query", "message_class": "danger"}
if "message" in context:
return context
dedup_set = {item["nick"] for item in context["object_list"]}
if dedup_set:
context["item"] = context["object_list"][0]
return context

View File

@@ -3,7 +3,7 @@ from datetime import datetime
from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online
def annotate_results(results_parsed): def annotate_results(results):
""" """
Accept a list of dict objects, search for the number of channels and users. Accept a list of dict objects, search for the number of channels and users.
Add them to the object. Add them to the object.
@@ -11,7 +11,7 @@ def annotate_results(results_parsed):
""" """
# Figure out items with net (not discord) # Figure out items with net (not discord)
nets = set() nets = set()
for x in results_parsed: for x in results:
if "net" in x: if "net" in x:
nets.add(x["net"]) nets.add(x["net"])
@@ -21,7 +21,7 @@ def annotate_results(results_parsed):
set( set(
[ [
x["nick"] x["nick"]
for x in results_parsed for x in results
if {"nick", "src", "net"}.issubset(x) if {"nick", "src", "net"}.issubset(x)
and x["src"] == "irc" and x["src"] == "irc"
and x["net"] == net and x["net"] == net
@@ -32,7 +32,7 @@ def annotate_results(results_parsed):
set( set(
[ [
x["channel"] x["channel"]
for x in results_parsed for x in results
if {"channel", "src", "net"}.issubset(x) if {"channel", "src", "net"}.issubset(x)
and x["src"] == "irc" and x["src"] == "irc"
and x["net"] == net and x["net"] == net
@@ -44,7 +44,7 @@ def annotate_results(results_parsed):
num_users = annotate_num_users(net, channels) num_users = annotate_num_users(net, channels)
# Annotate the number channels the user is on # Annotate the number channels the user is on
num_chans = annotate_num_chans(net, nicks) num_chans = annotate_num_chans(net, nicks)
for item in results_parsed: for item in results:
if "net" in item: if "net" in item:
if item["net"] == net: if item["net"] == net:
if "nick" in item: if "nick" in item:

View File

@@ -6,10 +6,10 @@ def get_db():
from core.db.druid import DruidBackend from core.db.druid import DruidBackend
return DruidBackend() return DruidBackend()
elif settings.DB_BACKEND == "OPENSEARCH": elif settings.DB_BACKEND == "ELASTICSEARCH":
from core.db.opensearch import OpensearchBackend from core.db.elastic import ElasticsearchBackend
return OpensearchBackend() return ElasticsearchBackend()
elif settings.DB_BACKEND == "MANTICORE": elif settings.DB_BACKEND == "MANTICORE":
from core.db.manticore import ManticoreBackend from core.db.manticore import ManticoreBackend
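The hunk only shows the branch that changed; the full factory it implies looks roughly like this (the DRUID and MANTICORE branches are taken from the surrounding context, the trailing error case is an assumption):

from django.conf import settings

def get_db():
    if settings.DB_BACKEND == "DRUID":
        from core.db.druid import DruidBackend
        return DruidBackend()
    elif settings.DB_BACKEND == "ELASTICSEARCH":
        from core.db.elastic import ElasticsearchBackend
        return ElasticsearchBackend()
    elif settings.DB_BACKEND == "MANTICORE":
        from core.db.manticore import ManticoreBackend
        return ManticoreBackend()
    raise NotImplementedError(f"Unsupported DB_BACKEND: {settings.DB_BACKEND}")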

View File

@@ -3,7 +3,7 @@ from math import ceil
from django.conf import settings from django.conf import settings
from numpy import array_split from numpy import array_split
from core.db.opensearch import client, run_main_query from core.db.elastic import client, run_main_query
def construct_query(net, nicks): def construct_query(net, nicks):
@@ -48,7 +48,7 @@ def get_meta(request, net, nicks, iter=True):
request.user, request.user,
query, query,
custom_query=True, custom_query=True,
index=settings.OPENSEARCH_INDEX_META, index=settings.ELASTICSEARCH_INDEX_META,
) )
if "hits" in results.keys(): if "hits" in results.keys():
if "hits" in results["hits"]: if "hits" in results["hits"]:

View File

@@ -0,0 +1,17 @@
# Generated by Django 4.1.3 on 2022-11-29 12:04
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0010_alter_perms_options'),
]
operations = [
migrations.AlterModelOptions(
name='perms',
options={'permissions': (('bypass_hashing', 'Can bypass field hashing'), ('bypass_blacklist', 'Can bypass the blacklist'), ('bypass_encryption', 'Can bypass field encryption'), ('bypass_obfuscation', 'Can bypass field obfuscation'), ('bypass_delay', 'Can bypass data delay'), ('bypass_randomisation', 'Can bypass data randomisation'), ('post_irc', 'Can post to IRC'), ('post_discord', 'Can post to Discord'), ('query_search', 'Can search with query strings'), ('use_insights', 'Can use the Insights page'), ('index_internal', 'Can use the internal index'), ('index_meta', 'Can use the meta index'), ('index_restricted', 'Can use the restricted index'), ('restricted_sources', 'Can access restricted sources'))},
),
]

View File

@@ -117,7 +117,8 @@ class Perms(models.Model):
("post_discord", "Can post to Discord"), ("post_discord", "Can post to Discord"),
("query_search", "Can search with query strings"), # ("query_search", "Can search with query strings"), #
("use_insights", "Can use the Insights page"), ("use_insights", "Can use the Insights page"),
("index_int", "Can use the internal index"), ("index_internal", "Can use the internal index"),
("index_meta", "Can use the meta index"), ("index_meta", "Can use the meta index"),
("index_restricted", "Can use the restricted index"),
("restricted_sources", "Can access restricted sources"), ("restricted_sources", "Can access restricted sources"),
) )
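The renamed index_internal codename and the new index_restricted one line up with the has_perm checks in the query path. A small sketch of that mapping, assuming a view-side helper like this (the function itself is illustrative):

from django.conf import settings

def resolve_index(user, index_name):
    # Map the user-facing index name to the real index, enforcing the
    # core.index_* permissions declared on the Perms model above.
    perm_map = {
        "internal": ("core.index_internal", settings.INDEX_INT),
        "meta": ("core.index_meta", settings.INDEX_META),
        "restricted": ("core.index_restricted", settings.INDEX_RESTRICTED),
    }
    if index_name == "main":
        return settings.INDEX_MAIN
    if index_name not in perm_map:
        return None  # invalid index
    perm, real_index = perm_map[index_name]
    if not user.has_perm(perm):
        return None  # not permitted
    return real_index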

View File

@@ -65,11 +65,12 @@ $(document).ready(function(){
"file_ext": "off", "file_ext": "off",
"file_size": "off", "file_size": "off",
"lang_code": "off", "lang_code": "off",
"tokens": "off",
//"lang_name": "off", //"lang_name": "off",
"words_noun": "off", // "words_noun": "off",
"words_adj": "off", // "words_adj": "off",
"words_verb": "off", // "words_verb": "off",
"words_adv": "off" // "words_adv": "off"
}, },
}; };
} else { } else {

View File

@@ -320,8 +320,18 @@
{% endblock %} {% endblock %}
<section class="section"> <section class="section">
<div class="container"> <div class="container">
{% block content_wrapper %}
{% block content %} {% block content %}
{% endblock %} {% endblock %}
{% endblock %}
<div id="modals-here">
</div>
<div id="windows-here">
</div>
<div id="widgets-here" style="display: none;">
{% block widgets %}
{% endblock %}
</div>
</div> </div>
</section> </section>
</body> </body>

View File

@@ -1,48 +1,152 @@
{% extends "base.html" %} {% extends 'base.html' %}
{% load static %} {% load static %}
{% load joinsep %}
{% block outer_content %}
{% if params.modal == 'context' %}
<div
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{% url 'modal_context' %}"
hx-vals='{"net": "{{ params.net|escapejs }}",
"num": "{{ params.num|escapejs }}",
"source": "{{ params.source|escapejs }}",
"channel": "{{ params.channel|escapejs }}",
"time": "{{ params.time|escapejs }}",
"date": "{{ params.date|escapejs }}",
"index": "{{ params.index }}",
"type": "{{ params.type|escapejs }}",
"mtype": "{{ params.mtype|escapejs }}",
"nick": "{{ params.nick|escapejs }}"}'
hx-target="#modals-here"
hx-trigger="load">
</div>
{% endif %}
<script src="{% static 'js/chart.js' %}"></script>
<script src="{% static 'tabs.js' %}"></script>
<script>
function setupTags() {
var inputTags = document.getElementById('tags');
new BulmaTagsInput(inputTags);
{% block content %} inputTags.BulmaTagsInput().on('before.add', function(item) {
<div class="block"> if (item.includes(": ")) {
{% for block in blocks %} var spl = item.split(": ");
{% if block.title is not None %} } else {
<h1 class="title">{{ block.title }}</h1> var spl = item.split(":");
{% endif %} }
<div class="box"> var field = spl[0];
<div class="columns"> try {
{% if block.column1 is not None %} var value = JSON.parse(spl[1]);
<div class="column"> } catch {
{{ block.column1 }} var value = spl[1];
</div> }
{% endif %} return `${field}: ${value}`;
{% if block.column2 is not None %} });
<div class="column"> inputTags.BulmaTagsInput().on('after.remove', function(item) {
{{ block.column2 }} var spl = item.split(": ");
</div> var field = spl[0];
{% endif %} var value = spl[1].trim();
{% if block.column3 is not None %} });
<div class="column"> }
{{ block.column3 }} function populateSearch(field, value) {
</div> var inputTags = document.getElementById('tags');
{% endif %} inputTags.BulmaTagsInput().add(field+": "+value);
</div> //htmx.trigger("#search", "click");
<div class="columns"> }
{% if block.image1 is not None %} </script>
<div class="column">
<img src="{% static block.image1 %}"> <div class="grid-stack" id="grid-stack-main">
</div> <div class="grid-stack-item" gs-w="7" gs-h="10" gs-y="0" gs-x="1">
{% endif %} <div class="grid-stack-item-content">
{% if block.image2 is not None %} <nav class="panel">
<div class="column"> <p class="panel-heading" style="padding: .2em; line-height: .5em;">
<img src="{% static block.image2 %}"> <i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
</div> Search
{% endif %} </p>
{% if block.image3 is not None %} <article class="panel-block is-active">
<div class="column"> {% include 'window-content/search.html' %}
<img src="{% static block.image3 %}"> </article>
</div> </nav>
{% endif %}
</div> </div>
</div> </div>
{% endfor %}
</div> </div>
<script>
var grid = GridStack.init({
cellHeight: 20,
cellWidth: 50,
cellHeightUnit: 'px',
auto: true,
float: true,
draggable: {handle: '.panel-heading', scroll: false, appendTo: 'body'},
removable: false,
animate: true,
});
// GridStack.init();
setupTags();
// a widget is ready to be loaded
document.addEventListener('load-widget', function(event) {
let container = htmx.find('#widget');
// get the scripts, they won't be run on the new element so we need to eval them
var scripts = htmx.findAll(container, "script");
let widgetelement = container.firstElementChild.cloneNode(true);
var new_id = widgetelement.id;
// check if there's an existing element like the one we want to swap
let grid_element = htmx.find('#grid-stack-main');
let existing_widget = htmx.find(grid_element, "#"+new_id);
// get the size and position attributes
if (existing_widget) {
let attrs = existing_widget.getAttributeNames();
for (let i = 0, len = attrs.length; i < len; i++) {
if (attrs[i].startsWith('gs-')) { // only target gridstack attributes
widgetelement.setAttribute(attrs[i], existing_widget.getAttribute(attrs[i]));
}
}
}
// clear the queue element
container.outerHTML = "";
// temporary workaround, other widgets can be duplicated, but not results
if (widgetelement.id == 'widget-results') {
grid.removeWidget("widget-results");
}
grid.addWidget(widgetelement);
// re-create the HTMX JS listeners, otherwise HTMX won't work inside the grid
htmx.process(widgetelement);
// update size when the widget is loaded
document.addEventListener('load-widget-results', function(evt) {
var added_widget = htmx.find(grid_element, '#widget-results');
var itemContent = htmx.find(added_widget, ".control");
var scrollheight = itemContent.scrollHeight+80;
var verticalmargin = 0;
var cellheight = grid.opts.cellHeight;
var height = Math.ceil((scrollheight + verticalmargin) / (cellheight + verticalmargin));
var opts = {
h: height,
}
grid.update(
added_widget,
opts
);
});
// run the JS scripts inside the added element again
// for instance, this will fix the dropdown
for (var i = 0; i < scripts.length; i++) {
eval(scripts[i].innerHTML);
}
});
</script>
{% endblock %}
{% block widgets %}
{% if table or message is not None %}
{% include 'partials/results_load.html' %}
{% endif %}
{% endblock %} {% endblock %}

View File

@@ -0,0 +1 @@
<button class="modal-close is-large" aria-label="close"></button>

View File

@@ -0,0 +1,3 @@
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
onclick='grid.removeWidget("widget-{{ unique }}");'></i>

View File

@@ -0,0 +1,3 @@
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
data-script="on click remove the closest <nav/>"></i>

View File

@@ -1,20 +1,10 @@
{% extends 'wm/widget.html' %} {% extends 'wm/widget.html' %}
{% load static %} {% load static %}
{% block widget_options %}
gs-w="10" gs-h="1" gs-y="10" gs-x="1"
{% endblock %}
{% block heading %} {% block heading %}
Results Results
{% endblock %} {% endblock %}
{% block close_button %}
<i
class="fa-solid fa-xmark has-text-grey-light float-right"
onclick='grid.removeWidget("drilldown-widget-{{ unique }}"); //grid.compact();'></i>
{% endblock %}
{% block panel_content %} {% block panel_content %}
{% include 'partials/notify.html' %} {% include 'partials/notify.html' %}
<script src="{% static 'js/column-shifter.js' %}"></script> <script src="{% static 'js/column-shifter.js' %}"></script>
@@ -38,6 +28,6 @@
{% endif %} {% endif %}
{% endif %} {% endif %}
{% include 'ui/drilldown/table_results_partial.html' %} {% include 'partials/results_table.html' %}
{% include 'ui/drilldown/sentiment_partial.html' %} {% include 'partials/sentiment_chart.html' %}
{% endblock %} {% endblock %}

View File

@@ -141,10 +141,6 @@
<i class="fa-solid fa-file-slash"></i> <i class="fa-solid fa-file-slash"></i>
</span> </span>
</td> </td>
{% elif column.name == 'tokens' %}
<td class="{{ column.name }} wrap" style="max-width: 10em">
{{ cell|joinsep:',' }}
</td>
{% elif column.name == 'src' %} {% elif column.name == 'src' %}
<td class="{{ column.name }}"> <td class="{{ column.name }}">
<a <a
@@ -301,7 +297,7 @@
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}' hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{% url 'modal_drilldown' type='window' %}" hx-post="{% url 'modal_drilldown' type='window' %}"
hx-vals='{"net": "{{ row.cells.net }}", "nick": "{{ row.cells.nick }}", "channel": "{{ row.cells.channel }}"}' hx-vals='{"net": "{{ row.cells.net }}", "nick": "{{ row.cells.nick }}", "channel": "{{ row.cells.channel }}"}'
hx-target="#items-here" hx-target="#windows-here"
hx-swap="afterend" hx-swap="afterend"
hx-trigger="click" hx-trigger="click"
class="has-text-black"> class="has-text-black">
@@ -364,15 +360,8 @@
</span> </span>
{% endif %} {% endif %}
</td> </td>
{% elif column.name|slice:":6" == "words_" %} {% elif column.name == "tokens" %}
<td class="{{ column.name }}"> <td class="{{ column.name }}">
{% if cell.0.1|length == 0 %}
<a
class="tag is-info"
onclick="populateSearch('{{ column.name }}', '{{ cell }}')">
{{ cell }}
</a>
{% else %}
<div class="tags"> <div class="tags">
{% for word in cell %} {% for word in cell %}
<a <a
@@ -382,7 +371,6 @@
</a> </a>
{% endfor %} {% endfor %}
</div> </div>
{% endif %}
</td> </td>
{% else %} {% else %}
<td class="{{ column.name }}"> <td class="{{ column.name }}">

View File

@@ -1,163 +0,0 @@
{% extends "base.html" %}
{% load static %}
{% load joinsep %}
{% block outer_content %}
{% if params.modal == 'context' %}
<div
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{% url 'modal_context' %}"
hx-vals='{"net": "{{ params.net|escapejs }}",
"num": "{{ params.num|escapejs }}",
"source": "{{ params.source|escapejs }}",
"channel": "{{ params.channel|escapejs }}",
"time": "{{ params.time|escapejs }}",
"date": "{{ params.date|escapejs }}",
"index": "{{ params.index }}",
"type": "{{ params.type|escapejs }}",
"mtype": "{{ params.mtype|escapejs }}",
"nick": "{{ params.nick|escapejs }}"}'
hx-target="#modals-here"
hx-trigger="load">
</div>
{% endif %}
<script src="{% static 'js/chart.js' %}"></script>
<script src="{% static 'tabs.js' %}"></script>
<script>
function setupTags() {
var inputTags = document.getElementById('tags');
new BulmaTagsInput(inputTags);
inputTags.BulmaTagsInput().on('before.add', function(item) {
if (item.includes(": ")) {
var spl = item.split(": ");
} else {
var spl = item.split(":");
}
var field = spl[0];
try {
var value = JSON.parse(spl[1]);
} catch {
var value = spl[1];
}
return `${field}: ${value}`;
});
inputTags.BulmaTagsInput().on('after.remove', function(item) {
var spl = item.split(": ");
var field = spl[0];
var value = spl[1].trim();
});
}
function populateSearch(field, value) {
var inputTags = document.getElementById('tags');
inputTags.BulmaTagsInput().add(field+": "+value);
//htmx.trigger("#search", "click");
}
</script>
<div class="grid-stack" id="grid-stack-main">
<div class="grid-stack-item" gs-w="7" gs-h="10" gs-y="0" gs-x="1">
<div class="grid-stack-item-content">
<nav class="panel">
<p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
Search
</p>
<article class="panel-block is-active">
{% include 'ui/drilldown/search_partial.html' %}
</article>
</nav>
</div>
</div>
</div>
<script>
var grid = GridStack.init({
cellHeight: 20,
cellWidth: 50,
cellHeightUnit: 'px',
auto: true,
float: true,
draggable: {handle: '.panel-heading', scroll: false, appendTo: 'body'},
removable: false,
animate: true,
});
// GridStack.init();
setupTags();
// a widget is ready to be loaded
document.addEventListener('load-widget', function(event) {
let container = htmx.find('#drilldown-widget');
// get the scripts, they won't be run on the new element so we need to eval them
var scripts = htmx.findAll(container, "script");
let widgetelement = container.firstElementChild.cloneNode(true);
// check if there's an existing element like the one we want to swap
let grid_element = htmx.find('#grid-stack-main');
let existing_widget = htmx.find(grid_element, '#drilldown-widget-results');
// get the size and position attributes
if (existing_widget) {
let attrs = existing_widget.getAttributeNames();
for (let i = 0, len = attrs.length; i < len; i++) {
if (attrs[i].startsWith('gs-')) { // only target gridstack attributes
widgetelement.setAttribute(attrs[i], existing_widget.getAttribute(attrs[i]));
}
}
}
// clear the queue element
container.outerHTML = "";
// temporary workaround, other widgets can be duplicated, but not results
if (widgetelement.id == 'drilldown-widget-results') {
grid.removeWidget("drilldown-widget-{{ unique }}");
}
grid.addWidget(widgetelement);
// re-create the HTMX JS listeners, otherwise HTMX won't work inside the grid
htmx.process(widgetelement);
// update size when the widget is loaded
document.addEventListener('load-widget-results', function(evt) {
var added_widget = htmx.find(grid_element, '#drilldown-widget-results');
console.log(added_widget);
var itemContent = htmx.find(added_widget, ".control");
console.log(itemContent);
var scrollheight = itemContent.scrollHeight+80;
var verticalmargin = 0;
var cellheight = grid.opts.cellHeight;
var height = Math.ceil((scrollheight + verticalmargin) / (cellheight + verticalmargin));
var opts = {
h: height,
}
grid.update(
added_widget,
opts
);
});
// run the JS scripts inside the added element again
// for instance, this will fix the dropdown
for (var i = 0; i < scripts.length; i++) {
eval(scripts[i].innerHTML);
}
});
</script>
<div id="modals-here">
</div>
<div id="items-here">
</div>
<div id="widgets-here" style="display: none;">
</div>
<div id="results" style="display: none;">
{% if table %}
{% include 'widgets/table_results.html' %}
{% endif %}
</div>
<script>
</script>
{% endblock %}

View File

@@ -1,122 +0,0 @@
{% load index %}
{% load static %}
<script src="{% static 'modal.js' %}"></script>
<script>
document.addEventListener("restore-modal-scroll", function(event) {
var modalContent = document.getElementsByClassName("modal-content")[0];
var maxScroll = modalContent.scrollHeight - modalContent.offsetHeight;
var scrollpos = localStorage.getItem('scrollpos_modal_content');
if (scrollpos == 'BOTTOM') {
modalContent.scrollTop = maxScroll;
} else if (scrollpos) {
modalContent.scrollTop = scrollpos;
};
});
document.addEventListener("htmx:beforeSwap", function(event) {
var modalContent = document.getElementsByClassName("modal-content")[0];
var scrollpos = modalContent.scrollTop;
if(modalContent.scrollTop === (modalContent.scrollHeight - modalContent.offsetHeight)) {
localStorage.setItem('scrollpos_modal_content', 'BOTTOM');
} else {
localStorage.setItem('scrollpos_modal_content', scrollpos);
}
});
</script>
<style>
#tab-content-{{ unique }} div {
display: none;
}
#tab-content-{{ unique }} div.is-active {
display: block;
}
</style>
<div id="modal" class="modal is-active is-clipped">
<div class="modal-background"></div>
<div class="modal-content">
<div class="box">
{% include 'partials/notify.html' %}
<div class="tabs is-toggle is-fullwidth is-info" id="tabs-{{ unique }}">
<ul>
<li class="is-active" data-tab="1">
<a>
<span class="icon is-small"><i class="fa-solid fa-message-arrow-down"></i></span>
<span>Scrollback</span>
</a>
</li>
<li data-tab="2">
<a>
<span class="icon is-small"><i class="fa-solid fa-messages"></i></span>
<span>Context</span>
</a>
</li>
<li data-tab="3">
<a>
<span class="icon is-small"><i class="fa-solid fa-message"></i></span>
<span>Message</span>
</a>
</li>
<li data-tab="4">
<a>
<span class="icon is-small"><i class="fa-solid fa-asterisk"></i></span>
<span>Info</span>
</a>
</li>
</ul>
</div>
<div id="tab-content-{{ unique }}">
<div class="is-active" data-content="1">
<h4 class="subtitle is-4">Scrollback of {{ channel }} on {{ net }}{{ num }}</h4>
{% include 'modals/context_table.html' %}
{% if user.is_superuser and src == 'irc' %}
<form method="PUT">
<article class="field has-addons">
<article class="control is-expanded has-icons-left">
<input id="context-input" name="msg" class="input" type="text" placeholder="Type your message here">
<span class="icon is-small is-left">
<i class="fas fa-magnifying-glass"></i>
</span>
</article>
<article class="control">
<article class="field">
<button
id="search"
class="button is-info is-fullwidth"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-put="{% url 'threshold_irc_msg' net num %}"
hx-vals='{"channel": "{{ channel }}", "nick": "{{ nick }}"}'
hx-trigger="click"
hx-target="#context-input"
hx-swap="outerHTML">
Send
</button>
</article>
</article>
</article>
</form>
{% endif %}
</div>
<div data-content="2">
<h4 class="subtitle is-4">Scrollback of {{ channel }} on {{ net }}{{ num }} around {{ ts }}</h4>
Context
</div>
<div data-content="3">
<h4 class="subtitle is-4">Message details</h4>
Message details
</div>
<div data-content="4">
<h4 class="subtitle is-4">Information about {{ channel }} on {{ net }}{{ num }}</h4>
Info
</div>
</div>
</div>
<script>initTabs("{{ unique }}");</script>
<button class="modal-close is-large" aria-label="close"></button>
</div>
</div>

View File

@@ -1,177 +0,0 @@
<article class="table-container" id="modal-context-table">
<table class="table is-fullwidth">
<thead>
<th></th>
<th></th>
<th></th>
</thead>
<tbody>
{% for item in object_list %}
{% if item.type == 'control' %}
<tr>
<td></td>
<td>
<span class="icon has-text-grey" data-tooltip="Hidden">
<i class="fa-solid fa-file-slash"></i>
</span>
</td>
<td>
<p class="has-text-grey">Hidden {{ item.hidden }} similar result{% if item.hidden > 1%}s{% endif %}</p>
</td>
</tr>
{% else %}
<tr>
<td>{{ item.time }}</td>
<td>
{% if item.type != 'znc' and item.type != 'self' and query is not True %}
<article class="nowrap-parent">
<article class="nowrap-child">
{% if item.type == 'msg' %}
<span class="icon" data-tooltip="Message">
<i class="fa-solid fa-message"></i>
</span>
{% elif item.type == 'join' %}
<span class="icon" data-tooltip="Join">
<i class="fa-solid fa-person-to-portal"></i>
</span>
{% elif item.type == 'part' %}
<span class="icon" data-tooltip="Part">
<i class="fa-solid fa-person-from-portal"></i>
</span>
{% elif item.type == 'quit' %}
<span class="icon" data-tooltip="Quit">
<i class="fa-solid fa-circle-xmark"></i>
</span>
{% elif item.type == 'kick' %}
<span class="icon" data-tooltip="Kick">
<i class="fa-solid fa-user-slash"></i>
</span>
{% elif item.type == 'nick' %}
<span class="icon" data-tooltip="Nick">
<i class="fa-solid fa-signature"></i>
</span>
{% elif item.type == 'mode' %}
<span class="icon" data-tooltip="Mode">
<i class="fa-solid fa-gear"></i>
</span>
{% elif item.type == 'action' %}
<span class="icon" data-tooltip="Action">
<i class="fa-solid fa-exclamation"></i>
</span>
{% elif item.type == 'notice' %}
<span class="icon" data-tooltip="Notice">
<i class="fa-solid fa-message-code"></i>
</span>
{% elif item.type == 'conn' %}
<span class="icon" data-tooltip="Connection">
<i class="fa-solid fa-cloud-exclamation"></i>
</span>
{% elif item.type == 'znc' %}
<span class="icon" data-tooltip="ZNC">
<i class="fa-brands fa-unity"></i>
</span>
{% elif item.type == 'query' %}
<span class="icon" data-tooltip="Query">
<i class="fa-solid fa-message"></i>
</span>
{% elif item.type == 'highlight' %}
<span class="icon" data-tooltip="Highlight">
<i class="fa-solid fa-exclamation"></i>
</span>
{% elif item.type == 'who' %}
<span class="icon" data-tooltip="Who">
<i class="fa-solid fa-passport"></i>
</span>
{% elif item.type == 'topic' %}
<span class="icon" data-tooltip="Topic">
<i class="fa-solid fa-sign"></i>
</span>
{% else %}
{{ item.type }}
{% endif %}
{% if item.online is True %}
<span class="icon has-text-success has-tooltip-success" data-tooltip="Online">
<i class="fa-solid fa-circle"></i>
</span>
{% elif item.online is False %}
<span class="icon has-text-danger has-tooltip-danger" data-tooltip="Offline">
<i class="fa-solid fa-circle"></i>
</span>
{% else %}
<span class="icon has-text-warning has-tooltip-warning" data-tooltip="Unknown">
<i class="fa-solid fa-circle"></i>
</span>
{% endif %}
{% if item.src == 'irc' %}
<a
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{% url 'modal_drilldown' %}"
hx-vals='{"net": "{{ item.net|escapejs }}", "nick": "{{ item.nick|escapejs }}", "channel": "{{ item.channel|escapejs }}"}'
hx-target="#modals-here"
hx-trigger="click"
class="has-text-black">
<span class="icon" data-tooltip="Open drilldown modal">
<i class="fa-solid fa-album"></i>
</span>
</a>
{% endif %}
</article>
<a class="nowrap-child has-text-link is-underlined" onclick="populateSearch('nick', '{{ item.nick|escapejs }}')">
{{ item.nick }}
</a>
{% if item.num_chans != '—' %}
<article class="nowrap-child">
<span class="tag">
{{ item.num_chans }}
</span>
</article>
{% endif %}
</article>
{% endif %}
{% if item.type == 'self' %}
<span class="icon has-text-primary" data-tooltip="You">
<i class="fa-solid fa-message-check"></i>
</span>
{% elif item.type == 'znc' %}
<span class="icon has-text-info" data-tooltip="ZNC">
<i class="fa-brands fa-unity"></i>
</span>
{% elif query %}
<span class="icon has-text-info" data-tooltip="Auth">
<i class="fa-solid fa-passport"></i>
</span>
{% endif %}
</td>
<td class="wrap">{{ item.msg }}</td>
</tr>
{% endif %}
{% endfor %}
</tbody>
</table>
{% if object_list %}
<div
class="modal-refresh"
style="display: none;"
hx-headers='{"X-CSRFToken": "{{ csrf_token }}"}'
hx-post="{% url 'modal_context_table' %}"
hx-vals='{"net": "{{ net }}",
"num": "{{ num }}",
"src": "{{ src }}",
"channel": "{{ channel }}",
"time": "{{ time }}",
"date": "{{ date }}",
"index": "{{ index }}",
"type": "{{ type }}",
"mtype": "{{ mtype }}",
"nick": "{{ nick }}",
"dedup": "{{ params.dedup }}"}'
hx-target="#modal-context-table"
hx-trigger="every 5s">
</div>
{% endif %}
</article>
<script>
var modal_event = new Event('restore-modal-scroll');
document.dispatchEvent(modal_event);
</script>

View File

@@ -0,0 +1,26 @@
{% load static %}
{% include 'partials/notify.html' %}
<script src="{% static 'js/column-shifter.js' %}"></script>
{% if cache is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="Cached">
<i class="fa-solid fa-database"></i>
</span>
{% endif %}
Fetched {{ table.data|length }} hits in {{ took }}ms
{% if exemption is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="God mode">
<i class="fa-solid fa-book-bible"></i>
</span>
{% else %}
{% if redacted is not None %}
<span class="icon has-tooltip-bottom" data-tooltip="{{ redacted }} redacted">
<i class="fa-solid fa-mask"></i>
</span>
{% endif %}
{% endif %}
{% include 'partials/results_table.html' %}
{% include 'partials/sentiment_chart.html' %}

View File

@@ -1,6 +1,6 @@
<form class="skipEmptyFields" method="POST" hx-post="{% url 'search' %}" <form class="skipEmptyFields" method="POST" hx-post="{% url 'search' %}"
hx-trigger="change" hx-trigger="change"
hx-target="#results" hx-target="#widgets-here"
hx-swap="innerHTML" hx-swap="innerHTML"
hx-indicator="#spinner"> hx-indicator="#spinner">
{% csrf_token %} {% csrf_token %}
@@ -11,7 +11,7 @@
<input <input
hx-post="{% url 'search' %}" hx-post="{% url 'search' %}"
hx-trigger="keyup changed delay:200ms" hx-trigger="keyup changed delay:200ms"
hx-target="#results" hx-target="#widgets-here"
hx-swap="innerHTML" hx-swap="innerHTML"
name="query" name="query"
value="{{ params.query }}" value="{{ params.query }}"
@@ -29,7 +29,7 @@
class="button is-info is-fullwidth" class="button is-info is-fullwidth"
hx-post="{% url 'search' %}" hx-post="{% url 'search' %}"
hx-trigger="click" hx-trigger="click"
hx-target="#results" hx-target="#widgets-here"
hx-swap="innerHTML"> hx-swap="innerHTML">
Search Search
</button> </button>
@@ -394,7 +394,7 @@
<input <input
hx-trigger="change" hx-trigger="change"
hx-post="{% url 'search' %}" hx-post="{% url 'search' %}"
hx-target="#results" hx-target="#widgets-here"
hx-swap="innerHTML" hx-swap="innerHTML"
id="tags" id="tags"
class="input" class="input"

View File

@@ -1,4 +1,4 @@
{% extends 'wm/magnet.html' %} {% extends 'wm/window.html' %}
{% block heading %} {% block heading %}
Drilldown Drilldown

View File

@@ -12,8 +12,9 @@
<div class="modal-content"> <div class="modal-content">
<div class="box"> <div class="box">
{% block modal_content %} {% block modal_content %}
{% include window_content %}
{% endblock %} {% endblock %}
<button class="modal-close is-large" aria-label="close"></button> {% include 'partials/close-modal.html' %}
</div> </div>
</div> </div>
</div> </div>

View File

@@ -0,0 +1,6 @@
{% extends "base.html" %}
{% block content %}
{% include window_content %}
{% endblock %}

View File

@@ -3,9 +3,7 @@
<p class="panel-heading" style="padding: .2em; line-height: .5em;"> <p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i> <i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
{% block close_button %} {% block close_button %}
<i {% include 'partials/close-window.html' %}
class="fa-solid fa-xmark has-text-grey-light float-right"
data-script="on click remove the closest <nav/>"></i>
{% endblock %} {% endblock %}
{% block heading %} {% block heading %}
{% endblock %} {% endblock %}

View File

@@ -1,24 +1,24 @@
<div id="drilldown-widget"> <div id="widget">
<div id="drilldown-widget-{{ unique }}" class="grid-stack-item" {% block widget_options %}{% endblock %}> <div id="widget-{{ unique }}" class="grid-stack-item" {% block widget_options %}gs-w="10" gs-h="1" gs-y="10" gs-x="1"{% endblock %}>
<div class="grid-stack-item-content"> <div class="grid-stack-item-content">
<nav class="panel"> <nav class="panel">
<p class="panel-heading" style="padding: .2em; line-height: .5em;"> <p class="panel-heading" style="padding: .2em; line-height: .5em;">
<i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i> <i class="fa-solid fa-arrows-up-down-left-right has-text-grey-light"></i>
{% block close_button %} {% block close_button %}
<i {% include 'partials/close-widget.html' %}
class="fa-solid fa-xmark has-text-grey-light float-right"
onclick='grid.removeWidget("drilldown-widget-{{ unique }}");'></i>
{% endblock %} {% endblock %}
<i <i
class="fa-solid fa-arrows-minimize has-text-grey-light float-right" class="fa-solid fa-arrows-minimize has-text-grey-light float-right"
onclick='grid.compact();'></i> onclick='grid.compact();'></i>
{% block heading %} {% block heading %}
{{ title }}
{% endblock %} {% endblock %}
</p> </p>
<article class="panel-block is-active"> <article class="panel-block is-active">
<div class="control"> <div class="control">
{% block panel_content %} {% block panel_content %}
{% include window_content %}
{% endblock %} {% endblock %}
</div> </div>
</article> </article>

View File

@@ -1,8 +1,10 @@
<magnet-block attract-distance="10" align-to="outer|center" class="floating-window"> <magnet-block attract-distance="10" align-to="outer|center" class="floating-window">
{% extends 'wm/panel.html' %} {% extends 'wm/panel.html' %}
{% block heading %} {% block heading %}
{{ title }}
{% endblock %} {% endblock %}
{% block panel_content %} {% block panel_content %}
{% include window_content %}
{% endblock %} {% endblock %}
</magnet-block> </magnet-block>

View File

@@ -6,7 +6,6 @@ from django.conf import settings
from django.http import HttpResponse, JsonResponse from django.http import HttpResponse, JsonResponse
from django.shortcuts import render from django.shortcuts import render
from django.urls import reverse from django.urls import reverse
from django.views import View
from django_tables2 import SingleTableView from django_tables2 import SingleTableView
from rest_framework.parsers import FormParser from rest_framework.parsers import FormParser
from rest_framework.views import APIView from rest_framework.views import APIView
@@ -215,21 +214,133 @@ def drilldown_search(request, return_context=False, template=None):
class DrilldownTableView(SingleTableView): class DrilldownTableView(SingleTableView):
table_class = DrilldownTable table_class = DrilldownTable
template_name = "widgets/table_results.html" template_name = "wm/widget.html"
window_content = "window-content/results.html"
# htmx_partial = "partials/"
paginate_by = settings.DRILLDOWN_RESULTS_PER_PAGE paginate_by = settings.DRILLDOWN_RESULTS_PER_PAGE
def get_queryset(self, request, **kwargs): def common_request(self, request, **kwargs):
context = drilldown_search(request, return_context=True) extra_params = {}
# Save the context as we will need to merge other attributes later
self.context = context
if "object_list" in context: if request.user.is_anonymous:
return context["object_list"] sizes = settings.MAIN_SIZES_ANON
else: else:
return [] sizes = settings.MAIN_SIZES
if request.GET:
self.template_name = "index.html"
# GET arguments in URL like ?query=xyz
query_params = request.GET.dict()
elif request.POST:
query_params = request.POST.dict()
else:
self.template_name = "index.html"
# No query, this is a fresh page load
# Don't try to search, since there's clearly nothing to do
params_with_defaults = {}
helpers.add_defaults(params_with_defaults)
context = {
"sizes": sizes,
"params": params_with_defaults,
"unique": "results",
"window_content": self.window_content,
"title": "Results",
}
return render(request, self.template_name, context)
# Merge everything together just in case
tmp_post = request.POST.dict()
tmp_get = request.GET.dict()
tmp_post = {k: v for k, v in tmp_post.items() if v and not v == "None"}
tmp_get = {k: v for k, v in tmp_get.items() if v and not v == "None"}
query_params.update(tmp_post)
query_params.update(tmp_get)
# URI we're passing to the template for linking
if "csrfmiddlewaretoken" in query_params:
del query_params["csrfmiddlewaretoken"]
# Parse the dates
if "dates" in query_params:
dates = parse_dates(query_params["dates"])
del query_params["dates"]
if dates:
if "message" in dates:
return render(request, self.template_name, dates)
query_params["from_date"] = dates["from_date"]
query_params["to_date"] = dates["to_date"]
query_params["from_time"] = dates["from_time"]
query_params["to_time"] = dates["to_time"]
# Remove null values
if "query" in query_params:
if query_params["query"] == "":
del query_params["query"]
# Remove null tags values
if "tags" in query_params:
if query_params["tags"] == "":
del query_params["tags"]
else:
# Parse the tags and populate cast to pass to search function
tags = parse_tags(query_params["tags"])
extra_params["tags"] = tags
context = db.query_results(request, query_params, **extra_params)
# Unique is for identifying the widgets.
# We don't want a random one since we only want one results pane.
context["unique"] = "results"
context["window_content"] = self.window_content
context["title"] = "Results"
# Valid sizes
context["sizes"] = sizes
# Add any default parameters to the context
params_with_defaults = dict(query_params)
helpers.add_defaults(params_with_defaults)
context["params"] = params_with_defaults
# Remove anything that we or the user set to a default for
# pretty URLs
helpers.remove_defaults(query_params)
url_params = urllib.parse.urlencode(query_params)
context["client_uri"] = url_params
# There's an error
if "message" in context:
response = render(request, self.template_name, context)
# Still push the URL so they can share it to get assistance
if request.GET:
if request.htmx:
response["HX-Push"] = reverse("home") + "?" + url_params
elif request.POST:
response["HX-Push"] = reverse("home") + "?" + url_params
return response
# Create data for chart.js sentiment graph
graph = make_graph(context["object_list"])
context["data"] = graph
# Create the table
context = make_table(context)
# URI we're passing to the template for linking, table fields removed
table_fields = ["page", "sort"]
clean_params = {k: v for k, v in query_params.items() if k not in table_fields}
clean_url_params = urllib.parse.urlencode(clean_params)
context["uri"] = clean_url_params
# unique = str(uuid.uuid4())[:8]
# self.context = context
return context
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
self.object_list = self.get_queryset(request) self.context = self.common_request(request)
if isinstance(self.context, HttpResponse):
return self.context
self.object_list = self.context["object_list"]
show = [] show = []
show = set().union(*(d.keys() for d in self.object_list)) show = set().union(*(d.keys() for d in self.object_list))
allow_empty = self.get_allow_empty() allow_empty = self.get_allow_empty()
@@ -245,17 +356,17 @@ class DrilldownTableView(SingleTableView):
else: else:
is_empty = not self.object_list # noqa is_empty = not self.object_list # noqa
context = self.get_context_data() context = self.get_context_data()
if isinstance(self.context, HttpResponse):
return self.context
for k, v in self.context.items(): for k, v in self.context.items():
if k not in context: if k not in context:
context[k] = v context[k] = v
context["show"] = show context["show"] = show
if request.method == "GET": # if request.htmx:
if not request.htmx: # self.template_name = self.window_content
self.template_name = "ui/drilldown/drilldown.html" # if request.method == "GET":
# if not request.htmx:
# self.template_name = "ui/drilldown/drilldown.html"
response = self.render_to_response(context) response = self.render_to_response(context)
# if not request.method == "GET": # if not request.method == "GET":
if "client_uri" in context: if "client_uri" in context:
@@ -266,15 +377,15 @@ class DrilldownTableView(SingleTableView):
return self.get(request, *args, **kwargs) return self.get(request, *args, **kwargs)
class Drilldown(View): # class Drilldown(View):
template_name = "ui/drilldown/drilldown.html" # template_name = "ui/drilldown/drilldown.html"
plan_name = "drilldown" # plan_name = "drilldown"
def get(self, request): # def get(self, request):
return drilldown_search(request) # return drilldown_search(request)
def post(self, request): # def post(self, request):
return drilldown_search(request) # return drilldown_search(request)
class DrilldownContextModal(APIView): class DrilldownContextModal(APIView):
@@ -389,19 +500,6 @@ class DrilldownContextModal(APIView):
if "message" in results: if "message" in results:
return render(request, self.template_name, results) return render(request, self.template_name, results)
# if settings.HASHING: # we probably want to see the tokens
# if query_params["source"] not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_hashing"):
# for index, item in enumerate(results["object_list"]):
# if "tokens" in item:
# results["object_list"][index]["msg"] = results[
# "object_list"
# ][index].pop("tokens")
# # item["msg"] = item.pop("tokens")
# Make the time nicer
# for index, item in enumerate(results["object_list"]):
# results["object_list"][index]["time"] = item["time"]+"SSS"
unique = str(uuid.uuid4())[:8] unique = str(uuid.uuid4())[:8]
context = { context = {
"net": query_params["net"], "net": query_params["net"],
@@ -449,45 +547,18 @@ class ThresholdInfoModal(APIView):
nick = request.data["nick"] nick = request.data["nick"]
channel = request.data["channel"] channel = request.data["channel"]
# SAFE BLOCK #
# Lookup the hash values but don't disclose them to the user
# if settings.HASHING:
# SAFE_PARAMS = request.data.dict()
# hash_lookup(request.user, SAFE_PARAMS)
channels = get_chans(net, [nick]) channels = get_chans(net, [nick])
print("CHANNELS", channels)
users = get_users(net, [nick]) users = get_users(net, [nick])
print("USERS", users)
num_users = annotate_num_users(net, channels) num_users = annotate_num_users(net, channels)
print("NUM_USERS", num_users)
num_chans = annotate_num_chans(net, users) num_chans = annotate_num_chans(net, users)
print("NUM_CHANS", num_chans)
if channels: if channels:
inter_users = get_users(net, channels) inter_users = get_users(net, channels)
else: else:
inter_users = [] inter_users = []
print("INTER_USERS", inter_users)
if users: if users:
inter_chans = get_chans(net, users) inter_chans = get_chans(net, users)
else: else:
inter_chans = [] inter_chans = []
print("INTER_CHANS", inter_chans)
# if settings.HASHING:
# hash_list(request.user, inter_chans)
# hash_list(request.user, inter_users)
# hash_list(request.user, num_chans, hash_keys=True)
# hash_list(request.user, num_users, hash_keys=True)
# hash_list(request.user, channels)
# hash_list(request.user, users)
# if settings.RANDOMISATION:
# randomise_list(request.user, num_chans)
# randomise_list(request.user, num_users)
# SAFE BLOCK END #
unique = str(uuid.uuid4())[:8] unique = str(uuid.uuid4())[:8]
context = { context = {
@@ -502,5 +573,4 @@ class ThresholdInfoModal(APIView):
"num_chans": num_chans, "num_chans": num_chans,
"unique": unique, "unique": unique,
} }
print("CON", context)
return render(request, self.template_name, context) return render(request, self.template_name, context)

View File

@@ -64,14 +64,13 @@ class DrilldownTable(Table):
mtype = Column() mtype = Column()
realname = Column() realname = Column()
server = Column() server = Column()
mtype = Column() tokens = Column()
# tokens = Column()
lang_code = Column() lang_code = Column()
lang_name = Column() lang_name = Column()
words_noun = Column() # words_noun = Column()
words_adj = Column() # words_adj = Column()
words_verb = Column() # words_verb = Column()
words_adv = Column() # words_adv = Column()
hidden = Column() hidden = Column()
filename = Column() filename = Column()
file_md5 = Column() file_md5 = Column()

View File

@@ -1,17 +1,21 @@
version: "2" version: "2.2"
services: services:
app: app:
image: pathogen/neptune:latest image: pathogen/neptune:latest
build: ./docker container_name: neptune
build:
context: .
args:
OPERATION: ${OPERATION}
volumes: volumes:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${NEPTUNE_LOCAL_SETTINGS}:/code/app/local_settings.py - ${PORTAINER_GIT_DIR}/docker/uwsgi.ini:/conf/uwsgi.ini
- ${NEPTUNE_DATABASE_FILE}:/code/db.sqlite3 - ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
ports: - ${APP_DATABASE_FILE}:/code/db.sqlite3
- "${NEPTUNE_PORT}:8000" - neptune_static:${STATIC_ROOT}
env_file: env_file:
- .env - stack.env
volumes_from: volumes_from:
- tmp - tmp
depends_on: depends_on:
@@ -19,49 +23,107 @@ services:
condition: service_healthy condition: service_healthy
migration: migration:
condition: service_started condition: service_started
collectstatic:
condition: service_started
networks:
- default
- pathogen
- elastic
migration: migration:
image: pathogen/neptune:latest image: pathogen/neptune:latest
container_name: migration_neptune
build:
context: .
args:
OPERATION: ${OPERATION}
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput' command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes: volumes:
- ${PORTAINER_GIT_DIR}:/code - ${PORTAINER_GIT_DIR}:/code
- ${NEPTUNE_LOCAL_SETTINGS}:/code/app/local_settings.py - ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${NEPTUNE_DATABASE_FILE}:/code/db.sqlite3 - ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
volumes_from: volumes_from:
- tmp - tmp
depends_on: depends_on:
redis: redis:
condition: service_healthy condition: service_healthy
# pyroscope: collectstatic:
# image: pyroscope/pyroscope image: pathogen/neptune:latest
# environment: container_name: collectstatic_neptune
# - PYROSCOPE_LOG_LEVEL=debug build:
# ports: context: .
# - '4040:4040' args:
# command: OPERATION: ${OPERATION}
# - 'server' command: sh -c '. /venv/bin/activate && python manage.py collectstatic --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${APP_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${APP_DATABASE_FILE}:/code/db.sqlite3
- neptune_static:${STATIC_ROOT}
env_file:
- stack.env
depends_on:
redis:
condition: service_healthy
nginx:
image: nginx:latest
container_name: nginx_neptune
ports:
- ${APP_PORT}:9999
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/nginx/conf.d/${OPERATION}.conf:/etc/nginx/conf.d/default.conf
- neptune_static:${STATIC_ROOT}
volumes_from:
- tmp
networks:
- default
- pathogen
depends_on:
app:
condition: service_started
tmp: tmp:
image: busybox image: busybox
command: chmod -R 777 /var/run/redis container_name: tmp_neptune
command: chmod -R 777 /var/run/socks
volumes: volumes:
- /var/run/redis - /var/run/socks
redis: redis:
image: redis image: redis
container_name: redis_neptune
command: redis-server /etc/redis.conf command: redis-server /etc/redis.conf
ulimits:
nproc: 65535
nofile:
soft: 65535
hard: 65535
volumes: volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf - ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
volumes_from: volumes_from:
- tmp - tmp
healthcheck: healthcheck:
test: "redis-cli -s /var/run/redis/redis.sock ping" test: "redis-cli -s /var/run/socks/redis.sock ping"
interval: 2s interval: 2s
timeout: 2s timeout: 2s
retries: 15 retries: 15
networks: networks:
default: default:
external: driver: bridge
name: pathogen pathogen:
external: true
elastic:
external: true
volumes:
neptune_static: {}

View File

@@ -1,60 +0,0 @@
version: "2"
services:
app:
image: pathogen/neptune:latest
build: ./docker/prod
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${PORTAINER_GIT_DIR}/docker/prod/uwsgi.ini:/conf/uwsgi.ini
- ${NEPTUNE_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${NEPTUNE_DATABASE_FILE}:/code/db.sqlite3
ports:
- "${NEPTUNE_PORT}:8000" # uwsgi socket
env_file:
- ../stack.env
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
migration:
condition: service_started
migration:
image: pathogen/neptune:latest
build: ./docker/prod
command: sh -c '. /venv/bin/activate && python manage.py migrate --noinput'
volumes:
- ${PORTAINER_GIT_DIR}:/code
- ${NEPTUNE_LOCAL_SETTINGS}:/code/app/local_settings.py
- ${NEPTUNE_DATABASE_FILE}:/code/db.sqlite3
volumes_from:
- tmp
depends_on:
redis:
condition: service_healthy
tmp:
image: busybox
command: chmod -R 777 /var/run/redis
volumes:
- /var/run/redis
redis:
image: redis
command: redis-server /etc/redis.conf
volumes:
- ${PORTAINER_GIT_DIR}/docker/redis.conf:/etc/redis.conf
volumes_from:
- tmp
healthcheck:
test: "redis-cli -s /var/run/redis/redis.sock ping"
interval: 2s
timeout: 2s
retries: 15
networks:
default:
external:
name: pathogen

View File

@@ -0,0 +1,23 @@
upstream django {
#server app:8000;
#server unix:///var/run/socks/app.sock;
server app:8000;
}
server {
listen 9999;
location = /favicon.ico { access_log off; log_not_found off; }
location /static/ {
root /conf;
}
location / {
proxy_pass http://django;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $host;
}
}

View File

@@ -0,0 +1,24 @@
upstream django {
server app:8000;
#server unix:///var/run/socks/app.sock;
}
server {
listen 9999;
location = /favicon.ico { access_log off; log_not_found off; }
location /static/ {
root /conf;
}
location / {
include /etc/nginx/uwsgi_params; # the uwsgi_params file you installed
uwsgi_pass django;
uwsgi_param Host $host;
uwsgi_param X-Real-IP $remote_addr;
uwsgi_param X-Forwarded-For $proxy_add_x_forwarded_for;
uwsgi_param X-Forwarded-Proto $http_x_forwarded_proto;
}
}

View File

@@ -1,21 +0,0 @@
# syntax=docker/dockerfile:1
FROM python:3
RUN useradd -d /code pathogen
RUN mkdir /code
RUN chown pathogen:pathogen /code
RUN mkdir /conf
RUN chown pathogen:pathogen /conf
RUN mkdir /venv
RUN chown pathogen:pathogen /venv
USER pathogen
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
WORKDIR /code
COPY requirements.prod.txt /code/
RUN python -m venv /venv
RUN . /venv/bin/activate && pip install -r requirements.prod.txt
CMD . /venv/bin/activate && uwsgi --ini /conf/uwsgi.ini

View File

@@ -1,19 +0,0 @@
wheel
django
django-crispy-forms
crispy-bulma
#opensearch-py
stripe
django-rest-framework
numpy
uwsgi
django-tables2
django-tables2-bulma-template
django-htmx
cryptography
siphashc
redis
sortedcontainers
django-debug-toolbar
django-debug-toolbar-template-profiler
orjson

View File

@@ -1,2 +1,2 @@
unixsocket /var/run/redis/redis.sock unixsocket /var/run/socks/redis.sock
unixsocketperm 777 unixsocketperm 777
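The Redis unix socket moves from /var/run/redis to the shared /var/run/socks volume, so anything talking to Redis has to follow it. The Django settings are not part of this diff; a minimal sanity check with redis-py, assuming the app connects over the unix socket rather than TCP and that redis-py is installed in its venv:

# quick probe of the relocated socket -- assumes a direct redis-py connection
import redis

r = redis.Redis(unix_socket_path="/var/run/socks/redis.sock")
print(r.ping())  # mirrors the compose healthcheck: redis-cli -s /var/run/socks/redis.sock ping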

View File

@@ -1,18 +0,0 @@
wheel
django
django-crispy-forms
crispy-bulma
#opensearch-py
stripe
django-rest-framework
numpy
django-tables2
django-tables2-bulma-template
django-htmx
cryptography
siphashc
redis
sortedcontainers
django-debug-toolbar
django-debug-toolbar-template-profiler
orjson

View File

@@ -5,9 +5,8 @@ env=DJANGO_SETTINGS_MODULE=app.settings
master=1 master=1
pidfile=/tmp/project-master.pid pidfile=/tmp/project-master.pid
socket=0.0.0.0:8000 socket=0.0.0.0:8000
processes=5
harakiri=20 harakiri=20
max-requests=5000 max-requests=100000
vacuum=1 vacuum=1
home=/venv home=/venv
processes=12

View File

@@ -1,9 +1,10 @@
wheel wheel
uwsgi
django django
pre-commit pre-commit
django-crispy-forms django-crispy-forms
crispy-bulma crispy-bulma
#opensearch-py elasticsearch
stripe stripe
django-rest-framework django-rest-framework
numpy numpy

View File

@@ -1,4 +1,6 @@
NEPTUNE_PORT=5000 APP_PORT=5000
PORTAINER_GIT_DIR=.. PORTAINER_GIT_DIR=.
NEPTUNE_LOCAL_SETTINGS=../app/local_settings.py APP_LOCAL_SETTINGS=./app/local_settings.py
NEPTUNE_DATABASE_FILE=../db.sqlite3 APP_DATABASE_FILE=./db.sqlite3
STATIC_ROOT=/conf/static
OPERATION=dev