Compare commits


No commits in common. "7b6da7b70465059a653c25c863f241936938e06a" and "a1a553507913d568d86532e114bc2a456e94eb8c" have entirely different histories.

13 changed files with 39 additions and 150 deletions

View File

@@ -42,7 +42,6 @@ INSTALLED_APPS = [
     "crispy_bulma",
     "django_tables2",
     "django_tables2_bulma_template",
-    "prettyjson",
 ]
 CRISPY_TEMPLATE_PACK = "bulma"
 CRISPY_ALLOWED_TEMPLATE_PACKS = ("bulma",)

View File

@@ -20,7 +20,7 @@ logger = logging.getLogger(__name__)
 class DruidBackend(StorageBackend):
     def __init__(self):
-        super().__init__("druid")
+        super().__init__("Druid")

     def initialise(self, **kwargs):
         # self.client = PyDruid("http://broker:8082", "druid/v2")

View File

@@ -33,7 +33,7 @@ mapping = {
             "ts": {"type": "date", "format": "epoch_second"},
             "match_ts": {"type": "date", "format": "iso8601"},
             "file_tim": {"type": "date", "format": "epoch_millis"},
-            "rule_id": {"type": "keyword"},
+            "rule_uuid": {"type": "keyword"},
         }
     }
 }
@@ -43,7 +43,7 @@ for field in keyword_fields:

 class ElasticsearchBackend(StorageBackend):
     def __init__(self):
-        super().__init__("elasticsearch")
+        super().__init__("Elasticsearch")
         self.client = None
         self.async_client = None
@@ -272,12 +272,10 @@ class ElasticsearchBackend(StorageBackend):
         """
         if self.async_client is None:
             await self.async_initialise()
-        print("MATCHES", matches)
         for match in matches:
             result = await self.async_client.index(
                 index=settings.INDEX_RULE_STORAGE, body=match
             )
-            print("RESULT", result)
             if not result["result"] == "created":
                 self.log.error(f"Indexing failed: {result}")
         self.log.debug(f"Indexed {len(matches)} messages in ES")
@@ -441,7 +439,6 @@ class ElasticsearchBackend(StorageBackend):
                 raise QueryError(error)
             if len(response["hits"]["hits"]) == 0:
                 # No results, skip
-                result_map[index] = ({}, [])
                 continue
             meta, response = self.parse(response, meta=True)
             # print("Parsed response", response)
@@ -503,7 +500,7 @@ class ElasticsearchBackend(StorageBackend):
         if rule_object is not None:
             index = settings.INDEX_RULE_STORAGE
-            add_bool.append({"rule_id": str(rule_object.id)})
+            add_bool.append({"rule_uuid": str(rule_object.id)})
         else:
             # I - Index
             index = parse_index(request.user, query_params)
@@ -523,19 +520,14 @@ class ElasticsearchBackend(StorageBackend):
         if isinstance(sources, dict):
             return sources
         total_count = len(sources)
-        # Total is -1 due to the "all" source
-        total_sources = (
-            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
-        )
-        print("total_count", total_count, "total_sources", total_sources)
+        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
         if total_count != total_sources:
             add_top_tmp = {"bool": {"should": []}}
             for source_iter in sources:
                 add_top_tmp["bool"]["should"].append(
                     {"match_phrase": {"src": source_iter}}
                 )
-            if rule_object is not None and query_params["source"] != "all":
-                add_top.append(add_top_tmp)
+            add_top.append(add_top_tmp)

         # R - Ranges
         # date_query = False
@@ -555,17 +547,12 @@ class ElasticsearchBackend(StorageBackend):
         sort = parse_sort(query_params)
         if isinstance(sort, dict):
             return sort
-        if rule_object is not None:
-            field = "match_ts"
-        else:
-            field = "ts"
         if sort:
             # For Druid compatibility
             sort_map = {"ascending": "asc", "descending": "desc"}
             sorting = [
                 {
-                    field: {
+                    "ts": {
                         "order": sort_map[sort],
                     }
                 }
@@ -607,7 +594,6 @@ class ElasticsearchBackend(StorageBackend):
             search_query,
             index=index,
         )
-        print("query", search_query)
         if "message" in response:
             return response

View File

@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
 class ManticoreBackend(StorageBackend):
     def __init__(self):
-        super().__init__("manticore")
+        super().__init__("Manticore")

     def initialise(self, **kwargs):
         """

View File

@@ -118,7 +118,6 @@ def parse_source(user, query_params, raise_error=False):
     if source:
         sources = [source]
     else:
-        print("NOT SOURCE")
         sources = list(settings.MAIN_SOURCES)
         if user.has_perm("core.restricted_sources"):
             for source_iter in settings.SOURCES_RESTRICTED:

View File

@@ -8,7 +8,6 @@ try:
 except ImportError:
     from yaml import Loader, Dumper

-import uuid
 from datetime import datetime

 import orjson
@@ -287,18 +286,12 @@ class NotificationRuleData(object):
             matches = [matches]
         matches_copy = matches.copy()
         match_ts = datetime.utcnow().isoformat()
-        batch_id = uuid.uuid4()
-
-        # Filter empty fields in meta
-        meta = {k: v for k, v in meta.items() if v}
         for match_index, _ in enumerate(matches_copy):
             matches_copy[match_index]["index"] = index
-            matches_copy[match_index]["rule_id"] = str(self.object.id)
+            matches_copy[match_index]["rule_uuid"] = self.object.id
             matches_copy[match_index]["meta"] = meta
             matches_copy[match_index]["match_ts"] = match_ts
             matches_copy[match_index]["mode"] = mode
-            matches_copy[match_index]["batch_id"] = str(batch_id)
         return matches_copy

     async def ingest_matches(self, index, matches, meta, mode):
@@ -331,25 +324,15 @@ class NotificationRuleData(object):
         current_match = self.get_match(index, message)
         log.debug(f"Rule matched: {index} - current match: {current_match}")
-        last_run_had_matches = current_match is True
-        if self.policy in ["change", "default"]:
-            # Change or Default policy, notifying only on new results
-            if last_run_had_matches:
-                # Last run had matches, and this one did too
-                # We don't need to notify
-                return
-        elif self.policy == "always":
-            # Only here for completeness, we notify below by default
-            pass
-
-        # We hit the return above if we don't need to notify
-        if "aggs" in meta and "matched" not in meta:
-            meta["matched"] = self.format_aggs(meta["aggs"])
-        rule_notify(self.object, index, message, meta)
-        self.store_match(index, message)
-        await self.ingest_matches(index, message, meta, mode)
+        # Default policy: Trigger only when results change
+        if current_match is False:
+            # Matched now, but not before
+            if "matched" not in meta:
+                meta["matched"] = self.format_aggs(meta["aggs"])
+            rule_notify(self.object, index, message, meta)
+            self.store_match(index, message)
+            await self.ingest_matches(index, message, meta, mode)

     def rule_matched_sync(self, index, message, meta, mode):
         """
@@ -363,25 +346,14 @@ class NotificationRuleData(object):
         current_match = self.get_match(index, message)
         log.debug(f"Rule matched: {index} - current match: {current_match}")
-        last_run_had_matches = current_match is True
-        if self.policy in ["change", "default"]:
-            # Change or Default policy, notifying only on new results
-            if last_run_had_matches:
-                # Last run had matches, and this one did too
-                # We don't need to notify
-                return
-        elif self.policy == "always":
-            # Only here for completeness, we notify below by default
-            pass
-
-        # We hit the return above if we don't need to notify
-        if "aggs" in meta and "matched" not in meta:
-            meta["matched"] = self.format_aggs(meta["aggs"])
-        rule_notify(self.object, index, message, meta)
-        self.store_match(index, message)
-        self.ingest_matches_sync(index, message, meta, mode)
+        # Default policy: Trigger only when results change
+        if current_match is False:
+            # Matched now, but not before
+            if "matched" not in meta:
+                meta["matched"] = self.format_aggs(meta["aggs"])
+            rule_notify(self.object, index, message, meta)
+            self.store_match(index, message)
+            self.ingest_matches_sync(index, message, meta, mode)

     # No async helper for this one as we only need it for schedules
     async def rule_no_match(self, index=None, message=None):
@@ -395,29 +367,15 @@ class NotificationRuleData(object):
         current_match = self.get_match(index)
         log.debug(f"Rule not matched: {index} - current match: {current_match}")
-        last_run_had_matches = current_match is True
-        if self.policy in ["change", "default"]:
-            print("policy in change or default")
-            # Change or Default policy, notifying only on new results
-            if not last_run_had_matches:
-                print("last run did not have matches")
-                # Last run did not have matches, nor did this one
-                # We don't need to notify
-                return
-        elif self.policy == "always":
-            print("policy is always")
-            # Only here for completeness, we notify below by default
-            pass
-
-        # Matched before, but not now
-        if self.policy in ["change", "always"]:
-            print("policy in change or always")
-            rule_notify(self.object, index, "no_match", None)
-            self.store_match(index, False)
-            await self.ingest_matches(
-                index=index, matches=[{"msg": None}], meta={"msg": message}, mode="schedule"
-            )
+        # Change policy: When there are no results following a successful run
+        if current_match is True:
+            # Matched before, but not now
+            if self.object.send_empty:
+                rule_notify(self.object, index, "no_match", None)
+            self.store_match(index, False)
+            await self.ingest_matches(
+                index=index, message={}, meta={"msg": message}, mode="schedule"
+            )

     async def run_schedule(self):
         """

View File

@@ -66,11 +66,10 @@ $(document).ready(function(){
         "file_size": "off",
         "lang_code": "off",
         "tokens": "off",
-        "rule_id": "off",
+        "rule_uuid": "off",
         "index": "off",
         "meta": "off",
         "match_ts": "off",
-        "batch_id": "off",
         //"lang_name": "off",
         // "words_noun": "off",
         // "words_adj": "off",

View File

@@ -212,12 +212,6 @@
         z-index: 39 !important;
     }

-    .small-field {
-        overflow: hidden;
-        text-overflow: ellipsis;
-        overflow-y: hidden;
-    }
-
 </style>
 <!-- Piwik --> {# Yes it's in the source, fight me #}
 <script type="text/javascript">

View File

@@ -4,8 +4,6 @@
 {% load joinsep %}
 {% load urlsafe %}
 {% load pretty %}
-{% load splitstr %}
-
 {% block table-wrapper %}
 <script src="{% static 'js/column-shifter.js' %}"></script>
 <div id="drilldown-table" class="column-shifter-container" style="position:relative; z-index:1;">
@@ -170,13 +168,6 @@
             <p>{{ row.cells.date }}</p>
             <p>{{ row.cells.time }}</p>
         </td>
-    {% elif column.name == 'match_ts' %}
-        <td class="{{ column.name }}">
-            {% with match_ts=cell|splitstr:'T' %}
-                <p>{{ match_ts.0 }}</p>
-                <p>{{ match_ts.1 }}</p>
-            {% endwith %}
-        </td>
     {% elif column.name == 'type' or column.name == 'mtype' %}
         <td class="{{ column.name }}">
             <a
@@ -385,29 +376,7 @@
         </td>
     {% elif column.name == "meta" %}
         <td class="{{ column.name }}">
-            <pre class="small-field" style="cursor: pointer;">{{ cell|pretty }}</pre>
-        </td>
-    {% elif 'id' in column.name %}
-        <td class="{{ column.name }}">
-            <div class="buttons">
-                <div class="nowrap-parent">
-                    <!-- <input class="input" type="text" value="{{ cell }}" style="width: 50px;" readonly> -->
-                    <a
-                        class="has-text-grey button nowrap-child"
-                        onclick="populateSearch('{{ column.name }}', '{{ cell|escapejs }}')">
-                        <span class="icon" data-tooltip="Populate {{ cell }}">
-                            <i class="fa-solid fa-arrow-left-long-to-line" aria-hidden="true"></i>
-                        </span>
-                    </a>
-                    <a
-                        class="has-text-grey button nowrap-child"
-                        onclick="window.prompt('Copy to clipboard: Ctrl+C, Enter', '{{ cell|escapejs }}');">
-                        <span class="icon" data-tooltip="Copy to clipboard">
-                            <i class="fa-solid fa-copy" aria-hidden="true"></i>
-                        </span>
-                    </a>
-                </div>
-            </div>
+            <pre>{{ cell|pretty }}</pre>
         </td>
     {% else %}
         <td class="{{ column.name }}">

View File

@@ -6,10 +6,4 @@ register = template.Library()

 @register.filter
 def pretty(data):
-    prettified = orjson.dumps(data, option=orjson.OPT_INDENT_2).decode("utf-8")
-    if prettified.startswith("{"):
-        prettified = prettified[1:]
-    if prettified.endswith("}"):
-        prettified = prettified[:-1]
-    return prettified
+    return orjson.dumps(data, option=orjson.OPT_INDENT_2).decode("utf-8")

View File

@@ -1,8 +0,0 @@
-from django import template
-
-register = template.Library()
-
-
-@register.filter
-def splitstr(value, arg):
-    return value.split(arg)

View File

@@ -12,6 +12,7 @@ def format_header(self):
     header = header.lower()
     header = header.title()
     if header != "Ident":
+        header = header.replace("Uuid", "UUID")
         header = header.replace("Id", "ID")
         header = header.replace("id", "ID")
     if header == "Ts":
@@ -78,8 +79,7 @@ class DrilldownTable(Table):
     file_md5 = Column()
     file_ext = Column()
     file_size = Column()
-    rule_id = Column()
-    batch_id = Column()
+    rule_uuid = Column()
     index = Column()
     meta = Column()
     match_ts = Column()

View File

@@ -20,4 +20,3 @@ django-debug-toolbar-template-profiler
 orjson
 msgpack
 apscheduler
-django-prettyjson