Fix policies not triggering properly

Mark Veidemanis 2023-02-09 20:28:34 +00:00
parent 7b6da7b704
commit 9519c1ac9f
Signed by: m
GPG Key ID: 5ACFCEED46C0904F
4 changed files with 28 additions and 29 deletions

View File

@@ -272,12 +272,10 @@ class ElasticsearchBackend(StorageBackend):
         """
         if self.async_client is None:
             await self.async_initialise()
-        print("MATCHES", matches)
         for match in matches:
             result = await self.async_client.index(
                 index=settings.INDEX_RULE_STORAGE, body=match
             )
-            print("RESULT", result)
             if not result["result"] == "created":
                 self.log.error(f"Indexing failed: {result}")
         self.log.debug(f"Indexed {len(matches)} messages in ES")
@@ -527,7 +525,6 @@ class ElasticsearchBackend(StorageBackend):
         total_sources = (
             len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
         )
-        print("total_count", total_count, "total_sources", total_sources)
         if total_count != total_sources:
             add_top_tmp = {"bool": {"should": []}}
             for source_iter in sources:
@@ -607,7 +604,6 @@ class ElasticsearchBackend(StorageBackend):
             search_query,
             index=index,
         )
-        print("query", search_query)
         if "message" in response:
             return response

View File

@@ -118,7 +118,6 @@ def parse_source(user, query_params, raise_error=False):
     if source:
         sources = [source]
     else:
-        print("NOT SOURCE")
         sources = list(settings.MAIN_SOURCES)
         if user.has_perm("core.restricted_sources"):
             for source_iter in settings.SOURCES_RESTRICTED:

View File

@@ -260,7 +260,15 @@ class NotificationRuleData(object):
             hash_matches = self.object.match.get(index) == match
             return hash_matches
-        return self.object.match.get(index)
+        returned_match = self.object.match.get(index, None)
+        if type(returned_match) == int:
+            # We are getting a hash from the database,
+            # but we have nothing to check it against.
+            # In this instance, we are checking if we got a match
+            # at all last time. We can confidently say that since
+            # we have a hash, we did.
+            returned_match = True
+        return returned_match

     def format_aggs(self, aggs):
         """
@@ -393,31 +401,26 @@ class NotificationRuleData(object):
         """
         current_match = self.get_match(index)
-        log.debug(f"Rule not matched: {index} - current match: {current_match}")
+        log.debug(f"Rule not matched: {index} - current match: {current_match}: {message}")
         last_run_had_matches = current_match is True
-        if self.policy in ["change", "default"]:
-            print("policy in change or default")
-            # Change or Default policy, notifying only on new results
-            if not last_run_had_matches:
-                print("last run did not have matches")
-                # Last run did not have matches, nor did this one
-                # We don't need to notify
-                return
-        elif self.policy == "always":
-            print("policy is always")
-            # Only here for completeness, we notify below by default
-            pass
-        # Matched before, but not now
-        if self.policy in ["change", "always"]:
-            print("policy in change or always")
-            rule_notify(self.object, index, "no_match", None)
+        initial = current_match is None
         self.store_match(index, False)
-        await self.ingest_matches(
-            index=index, matches=[{"msg": None}], meta={"msg": message}, mode="schedule"
-        )
+        if self.policy != "always":
+            # We hit the return above if we don't need to notify
+            if self.policy in ["change", "default"]:
+                if not last_run_had_matches and not initial:
+                    # We don't need to notify if the last run didn't have matches
+                    return
+        if self.policy in ["always", "change"]:
+            # Never notify for empty matches on default policy
+            rule_notify(self.object, index, "no_match", None)
+        await self.ingest_matches(
+            index=index, matches=[{"msg": None}], meta={"msg": message}, mode="schedule"
+        )

     async def run_schedule(self):
         """
@@ -428,12 +431,13 @@ class NotificationRuleData(object):
         response = await self.db.schedule_query_results(self)
         if not response:
             # No results in the result_map
-            print("No results in result_map")
             await self.rule_no_match(message="No response from database")
+            return
         for index, (meta, results) in response.items():
             if not results:
                 # Falsy results, no matches
                 await self.rule_no_match(index, message="No results for index")
+                continue
             # Add the match values of all aggregations to a list
             aggs_for_index = []

View File

@@ -73,7 +73,7 @@ class RuleClear(LoginRequiredMixin, PermissionRequiredMixin, APIView):
         rule = NotificationRule.objects.get(pk=pk, user=request.user)
         if isinstance(rule.match, dict):
             for index in rule.match:
-                rule.match[index] = False
+                rule.match[index] = None
         rule.save()
         cleared_indices = ", ".join(rule.match)
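The clear endpoint now resets each index to None rather than False, which matters under the initial check added above: a cleared rule looks like one that has never run, not like one whose last run found nothing, so change and default policies are free to notify again on the next run. A minimal illustration, using a stand-in dict for rule.match:

# Illustrative only: clearing to None vs. clearing to False.
match = {"main": 123456789}  # previous run matched; an integer hash is stored

match["main"] = False        # old clear: reads as "last run found nothing",
                             # so a change/default rule stays silent on the next empty run

match["main"] = None         # new clear: reads as "never ran" (initial),
                             # so the next run may notify again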