Make backend names lowercase, improve search queries for rules
parent 455da73b95
commit fd10a4ba8e
@@ -20,7 +20,7 @@ logger = logging.getLogger(__name__)
 
 class DruidBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Druid")
+        super().__init__("druid")
 
     def initialise(self, **kwargs):
         # self.client = PyDruid("http://broker:8082", "druid/v2")
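Note: the lowercased constructor argument only matters if the base class treats the backend name as a case-sensitive key. A minimal sketch of that assumption (this StorageBackend is hypothetical, not the project's actual base class):

import logging

class StorageBackend:
    def __init__(self, name):
        # Assumption: the name is used verbatim as a registry/config key
        # and as the logger name, so "Druid" and "druid" differ.
        self.name = name
        self.log = logging.getLogger(name)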
@@ -33,7 +33,7 @@ mapping = {
             "ts": {"type": "date", "format": "epoch_second"},
             "match_ts": {"type": "date", "format": "iso8601"},
             "file_tim": {"type": "date", "format": "epoch_millis"},
-            "rule_uuid": {"type": "keyword"},
+            "rule_id": {"type": "keyword"},
         }
     }
 }
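Note: renaming the mapped field from rule_uuid to rule_id keeps the mapping in step with the query changes below. As a "keyword" field it is matched exactly rather than analyzed, which suits stringified UUIDs. A sketch of an exact-match filter against it (the query body is illustrative, not taken from this codebase):

# Exact-match term filter on the keyword field; the value would be
# str(rule_object.id) in the code below.
query = {"query": {"term": {"rule_id": "2f1c..."}}}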
@@ -43,7 +43,7 @@ for field in keyword_fields:
 
 class ElasticsearchBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Elasticsearch")
+        super().__init__("elasticsearch")
         self.client = None
         self.async_client = None
 
@@ -272,10 +272,12 @@ class ElasticsearchBackend(StorageBackend):
         """
         if self.async_client is None:
             await self.async_initialise()
+        print("MATCHES", matches)
         for match in matches:
             result = await self.async_client.index(
                 index=settings.INDEX_RULE_STORAGE, body=match
             )
+            print("RESULT", result)
             if not result["result"] == "created":
                 self.log.error(f"Indexing failed: {result}")
         self.log.debug(f"Indexed {len(matches)} messages in ES")
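Note: the check above keys on elasticsearch-py's index() response, whose "result" field is "created" for new documents (or "updated" when an existing _id is overwritten). A minimal standalone sketch, assuming a reachable cluster at localhost:9200 and "rule_storage" as a stand-in for settings.INDEX_RULE_STORAGE:

import asyncio
from elasticsearch import AsyncElasticsearch

async def main():
    client = AsyncElasticsearch("http://localhost:9200")  # assumed URL
    result = await client.index(index="rule_storage", body={"msg": "hi"})
    # "created" on first write; "updated" if the same _id is reused
    print(result["result"])
    await client.close()

asyncio.run(main())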
@@ -439,6 +441,7 @@ class ElasticsearchBackend(StorageBackend):
                 raise QueryError(error)
             if len(response["hits"]["hits"]) == 0:
                 # No results, skip
+                result_map[index] = ({}, [])
                 continue
             meta, response = self.parse(response, meta=True)
             # print("Parsed response", response)
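Note: the added line makes empty indices explicit: an index with no hits is now recorded as a tuple of empty containers instead of being skipped, so consumers can iterate result_map without guarding against missing keys. A sketch of the shape this produces (values are hypothetical):

# Every queried index is present, possibly as (empty_meta, empty_hits).
result_map = {
    "main": ({}, []),
    "rule_storage": ({"took": 3}, [{"msg": "example"}]),
}
for index, (meta, hits) in result_map.items():
    if not hits:
        continue  # queried but empty; the key still exists
    print(index, len(hits))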
@@ -500,7 +503,7 @@ class ElasticsearchBackend(StorageBackend):
 
         if rule_object is not None:
             index = settings.INDEX_RULE_STORAGE
-            add_bool.append({"rule_uuid": str(rule_object.id)})
+            add_bool.append({"rule_id": str(rule_object.id)})
         else:
             # I - Index
             index = parse_index(request.user, query_params)
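Note: with the mapping renamed above, rule-scoped searches now filter on rule_id. How add_bool is folded into the final query body is not shown in this hunk; a plausible reading, sketched with hypothetical assembly code, is that each pair becomes a clause under bool/must:

# Hypothetical assembly of add_bool into the search body.
add_bool = [{"rule_id": "2f1c..."}]  # value is str(rule_object.id)
musts = [{"match": item} for item in add_bool]
search_query = {"query": {"bool": {"must": musts}}}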
@@ -520,13 +523,18 @@ class ElasticsearchBackend(StorageBackend):
         if isinstance(sources, dict):
             return sources
         total_count = len(sources)
-        total_sources = len(settings.MAIN_SOURCES) + len(settings.SOURCES_RESTRICTED)
+        # Total is -1 due to the "all" source
+        total_sources = (
+            len(settings.MAIN_SOURCES) - 1 + len(settings.SOURCES_RESTRICTED)
+        )
+        print("total_count", total_count, "total_sources", total_sources)
         if total_count != total_sources:
             add_top_tmp = {"bool": {"should": []}}
             for source_iter in sources:
                 add_top_tmp["bool"]["should"].append(
                     {"match_phrase": {"src": source_iter}}
                 )
-            add_top.append(add_top_tmp)
+            if rule_object is not None and query_params["source"] != "all":
+                add_top.append(add_top_tmp)
 
         # R - Ranges
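Note: the corrected count excludes the pseudo-source "all" from MAIN_SOURCES, so a user who selected every real source no longer triggers the per-source filter. A worked example with hypothetical settings values:

MAIN_SOURCES = ["all", "irc", "discord"]  # hypothetical
SOURCES_RESTRICTED = ["restricted"]       # hypothetical
total_sources = len(MAIN_SOURCES) - 1 + len(SOURCES_RESTRICTED)  # 2 + 1 = 3
sources = ["irc", "discord", "restricted"]
# All real sources chosen, so total_count == total_sources
# and no match_phrase filter is added.
assert len(sources) == total_sources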
@@ -547,12 +555,17 @@ class ElasticsearchBackend(StorageBackend):
         sort = parse_sort(query_params)
         if isinstance(sort, dict):
             return sort
 
+        if rule_object is not None:
+            field = "match_ts"
+        else:
+            field = "ts"
         if sort:
             # For Druid compatibility
             sort_map = {"ascending": "asc", "descending": "desc"}
             sorting = [
                 {
-                    "ts": {
+                    field: {
                         "order": sort_map[sort],
                     }
                 }
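Note: rule results are now sorted on the time the rule matched (match_ts, an iso8601 date per the mapping above) rather than the message timestamp (ts). For example, a rule query with sort == "descending" produces this sorting clause:

# field == "match_ts" because rule_object is not None;
# sort_map translates "descending" to "desc".
sorting = [{"match_ts": {"order": "desc"}}]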
@@ -594,6 +607,7 @@ class ElasticsearchBackend(StorageBackend):
             search_query,
             index=index,
         )
+        print("query", search_query)
         if "message" in response:
             return response
 
@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
 
 class ManticoreBackend(StorageBackend):
     def __init__(self):
-        super().__init__("Manticore")
+        super().__init__("manticore")
 
     def initialise(self, **kwargs):
         """