Implement running scheduled rules and checking aggregations

This commit is contained in:
2023-01-15 17:59:12 +00:00
parent 435d9b5571
commit 6bfa0aa73b
15 changed files with 600 additions and 258 deletions

View File

@@ -6,6 +6,14 @@ from django.conf import settings
from core.db import StorageBackend, add_defaults
from core.db.processing import parse_druid
from core.lib.parsing import (
parse_date_time,
parse_index,
parse_sentiment,
parse_size,
parse_sort,
parse_source,
)
logger = logging.getLogger(__name__)
@@ -155,12 +163,12 @@ class DruidBackend(StorageBackend):
else:
sizes = settings.MAIN_SIZES
if not size:
size = self.parse_size(query_params, sizes)
size = parse_size(query_params, sizes)
if isinstance(size, dict):
return size
# I - Index
index = self.parse_index(request.user, query_params)
index = parse_index(request.user, query_params)
if isinstance(index, dict):
return index
@@ -173,7 +181,7 @@ class DruidBackend(StorageBackend):
return search_query
# S - Sources
sources = self.parse_source(request.user, query_params)
sources = parse_source(request.user, query_params)
if isinstance(sources, dict):
return sources
total_count = len(sources)
@@ -182,20 +190,20 @@ class DruidBackend(StorageBackend):
add_in["src"] = sources
# R - Ranges
from_ts, to_ts = self.parse_date_time(query_params)
from_ts, to_ts = parse_date_time(query_params)
if from_ts:
addendum = f"{from_ts}/{to_ts}"
search_query["intervals"] = [addendum]
# S - Sort
sort = self.parse_sort(query_params)
sort = parse_sort(query_params)
if isinstance(sort, dict):
return sort
if sort:
search_query["order"] = sort
# S - Sentiment
sentiment_r = self.parse_sentiment(query_params)
sentiment_r = parse_sentiment(query_params)
if isinstance(sentiment_r, dict):
return sentiment_r
if sentiment_r: