Add cache indicator, time the fetch from Redis and round it
This commit is contained in:
parent 02e1b4698d
commit 5be02807e3
@@ -1,12 +1,22 @@
import json
import logging
import random
import string
import time
from datetime import datetime
from math import floor, log10
from pprint import pprint

import manticoresearch
from django.conf import settings
from siphashc import siphash

from core import r
from core.lib.processing import annotate_results, filter_blacklisted, parse_results
from core.views import helpers

logger = logging.getLogger(__name__)


def initialise_manticore():
    """
@@ -22,6 +32,22 @@ def initialise_manticore():
api_client, client = initialise_manticore()


def initialise_caching():
    hash_key = r.get("cache_hash_key")
    if not hash_key:
        letters = string.ascii_lowercase
        hash_key = "".join(random.choice(letters) for i in range(16))
        logger.debug(f"Created new hash key: {hash_key}")
        r.set("cache_hash_key", hash_key)
    else:
        hash_key = hash_key.decode("ascii")
        logger.debug(f"Decoded hash key: {hash_key}")
    return hash_key


hash_key = initialise_caching()


def construct_query(query, size, index, blank=False):
    """
    Accept some query parameters and construct an OpenSearch query.
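The key returned by initialise_caching() is reused as the siphash key when run_query() below builds per-user cache keys. A minimal standalone sketch of that key scheme, with made-up example values (the user id, query and 16-character key here are illustrative, not from this commit):

import json

from siphashc import siphash

hash_key = "abcdefghijklmnop"  # illustrative 16-character key, as initialise_caching() would return
user_id = 42  # illustrative user id
search_query = {"index": "main", "query": {"match": {"msg": "hello"}}}  # illustrative query

# Normalise the query so identical searches hash to the same value
query_normalised = json.dumps(search_query, sort_keys=True)
cache_key = f"query_cache.{user_id}.{siphash(hash_key, query_normalised)}"
print(cache_key)  # e.g. query_cache.42.<64-bit integer>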
@@ -42,9 +68,27 @@ def construct_query(query, size, index, blank=False):
def run_query(client, user, search_query):
    start = time.process_time()
    query_normalised = json.dumps(search_query, sort_keys=True)
    hash = siphash(hash_key, query_normalised)
    cache_hit = r.get(f"query_cache.{user.id}.{hash}")
    if cache_hit:
        print("Cache hit")
        response = json.loads(cache_hit)
        time_took = (time.process_time() - start) * 1000
        # Round to 3 significant figures
        time_took_rounded = round(time_took, 3 - int(floor(log10(abs(time_took)))) - 1)
        response["took"] = time_took_rounded
        response["cache"] = True
        return response
    response = client.search(search_query)
    response = response.to_dict()
    filter_blacklisted(user, response)
    print("Writing to cache")
    to_write_cache = json.dumps(response)
    r.set(f"query_cache.{user.id}.{hash}", to_write_cache)
    r.expire(f"query_cache.{user.id}.{hash}", 30)
    print("Written to cache")
    return response
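The round() expression above keeps three significant figures rather than three decimal places: floor(log10(abs(x))) gives the value's order of magnitude, which determines how many decimal places are still needed. A small standalone sketch of the same formula with illustrative values (not measurements from this commit):

from math import floor, log10


def round_sig(value, figures=3):
    # Decimal places to keep = figures - (order of magnitude + 1).
    # abs() guards against negative values, but a value of exactly 0
    # would raise a math domain error in log10.
    return round(value, figures - int(floor(log10(abs(value)))) - 1)


print(round_sig(0.04567891))  # 0.0457
print(round_sig(1.23456))     # 1.23
print(round_sig(987.654))     # 988.0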
@@ -166,7 +210,9 @@ def query_results(
        for source_iter in sources:
            add_top_tmp["bool"]["should"].append({"equals": {"src": source_iter}})
            total_count += 1
        total_sources = len(settings.MANTICORE_MAIN_SOURCES) + len(
            settings.MANTICORE_SOURCES_RESTRICTED
        )
        if not total_count == total_sources:
            add_top.append(add_top_tmp)
@@ -293,4 +339,6 @@ def query_results(
        "card": results["hits"]["total"],
        "took": results["took"],
    }
    if "cache" in results:
        context["cache"] = results["cache"]
    return context
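With the cache flag copied into the context, a caller or template can distinguish cached from live results. A hypothetical caller-side check (the context values below are invented for illustration):

context = {"card": 120, "took": 0.0457, "cache": True}  # illustrative values
badge = "cached" if context.get("cache") else "live"
print(f"query took {context['took']} ms ({badge})")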