import asyncio
from os import getenv

import orjson

import db
import util
from processing import process

# Redis list that buffers incoming items awaiting processing.
KEYNAME = "queue"

# How many queued items to pull per iteration and how long to sleep between
# iterations; both overridable via environment variables.
CHUNK_SIZE = int(getenv("MONOLITH_INGEST_CHUNK_SIZE", "900"))
ITER_DELAY = float(getenv("MONOLITH_INGEST_ITER_DELAY", "0.5"))

log = util.get_logger("ingest")


class Ingest:
    """Periodically read a chunk of the Redis queue and hand it to processing."""

    def __init__(self):
        name = self.__class__.__name__
        self.log = util.get_logger(name)
        self.log.info(
            (
                "Starting ingest handler for chunk size of "
                f"{CHUNK_SIZE} every {ITER_DELAY} seconds."
            )
        )

    async def run(self):
        """Main loop: process one chunk, sleep ITER_DELAY seconds, repeat forever."""
        while True:
            await self.get_chunk()
            await asyncio.sleep(ITER_DELAY)

    async def get_chunk(self):
        """Fetch up to CHUNK_SIZE items from the tail of the queue, decode each
        as JSON, and dispatch the batch to the processing module.

        NOTE(review): LRANGE is a non-destructive read — the items stay on the
        list and will be read again next iteration. The commented-out RPOP
        variant below (Redis 6.2+) would consume them instead; confirm which
        behavior is intended before relying on this for real queue draining.
        """
        length = await db.ar.llen(KEYNAME)
        # Clamp to 0 so a queue shorter than CHUNK_SIZE reads from the head,
        # instead of relying on Redis clamping an out-of-range negative start.
        start_num = max(length - CHUNK_SIZE, 0)
        chunk = await db.ar.lrange(KEYNAME, start_num, -1)
        # chunk = await db.ar.rpop(KEYNAME, CHUNK_SIZE)  # destructive alternative
        if not chunk:
            return
        # Each queue entry is a JSON-encoded item; decode the whole chunk.
        items = [orjson.loads(item) for item in chunk]
        await process.spawn_processing_threads(items)