import asyncio
from os import getenv

import orjson

import db
import util
from processing import process

# Name of the Redis set that all sources push their serialised items onto.
KEYNAME = "queue"

# Maximum number of queued items popped and processed per iteration.
CHUNK_SIZE = int(getenv("MONOLITH_INGEST_CHUNK_SIZE", "900"))
# Delay in seconds between polling iterations.
ITER_DELAY = float(getenv("MONOLITH_INGEST_ITER_DELAY", "0.5"))

log = util.get_logger("ingest")


class Ingest:
    def __init__(self):
        name = self.__class__.__name__
        self.log = util.get_logger(name)
        self.log.info(
            f"Starting ingest handler with chunk size {CHUNK_SIZE}, "
            f"polling every {ITER_DELAY} seconds."
        )

    async def run(self):
        """Poll the queue forever, handing each chunk off for processing."""
        while True:
            await self.get_chunk()
            await asyncio.sleep(ITER_DELAY)

    async def get_chunk(self):
        """Pop up to CHUNK_SIZE items off the queue and spawn processing."""
        chunk = await db.ar.spop(KEYNAME, CHUNK_SIZE)
        if not chunk:
            return
        # Each queue entry is a JSON-serialised item; decode before handoff.
        items = [orjson.loads(item) for item in chunk]
        await process.spawn_processing_threads(items)
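

# Queue protocol note (assumption, inferred from get_chunk above): producers
# serialise each record with orjson and SADD it onto the same Redis set,
# e.g. `await db.ar.sadd(KEYNAME, orjson.dumps(record))`, with db.ar taken
# to be an async Redis client matching redis-py's async API.

# A minimal entrypoint sketch, assuming this module can be launched
# standalone; the real project may instead start Ingest from its own
# supervisor. Runs the poll loop until interrupted.
if __name__ == "__main__":
    try:
        asyncio.run(Ingest().run())
    except KeyboardInterrupt:
        log.info("Ingest handler stopped.")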