2022-09-07 06:20:30 +00:00
|
|
|
import asyncio
|
2022-09-20 22:03:02 +00:00
|
|
|
from os import getenv
|
2022-09-07 06:20:30 +00:00
|
|
|
|
2022-09-16 16:09:49 +00:00
|
|
|
import orjson
|
2022-09-07 06:20:30 +00:00
|
|
|
|
2022-09-07 06:20:30 +00:00
|
|
|
import db
|
|
|
|
import util
|
2022-09-13 21:17:46 +00:00
|
|
|
from processing import process
|
|
|
|
|
2022-09-14 17:32:32 +00:00
|
|
|
# Queue names to drain each iteration — presumably 4chan / IRC / Discord feeds
# (TODO confirm against the producers that push to these queues).
SOURCES = ["4ch", "irc", "dis"]

# Redis key prefix; the full set key for a source is e.g. "queue.4ch".
KEYPREFIX = "queue."

# Chunk size per source (multiply by len(SOURCES) for the per-iteration total)
CHUNK_SIZE = int(getenv("MONOLITH_INGEST_CHUNK_SIZE", "900"))

# Seconds to sleep between ingest iterations.
ITER_DELAY = float(getenv("MONOLITH_INGEST_ITER_DELAY", "0.5"))

# Module-level logger for this file's top-level code.
log = util.get_logger("ingest")
|
|
|
|
|
2022-09-07 06:20:30 +00:00
|
|
|
|
2022-09-07 06:20:30 +00:00
|
|
|
class Ingest(object):
    """Continuously drain queued items from Redis and hand them to processing.

    Each iteration pops up to CHUNK_SIZE JSON-encoded members from every
    source queue (``queue.<source>``), decodes them, and passes the combined
    batch to ``process.spawn_processing_threads``.
    """

    def __init__(self):
        # Name the logger after the concrete (sub)class so log lines are
        # attributable even if Ingest is subclassed.
        name = self.__class__.__name__
        self.log = util.get_logger(name)

    async def run(self):
        """Main loop: fetch one chunk, sleep ITER_DELAY seconds, repeat forever."""
        while True:
            await self.get_chunk()
            await asyncio.sleep(ITER_DELAY)

    async def get_chunk(self):
        """Pop up to CHUNK_SIZE items from each source queue and process them.

        Sources with an empty queue are skipped; nothing is dispatched when
        all queues are empty.
        """
        items = []
        for source in SOURCES:
            key = f"{KEYPREFIX}{source}"
            # SPOP with a count removes and returns up to CHUNK_SIZE members
            # in one round trip; falsy result means the queue was empty.
            chunk = await db.ar.spop(key, CHUNK_SIZE)
            if not chunk:
                continue
            # Members are JSON payloads — decode each and accumulate the batch.
            items.extend(orjson.loads(item) for item in chunk)
        if items:
            await process.spawn_processing_threads(items)
|