import asyncio
from os import getenv

import orjson

import db
import util
from processing import process

# SOURCES = ["4ch", "irc", "dis"]
# DEBUG CODE REMOVE ME
# SOURCES.remove("4ch")
# SOURCES.remove("dis")
# DEBUG CODE REMOVE ME
# KEYPREFIX = "queue."
KEYNAME = "queue"

# Chunk size per source (divide by len(SOURCES) for total)
CHUNK_SIZE = int(getenv("MONOLITH_INGEST_CHUNK_SIZE", "900"))
ITER_DELAY = float(getenv("MONOLITH_INGEST_ITER_DELAY", "0.5"))

log = util.get_logger("ingest")


class Ingest(object):
    def __init__(self):
        name = self.__class__.__name__
        self.log = util.get_logger(name)
        self.log.info(
            (
                "Starting ingest handler for chunk size of "
                f"{CHUNK_SIZE} every {ITER_DELAY} seconds."
            )
        )

    async def run(self):
        # Poll the queue forever, sleeping ITER_DELAY seconds between passes.
        while True:
            await self.get_chunk()
            await asyncio.sleep(ITER_DELAY)

    async def get_chunk(self):
        items = []
        # for source in SOURCES:
        #     key = f"{KEYPREFIX}{source}"
        # Pop up to CHUNK_SIZE raw entries off the queue set.
        chunk = await db.ar.spop(KEYNAME, CHUNK_SIZE)
        if not chunk:
            return
        # Decode each JSON entry before handing the batch to processing.
        for item in chunk:
            item = orjson.loads(item)
            items.append(item)
        if items:
            await process.spawn_processing_threads(items)
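
# Usage sketch (illustrative only, not part of this module): the wider
# application is assumed to construct Ingest and schedule run() on its
# event loop, e.g. via asyncio.run(). The entry point below is
# hypothetical and kept commented out so nothing runs on import.
#
#     if __name__ == "__main__":
#         ingest = Ingest()
#         asyncio.run(ingest.run())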