monolith/sources/ingest.py

import asyncio
import ujson
import db
import util
from processing import process

# Redis set names are KEYPREFIX + source, e.g. "queue.4ch".
SOURCES = ["4ch", "irc", "dis"]
KEYPREFIX = "queue."
CHUNK_SIZE = 90000  # max items popped from each queue per iteration
ITER_DELAY = 0.5  # seconds to sleep between polling iterations
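
# db and util are internal monolith modules. db.ar is assumed here to be
# an async Redis client; a minimal stand-in for exercising this module on
# its own (an assumption, not the project's actual wiring) could be:
#
#   import redis.asyncio as aioredis
#   ar = aioredis.Redis(host="localhost", port=6379)
#
# SPOP with a count argument atomically pops up to that many members from
# the set, so concurrent consumers never receive the same item twice.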

class Ingest:
    def __init__(self):
        # Name the logger after the class so log lines are attributable.
        name = self.__class__.__name__
        self.log = util.get_logger(name)

    async def run(self):
        # Example of a single queued 4chan item, kept for manual testing:
        # items = [{'no': 23567753, 'now': '09/12/22(Mon)20:10:29', 'name': 'Anonysmous', 'filename': '1644986767568', 'ext': '.webm', 'w': 1280, 'h': 720, 'tn_w': 125, 'tn_h': 70, 'tim': 1663027829301457, 'time': 1663027829, 'md5': 'zeElr1VR05XpZ2XuAPhmPA==', 'fsize': 3843621, 'resto': 23554700, 'type': 'msg', 'src': '4ch', 'net': 'gif', 'channel': '23554700'}]
        # await process.spawn_processing_threads(items)
        while True:
            await self.get_chunk()
            await asyncio.sleep(ITER_DELAY)

    async def get_chunk(self):
        items = []
        for source in SOURCES:
            key = f"{KEYPREFIX}{source}"
            # Pop up to CHUNK_SIZE members at once; an empty result means
            # this queue has nothing pending.
            chunk = await db.ar.spop(key, CHUNK_SIZE)
            if not chunk:
                continue
            for item in chunk:
                # Each queued member is a JSON-encoded message dict.
                item = ujson.loads(item)
                items.append(item)
        if items:
            self.log.info(f"Processing {len(items)} items")
            await process.spawn_processing_threads(items)
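
# The real entry point presumably lives elsewhere in monolith; this guard
# is a minimal sketch of how the ingest loop could be driven standalone.
if __name__ == "__main__":
    asyncio.run(Ingest().run())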