import asyncio

import ujson

import db
import util
from processing import process

# Source queues to drain on each pass
SOURCES = ["irc", "dis", "4ch"]

# Redis set keys are formed as f"{KEYPREFIX}{source}", e.g. "queue.irc"
KEYPREFIX = "queue."

# Maximum number of items to pop from each queue per iteration
CHUNK_SIZE = 1000

# Seconds to sleep between polling iterations
ITER_DELAY = 0.5
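
# Producer-side sketch (an assumption, not part of this module): any code
# that SADDs a JSON-encoded string into one of these sets will be picked up
# by the loop below, e.g.
#
#     await db.ar.sadd("queue.irc", ujson.dumps({"msg": "hello"}))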


class Ingest:
    """Drains JSON items from the per-source Redis set queues, hands them
    to the processing pipeline, then stores the batch in Kafka."""

    def __init__(self):
        # Name the logger after the class for easy filtering
        name = self.__class__.__name__
        self.log = util.get_logger(name)

    async def run(self):
        # Poll the queues forever, sleeping briefly between passes
        while True:
            await self.get_chunk()
            await asyncio.sleep(ITER_DELAY)

    async def get_chunk(self):
        items = []
        for source in SOURCES:
            key = f"{KEYPREFIX}{source}"
            # Atomically pop up to CHUNK_SIZE members from this source's set
            chunk = await db.ar.spop(key, CHUNK_SIZE)
            if not chunk:
                continue
            # self.log.info(f"Got chunk: {chunk}")
            for item in chunk:
                item = ujson.loads(item)
                # self.log.info(f"Got item: {item}")
                items.append(item)
        if items:
            self.log.info(f"Processing {len(items)} items")
            await process.spawn_processing_threads(items)
            self.log.info(f"Done processing {len(items)} items")
            await db.store_kafka_batch(items)
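

# Minimal usage sketch, not part of the original module: assumes this file
# can act as the entrypoint and that db.ar (the async Redis client) is
# initialised by import time.
if __name__ == "__main__":
    asyncio.run(Ingest().run())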