Use a single Redis key for the queue so chunk sizes for thread allocation can be computed more precisely

This commit is contained in:
2022-09-30 07:22:22 +01:00
parent a5d29606e9
commit 02ff44a6f5
7 changed files with 25 additions and 26 deletions

View File

@@ -101,18 +101,17 @@ hash_key = get_hash_key()
@asyncio.coroutine
async def spawn_processing_threads(data):
len_data = len(data)
# log.debug(f"Spawning processing threads for batch of {len_data} messages")
loop = asyncio.get_event_loop()
tasks = []
if len(data) < CPU_THREADS:
if len(data) < CPU_THREADS * 100:
split_data = [data]
else:
msg_per_core = int(len(data) / CPU_THREADS)
split_data = array_split(data, ceil(len(data) / msg_per_core))
for index, split in enumerate(split_data):
# log.debug(f"Delegating processing of {len(split)} messages to thread {index}")
log.debug(f"Delegating processing of {len(split)} messages to thread {index}")
task = loop.run_in_executor(p, process_data, split)
tasks.append(task)