Use only one Redis key for the queue to make chunk size more precise for thread allocation
@@ -101,18 +101,17 @@ hash_key = get_hash_key()
-@asyncio.coroutine
 async def spawn_processing_threads(data):
     len_data = len(data)
     # log.debug(f"Spawning processing threads for batch of {len_data} messages")

     loop = asyncio.get_event_loop()
     tasks = []

-    if len(data) < CPU_THREADS:
+    if len(data) < CPU_THREADS * 100:
         split_data = [data]
     else:
         msg_per_core = int(len(data) / CPU_THREADS)
         split_data = array_split(data, ceil(len(data) / msg_per_core))
     for index, split in enumerate(split_data):
-        # log.debug(f"Delegating processing of {len(split)} messages to thread {index}")
+        log.debug(f"Delegating processing of {len(split)} messages to thread {index}")
         task = loop.run_in_executor(p, process_data, split)
         tasks.append(task)
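For context, a minimal self-contained sketch of the chunking strategy the new hunk uses. The names CPU_THREADS, p, and process_data come from the diff but are defined elsewhere in the module; the stand-in values below (a worker count of 4, a ThreadPoolExecutor for p, a trivial worker) are assumptions made only so the sketch runs on its own.

# Sketch of the batch-splitting logic from the diff above.
# Assumptions: CPU_THREADS, p, and process_data are hypothetical
# stand-ins for the real module-level objects.
import asyncio
from concurrent.futures import ThreadPoolExecutor
from math import ceil

from numpy import array_split

CPU_THREADS = 4                       # assumed worker count
p = ThreadPoolExecutor(CPU_THREADS)   # assumed shared executor

def process_data(chunk):
    return len(chunk)                 # placeholder for the real worker

async def spawn_processing_threads(data):
    loop = asyncio.get_event_loop()

    # Batches smaller than 100 messages per worker are not worth
    # splitting; hand the whole batch to a single executor slot.
    if len(data) < CPU_THREADS * 100:
        split_data = [data]
    else:
        # One roughly equal chunk per worker.
        msg_per_core = int(len(data) / CPU_THREADS)
        split_data = array_split(data, ceil(len(data) / msg_per_core))

    tasks = [loop.run_in_executor(p, process_data, split)
             for split in split_data]
    return await asyncio.gather(*tasks)

if __name__ == "__main__":
    # 1000 messages with 4 workers splits into 4 chunks of 250.
    print(asyncio.run(spawn_processing_threads(list(range(1000)))))

Note the effect of the changed threshold: a batch is only split once each worker would receive at least 100 messages, so msg_per_core is never rounded down to zero and small batches skip the splitting overhead entirely.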