Implement indexing into Apache Druid #1

Closed
m wants to merge 263 commits from druid into master
3 changed files with 4 additions and 4 deletions
Showing only changes of commit a4dae2a583


@@ -5,5 +5,5 @@ redis
 pyYaML
 python-logstash
 service_identity
-csiphash
+siphashc
 Klein


@@ -6,5 +6,5 @@ redis
 pyYaML
 python-logstash
 service_identity
-csiphash
+siphashc
 Klein


@@ -2,7 +2,7 @@ from copy import deepcopy
 from datetime import datetime
 from json import dumps
-from csiphash import siphash24
+from siphashc import siphash
 import main
 from utils.logging.debug import debug
@@ -13,7 +13,7 @@ def dedup(numName, b):
     if "ts" in c.keys():
         del c["ts"]
     c["approxtime"] = str(datetime.utcnow().timestamp())[: main.config["Tweaks"]["DedupPrecision"]]
-    castHash = siphash24(main.hashKey, dumps(c, sort_keys=True).encode("utf-8"))
+    castHash = siphash(main.hashKey, dumps(c, sort_keys=True))
     del c["approxtime"]
     isDuplicate = any(castHash in main.lastEvents[x] for x in main.lastEvents.keys() if not x == numName)
     if isDuplicate:
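
For context on the dependency swap: csiphash exposes siphash24(), which expects a 16-byte key and a bytes message and returns an 8-byte digest, while siphashc exposes siphash(), which also accepts plain strings and returns the hash as a 64-bit integer, so the explicit .encode("utf-8") can be dropped. A minimal sketch of the difference; the key and event values below are invented for illustration, and it assumes both packages are installed side by side:

from json import dumps

from csiphash import siphash24   # old dependency: bytes in, 8-byte bytes digest out
from siphashc import siphash     # new dependency: accepts str, returns a 64-bit int

event = {"msg": "hello", "approxtime": "1700000000"}   # made-up event for the example
payload = dumps(event, sort_keys=True)

old_hash = siphash24(b"0123456789ABCDEF", payload.encode("utf-8"))  # bytes digest
new_hash = siphash("0123456789ABCDEF", payload)                     # integer

print(type(old_hash), type(new_hash))  # <class 'bytes'> <class 'int'>

Because castHash is only ever compared against other entries stored in main.lastEvents, the change in return type (bytes digest to integer) should be harmless as long as every stored hash is produced by the same function after this commit.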