Remove redaction stuff
This commit is contained in:
parent cc20c545dd
commit bdee5a2aae
@@ -1,21 +1,12 @@
from copy import deepcopy
from datetime import datetime, timedelta
# from copy import deepcopy
# from datetime import datetime, timedelta

from django.conf import settings
from opensearchpy import OpenSearch
from opensearchpy.exceptions import NotFoundError, RequestError

from core.lib.threshold import annotate_num_chans, annotate_num_users, annotate_online
from core.views.helpers import (
LookupDenied,
SearchDenied,
dedup_list,
encrypt_list,
hash_list,
hash_lookup,
obfuscate_list,
randomise_list,
)
from core.views.helpers import dedup_list

# from json import dumps
# pp = lambda x: print(dumps(x, indent=2))
@@ -216,9 +207,7 @@ def construct_query(query, size, use_query_string=True, tokens=False):
return query_base


def run_main_query(
client, user, query, custom_query=False, index=None, size=None, filter=True
):
def run_main_query(client, user, query, custom_query=False, index=None, size=None):
"""
Low level helper to run an ES query.
Accept a user to pass it to the filter, so we can
@@ -240,8 +229,7 @@ def run_main_query(
except NotFoundError as err:
print("OpenSearch error", err)
return err
if filter:
filter_blacklisted(user, response)
filter_blacklisted(user, response)
return response

@@ -319,27 +307,27 @@ def query_results(
query_created = False

# Lookup the hash values but don't disclose them to the user
denied = []
if lookup_hashes:
if settings.HASHING:
query_params = deepcopy(query_params)
denied_q = hash_lookup(request.user, query_params)
denied.extend(denied_q)
if tags:
denied_t = hash_lookup(request.user, tags, query_params)
denied.extend(denied_t)
# denied = []
# if lookup_hashes:
# if settings.HASHING:
# query_params = deepcopy(query_params)
# denied_q = hash_lookup(request.user, query_params)
# denied.extend(denied_q)
# if tags:
# denied_t = hash_lookup(request.user, tags, query_params)
# denied.extend(denied_t)

message = "Permission denied: "
for x in denied:
if isinstance(x, SearchDenied):
message += f"Search({x.key}: {x.value}) "
elif isinstance(x, LookupDenied):
message += f"Lookup({x.key}: {x.value}) "
if denied:
# message = [f"{i}" for i in message]
# message = "\n".join(message)
message_class = "danger"
return {"message": message, "class": message_class}
# message = "Permission denied: "
# for x in denied:
# if isinstance(x, SearchDenied):
# message += f"Search({x.key}: {x.value}) "
# elif isinstance(x, LookupDenied):
# message += f"Lookup({x.key}: {x.value}) "
# if denied:
# # message = [f"{i}" for i in message]
# # message = "\n".join(message)
# message_class = "danger"
# return {"message": message, "class": message_class}

if request.user.is_anonymous:
sizes = settings.OPENSEARCH_MAIN_SIZES_ANON
@@ -357,14 +345,36 @@ def query_results(
source = None
if "source" in query_params:
source = query_params["source"]
if source not in settings.OPENSEARCH_MAIN_SOURCES:

if source in settings.OPENSEARCH_SOURCES_RESTRICTED:
if not request.user.has_perm("restricted_sources"):
message = "Access denied"
message_class = "danger"
return {"message": message, "class": message_class}
elif source not in settings.OPENSEARCH_MAIN_SOURCES:
message = "Invalid source"
message_class = "danger"
return {"message": message, "class": message_class}
if source != "all":
add_bool.append({"src": source})

date_query = False
if source == "all":
source = None  # the next block will populate it

if source:
sources = [source]
else:
sources = settings.OPENSEARCH_MAIN_SOURCES
if request.user.has_perm("restricted_sources"):
for source_iter in settings.OPENSEARCH_SOURCES_RESTRICTED:
sources.append(source_iter)
else:
sources = settings.OPENSEARCH_MAIN_SOURCES

add_top_tmp = {"bool": {"should": []}}
for source_iter in sources:
add_top_tmp["bool"]["should"].append({"match_phrase": {"src": source_iter}})
add_top.append(add_top_tmp)

# date_query = False
if set({"from_date", "to_date", "from_time", "to_time"}).issubset(
query_params.keys()
):
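The new branch above widens the source filter: every permitted source ends up as a match_phrase clause inside a bool/should block. A minimal standalone sketch of that assembly (the two settings lists and their values are assumed examples):

    OPENSEARCH_MAIN_SOURCES = ["irc", "dis"]        # assumed example values
    OPENSEARCH_SOURCES_RESTRICTED = ["int"]         # assumed example values

    def build_source_filter(source, has_restricted_perm):
        if source:
            sources = [source]  # a single source was requested
        else:
            sources = list(OPENSEARCH_MAIN_SOURCES)  # "all"
            if has_restricted_perm:
                sources.extend(OPENSEARCH_SOURCES_RESTRICTED)
        return {"bool": {"should": [{"match_phrase": {"src": s}} for s in sources]}}

    print(build_source_filter(None, has_restricted_perm=True))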
@@ -378,40 +388,40 @@ def query_results(
}
}
}
date_query = True
add_top.append(range_query)

if date_query:
if settings.DELAY_RESULTS:
if source not in settings.SAFE_SOURCES:
if request.user.has_perm("core.bypass_delay"):
add_top.append(range_query)
else:
delay_as_ts = datetime.now() - timedelta(
days=settings.DELAY_DURATION
)
lt_as_ts = datetime.strptime(
range_query["range"]["ts"]["lt"], "%Y-%m-%dT%H:%MZ"
)
if lt_as_ts > delay_as_ts:
range_query["range"]["ts"][
"lt"
] = f"now-{settings.DELAY_DURATION}d"
add_top.append(range_query)
else:
add_top.append(range_query)
else:
if settings.DELAY_RESULTS:
if source not in settings.SAFE_SOURCES:
if not request.user.has_perm("core.bypass_delay"):
range_query = {
"range": {
"ts": {
# "gt": ,
"lt": f"now-{settings.DELAY_DURATION}d",
}
}
}
add_top.append(range_query)
# if date_query:
# if settings.DELAY_RESULTS:
# if source not in settings.SAFE_SOURCES:
# if request.user.has_perm("core.bypass_delay"):
# add_top.append(range_query)
# else:
# delay_as_ts = datetime.now() - timedelta(
# days=settings.DELAY_DURATION
# )
# lt_as_ts = datetime.strptime(
# range_query["range"]["ts"]["lt"], "%Y-%m-%dT%H:%MZ"
# )
# if lt_as_ts > delay_as_ts:
# range_query["range"]["ts"][
# "lt"
# ] = f"now-{settings.DELAY_DURATION}d"
# add_top.append(range_query)
# else:
# add_top.append(range_query)
# else:
# if settings.DELAY_RESULTS:
# if source not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_delay"):
# range_query = {
# "range": {
# "ts": {
# # "gt": ,
# "lt": f"now-{settings.DELAY_DURATION}d",
# }
# }
# }
# add_top.append(range_query)

if "sorting" in query_params:
sorting = query_params["sorting"]
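The block commented out above enforced the result delay by clamping the upper bound of the timestamp range. A condensed, self-contained sketch of that logic, assuming DELAY_DURATION is a number of days:

    from datetime import datetime, timedelta

    DELAY_DURATION = 10  # assumed example value, in days

    def apply_delay(range_query, can_bypass_delay):
        # Clamp the "lt" bound so documents newer than the delay window stay hidden.
        if can_bypass_delay:
            return range_query
        lt_as_ts = datetime.strptime(range_query["range"]["ts"]["lt"], "%Y-%m-%dT%H:%MZ")
        if lt_as_ts > datetime.now() - timedelta(days=DELAY_DURATION):
            range_query["range"]["ts"]["lt"] = f"now-{DELAY_DURATION}d"
        return range_query

    recent = datetime.now().strftime("%Y-%m-%dT%H:%MZ")
    print(apply_delay({"range": {"ts": {"lt": recent}}}, can_bypass_delay=False))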
@@ -469,13 +479,13 @@ def query_results(
query_created = True
elif "query_full" in query_params:
query_full = query_params["query_full"]
if request.user.has_perm("core.query_search"):
search_query = construct_query(query_full, size)
query_created = True
else:
message = "You cannot search by query string"
message_class = "danger"
return {"message": message, "class": message_class}
# if request.user.has_perm("core.query_search"):
search_query = construct_query(query_full, size)
query_created = True
# else:
# message = "You cannot search by query string"
# message_class = "danger"
# return {"message": message, "class": message_class}
else:
if custom_query:
search_query = custom_query
@@ -542,9 +552,6 @@ def query_results(
else:
index = settings.OPENSEARCH_INDEX_MAIN

filter = True
if source in settings.SAFE_SOURCES:
filter = False
results = run_main_query(
client,
request.user,  # passed through run_main_query to filter_blacklisted
@@ -552,7 +559,6 @@ def query_results(
custom_query=True,
index=index,
size=size,
filter=filter,
)
if not results:
return False
@@ -585,18 +591,18 @@ def query_results(
dedup_fields = ["msg", "nick", "ident", "host", "net", "channel"]
results_parsed = dedup_list(results_parsed, dedup_fields)

if source not in settings.SAFE_SOURCES:
if settings.ENCRYPTION:
encrypt_list(request.user, results_parsed, settings.ENCRYPTION_KEY)
# if source not in settings.SAFE_SOURCES:
# if settings.ENCRYPTION:
# encrypt_list(request.user, results_parsed, settings.ENCRYPTION_KEY)

if settings.HASHING:
hash_list(request.user, results_parsed)
# if settings.HASHING:
# hash_list(request.user, results_parsed)

if settings.OBFUSCATION:
obfuscate_list(request.user, results_parsed)
# if settings.OBFUSCATION:
# obfuscate_list(request.user, results_parsed)

if settings.RANDOMISATION:
randomise_list(request.user, results_parsed)
# if settings.RANDOMISATION:
# randomise_list(request.user, results_parsed)

# process_list(results)

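dedup_list is the only post-processing helper this hunk leaves active. The real implementation lives in core.views.helpers and is not shown in this diff; a rough sketch of field-based deduplication in that style, for orientation only:

    def dedup_list(data, check_keys):
        # Keep the first occurrence of each (msg, nick, ...) fingerprint, drop later ones.
        seen, out = set(), []
        for item in data:
            fingerprint = tuple(item.get(key) for key in check_keys)
            if fingerprint not in seen:
                seen.add(fingerprint)
                out.append(item)
        return out

    rows = [{"msg": "hi", "nick": "a"}, {"msg": "hi", "nick": "a"}, {"msg": "yo", "nick": "b"}]
    print(dedup_list(rows, ["msg", "nick"]))  # the duplicate first row is dropped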
@@ -612,14 +618,14 @@ def query_results(
context["exemption"] = results["exemption"]
if query:
context["query"] = query
if settings.DELAY_RESULTS:
if source not in settings.SAFE_SOURCES:
if not request.user.has_perm("core.bypass_delay"):
context["delay"] = settings.DELAY_DURATION
if settings.RANDOMISATION:
if source not in settings.SAFE_SOURCES:
if not request.user.has_perm("core.bypass_randomisation"):
context["randomised"] = True
# if settings.DELAY_RESULTS:
# if source not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_delay"):
# context["delay"] = settings.DELAY_DURATION
# if settings.RANDOMISATION:
# if source not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_randomisation"):
# context["randomised"] = True
return context

@@ -107,16 +107,17 @@ class ContentBlock(models.Model):
class Perms(models.Model):
class Meta:
permissions = (
("bypass_hashing", "Can bypass field hashing"),
("bypass_blacklist", "Can bypass the blacklist"),
("bypass_encryption", "Can bypass field encryption"),
("bypass_obfuscation", "Can bypass field obfuscation"),
("bypass_delay", "Can bypass data delay"),
("bypass_randomisation", "Can bypass data randomisation"),
("bypass_hashing", "Can bypass field hashing"), #
("bypass_blacklist", "Can bypass the blacklist"), #
("bypass_encryption", "Can bypass field encryption"), #
("bypass_obfuscation", "Can bypass field obfuscation"), #
("bypass_delay", "Can bypass data delay"), #
("bypass_randomisation", "Can bypass data randomisation"), #
("post_irc", "Can post to IRC"),
("post_discord", "Can post to Discord"),
("query_search", "Can search with query strings"),
("query_search", "Can search with query strings"), #
("use_insights", "Can use the Insights page"),
("index_int", "Can use the internal index"),
("index_meta", "Can use the meta index"),
("restricted_sources", "Can access restricted sources"),
)
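These Meta permissions are what the views check with user.has_perm("core.<codename>") to decide whether a user may bypass a given redaction step. A small illustration with a stand-in user object (the stub and the example delay value are assumptions, not project code):

    class StubUser:
        # Assumed stand-in for request.user; the real code relies on Django's has_perm.
        def __init__(self, perms):
            self.perms = set(perms)

        def has_perm(self, perm):
            return perm in self.perms

    def delay_days(user, configured_delay=10):  # 10 is an assumed example value
        return 0 if user.has_perm("core.bypass_delay") else configured_delay

    print(delay_days(StubUser({"core.bypass_delay"})))   # 0
    print(delay_days(StubUser({"core.query_search"})))   # 10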
@@ -83,7 +83,8 @@
type="checkbox"
class="switch is-rounded is-info"
{% if params.query_full is not None %}checked="checked"{% else %}none{% endif %}
{% if not perms.core.query_search %}
{% if False %}
{# what are you looking at? #}
disabled
{% endif %}
data-script="on click toggle .is-hidden on #query_full">
@@ -91,7 +92,8 @@
for="full_query">
Full query
</label>
{% if not perms.core.query_search %}
{% if False %}
{# what are you looking at? #}
<span class="tooltiptext tag is-danger is-light">No access</span>
{% endif %}
</div>
@@ -1,14 +1,14 @@
import re
from base64 import b64encode
from random import randint
# import re
# from base64 import b64encode
# from random import randint

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
from cryptography.hazmat.primitives.ciphers.modes import ECB
from django.conf import settings
from siphashc import siphash
from sortedcontainers import SortedSet
# from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
# from cryptography.hazmat.primitives.ciphers.modes import ECB
# from django.conf import settings
# from siphashc import siphash
# from sortedcontainers import SortedSet

from core import r
# from core import r


class SearchDenied:
@@ -62,248 +62,249 @@ def dedup_list(data, check_keys):
# # 1.0805372429895215


def base36encode(number, alphabet="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
"""Converts an integer to a base36 string."""
if not isinstance(number, (int)):
raise TypeError("number must be an integer")
# def base36encode(number, alphabet="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
# """Converts an integer to a base36 string."""
# if not isinstance(number, (int)):
# raise TypeError("number must be an integer")

base36 = ""
sign = ""
# base36 = ""
# sign = ""

if number < 0:
sign = "-"
number = -number
# if number < 0:
# sign = "-"
# number = -number

if 0 <= number < len(alphabet):
return sign + alphabet[number]
# if 0 <= number < len(alphabet):
# return sign + alphabet[number]

while number != 0:
number, i = divmod(number, len(alphabet))
base36 = alphabet[i] + base36
# while number != 0:
# number, i = divmod(number, len(alphabet))
# base36 = alphabet[i] + base36

return sign + base36
# return sign + base36


def base36decode(number):
return int(number, 36)
# def base36decode(number):
# return int(number, 36)

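For reference, the base36 helpers above turn the integer SipHash digests used later in this file into short uppercase tokens. A condensed, runnable version and a round-trip example (the algorithm mirrors the code above; the sample input is arbitrary):

    ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"

    def base36encode(number, alphabet=ALPHABET):
        # Same algorithm as the helper above, condensed for illustration.
        if number == 0:
            return alphabet[0]
        sign, number = ("-", -number) if number < 0 else ("", number)
        digits = ""
        while number:
            number, i = divmod(number, len(alphabet))
            digits = alphabet[i] + digits
        return sign + digits

    token = base36encode(12345678901234567890)
    print(token)           # a 13-character uppercase token, the kind matched by [A-Z0-9]{12,13} below
    print(int(token, 36))  # base36decode round-trip -> 12345678901234567890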
def randomise_list(user, data):
"""
Randomise data in a list of dictionaries.
"""
if user.has_perm("core.bypass_randomisation"):
return
if isinstance(data, list):
for index, item in enumerate(data):
for key, value in item.items():
if key in settings.RANDOMISE_FIELDS:
if isinstance(value, int):
min_val = value - (value * settings.RANDOMISE_RATIO)
max_val = value + (value * settings.RANDOMISE_RATIO)
new_val = randint(int(min_val), int(max_val))
data[index][key] = new_val
elif isinstance(data, dict):
for key, value in data.items():
# if key in settings.RANDOMISE_FIELDS:
if isinstance(value, int):
min_val = value - (value * settings.RANDOMISE_RATIO)
max_val = value + (value * settings.RANDOMISE_RATIO)
new_val = randint(int(min_val), int(max_val))
data[key] = new_val
# def randomise_list(user, data):
# """
# Randomise data in a list of dictionaries.
# """
# if user.has_perm("core.bypass_randomisation"):
# return
# if isinstance(data, list):
# for index, item in enumerate(data):
# for key, value in item.items():
# if key in settings.RANDOMISE_FIELDS:
# if isinstance(value, int):
# min_val = value - (value * settings.RANDOMISE_RATIO)
# max_val = value + (value * settings.RANDOMISE_RATIO)
# new_val = randint(int(min_val), int(max_val))
# data[index][key] = new_val
# elif isinstance(data, dict):
# for key, value in data.items():
# # if key in settings.RANDOMISE_FIELDS:
# if isinstance(value, int):
# min_val = value - (value * settings.RANDOMISE_RATIO)
# max_val = value + (value * settings.RANDOMISE_RATIO)
# new_val = randint(int(min_val), int(max_val))
# data[key] = new_val

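The randomisation being disabled here jitters whitelisted integer fields by a configured ratio so exact counts are not disclosed. A self-contained sketch (the ratio and field list are assumed example values):

    from random import randint

    RANDOMISE_RATIO = 0.2              # assumed example value
    RANDOMISE_FIELDS = ["num_users"]   # assumed example value

    def randomise_row(row):
        # Jitter whitelisted integer fields by +/- RANDOMISE_RATIO, like the helper above.
        for key, value in row.items():
            if key in RANDOMISE_FIELDS and isinstance(value, int):
                low = int(value - value * RANDOMISE_RATIO)
                high = int(value + value * RANDOMISE_RATIO)
                row[key] = randint(low, high)
        return row

    print(randomise_row({"num_users": 100, "nick": "alice"}))  # num_users lands in [80, 120]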
def obfuscate_list(user, data):
"""
Obfuscate data in a list of dictionaries.
"""
if user.has_perm("core.bypass_obfuscation"):
return
for index, item in enumerate(data):
for key, value in item.items():
# Obfuscate a ratio of the field
if key in settings.OBFUSCATE_FIELDS:
length = len(value) - 1
split = int(length * settings.OBFUSCATE_KEEP_RATIO)
first_part = value[:split]
second_part = value[split:]
second_len = len(second_part)
second_part = "*" * second_len
data[index][key] = first_part + second_part
# Obfuscate value based on fields
# Example: 2022-02-02 -> 2022-02-**
# 14:11:12 -> 14:11:**
elif key in settings.OBFUSCATE_FIELDS_SEP:
if "-" in value:
sep = "-"
value_spl = value.split("-")
hide_num = settings.OBFUSCATE_DASH_NUM
elif ":" in value:
sep = ":"
value_spl = value.split(":")
hide_num = settings.OBFUSCATE_COLON_NUM
# def obfuscate_list(user, data):
# """
# Obfuscate data in a list of dictionaries.
# """
# if user.has_perm("core.bypass_obfuscation"):
# return
# for index, item in enumerate(data):
# for key, value in item.items():
# # Obfuscate a ratio of the field
# if key in settings.OBFUSCATE_FIELDS:
# length = len(value) - 1
# split = int(length * settings.OBFUSCATE_KEEP_RATIO)
# first_part = value[:split]
# second_part = value[split:]
# second_len = len(second_part)
# second_part = "*" * second_len
# data[index][key] = first_part + second_part
# # Obfuscate value based on fields
# # Example: 2022-02-02 -> 2022-02-**
# # 14:11:12 -> 14:11:**
# elif key in settings.OBFUSCATE_FIELDS_SEP:
# if "-" in value:
# sep = "-"
# value_spl = value.split("-")
# hide_num = settings.OBFUSCATE_DASH_NUM
# elif ":" in value:
# sep = ":"
# value_spl = value.split(":")
# hide_num = settings.OBFUSCATE_COLON_NUM

first_part = value_spl[:hide_num]
second_part = value_spl[hide_num:]
for index_x, x in enumerate(second_part):
x_len = len(x)
second_part[index_x] = "*" * x_len
result = sep.join([*first_part, *second_part])
data[index][key] = result
for key in settings.COMBINE_FIELDS:
for index, item in enumerate(data):
if key in item:
k1, k2 = settings.COMBINE_FIELDS[key]
if k1 in item and k2 in item:
data[index][key] = item[k1] + item[k2]
# first_part = value_spl[:hide_num]
# second_part = value_spl[hide_num:]
# for index_x, x in enumerate(second_part):
# x_len = len(x)
# second_part[index_x] = "*" * x_len
# result = sep.join([*first_part, *second_part])
# data[index][key] = result
# for key in settings.COMBINE_FIELDS:
# for index, item in enumerate(data):
# if key in item:
# k1, k2 = settings.COMBINE_FIELDS[key]
# if k1 in item and k2 in item:
# data[index][key] = item[k1] + item[k2]

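The separator branch above masks the trailing components of dates and times (2022-02-02 -> 2022-02-**, 14:11:12 -> 14:11:**), while other fields keep only a leading ratio of characters. A trimmed standalone sketch with assumed settings values:

    OBFUSCATE_KEEP_RATIO = 0.5   # assumed example value
    OBFUSCATE_DASH_NUM = 2       # assumed: keep the first two dash-separated parts
    OBFUSCATE_COLON_NUM = 2      # assumed: keep the first two colon-separated parts

    def obfuscate_value(value):
        # Mask either the trailing separated parts or the trailing ratio of the string.
        if "-" in value or ":" in value:
            sep = "-" if "-" in value else ":"
            keep = OBFUSCATE_DASH_NUM if sep == "-" else OBFUSCATE_COLON_NUM
            parts = value.split(sep)
            masked = parts[:keep] + ["*" * len(part) for part in parts[keep:]]
            return sep.join(masked)
        split = int((len(value) - 1) * OBFUSCATE_KEEP_RATIO)
        return value[:split] + "*" * len(value[split:])

    print(obfuscate_value("2022-02-02"))  # 2022-02-**
    print(obfuscate_value("14:11:12"))    # 14:11:**
    print(obfuscate_value("example"))     # exa****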
def hash_list(user, data, hash_keys=False):
"""
Hash a list of dicts or a list with SipHash42.
"""
if user.has_perm("core.bypass_hashing"):
return
cache = "cache.hash"
hash_table = {}
if isinstance(data, dict):
data_copy = [{x: data[x]} for x in data]
else:
data_copy = type(data)((data))
for index, item in enumerate(data_copy):
if "src" in item:
if item["src"] in settings.SAFE_SOURCES:
continue
if isinstance(item, dict):
for key, value in list(item.items()):
if (
key not in settings.WHITELIST_FIELDS
and key not in settings.NO_OBFUSCATE_PARAMS
):
if isinstance(value, int):
value = str(value)
if isinstance(value, bool):
continue
if value is None:
continue
if hash_keys:
hashed = siphash(settings.HASHING_KEY, key)
else:
hashed = siphash(settings.HASHING_KEY, value)
encoded = base36encode(hashed)
if encoded not in hash_table:
if hash_keys:
hash_table[encoded] = key
else:
hash_table[encoded] = value
if hash_keys:
# Rename the dict key
data[encoded] = data.pop(key)
else:
data[index][key] = encoded
elif isinstance(item, str):
hashed = siphash(settings.HASHING_KEY, item)
encoded = base36encode(hashed)
if encoded not in hash_table:
hash_table[encoded] = item
data[index] = encoded
if hash_table:
r.hmset(cache, hash_table)
# def hash_list(user, data, hash_keys=False):
# """
# Hash a list of dicts or a list with SipHash42.
# """
# if user.has_perm("core.bypass_hashing"):
# return
# cache = "cache.hash"
# hash_table = {}
# if isinstance(data, dict):
# data_copy = [{x: data[x]} for x in data]
# else:
# data_copy = type(data)((data))
# for index, item in enumerate(data_copy):
# if "src" in item:
# if item["src"] in settings.SAFE_SOURCES:
# continue
# if isinstance(item, dict):
# for key, value in list(item.items()):
# if (
# key not in settings.WHITELIST_FIELDS
# and key not in settings.NO_OBFUSCATE_PARAMS
# ):
# if isinstance(value, int):
# value = str(value)
# if isinstance(value, bool):
# continue
# if value is None:
# continue
# if hash_keys:
# hashed = siphash(settings.HASHING_KEY, key)
# else:
# hashed = siphash(settings.HASHING_KEY, value)
# encoded = base36encode(hashed)
# if encoded not in hash_table:
# if hash_keys:
# hash_table[encoded] = key
# else:
# hash_table[encoded] = value
# if hash_keys:
# # Rename the dict key
# data[encoded] = data.pop(key)
# else:
# data[index][key] = encoded
# elif isinstance(item, str):
# hashed = siphash(settings.HASHING_KEY, item)
# encoded = base36encode(hashed)
# if encoded not in hash_table:
# hash_table[encoded] = item
# data[index] = encoded
# if hash_table:
# r.hmset(cache, hash_table)

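In short, the disabled hash_list replaced sensitive field values with SipHash-4-2 digests rendered in base36 and stashed the reverse mapping in Redis under "cache.hash" so hash_lookup could later undo it. A condensed sketch with a plain dict standing in for Redis (the 16-character key is an assumed example, not the real settings.HASHING_KEY):

    from siphashc import siphash

    HASHING_KEY = "0123456789ABCDEF"  # assumed example; siphashc expects a 16-character key
    ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    hash_table = {}  # the real helper wrote this mapping to Redis via r.hmset("cache.hash", ...)

    def base36encode(number):
        digits = ""
        while number:
            number, i = divmod(number, len(ALPHABET))
            digits = ALPHABET[i] + digits
        return digits or ALPHABET[0]

    def hash_value(value):
        token = base36encode(siphash(HASHING_KEY, str(value)))
        hash_table[token] = value  # kept server-side so the token can be reversed later
        return token

    token = hash_value("secret-channel")
    print(token, "->", hash_table[token])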
def hash_lookup(user, data_dict, supplementary_data=None):
cache = "cache.hash"
hash_list = SortedSet()
denied = []
for key, value in list(data_dict.items()):
if "source" in data_dict:
if data_dict["source"] in settings.SAFE_SOURCES:
continue
if "src" in data_dict:
if data_dict["src"] in settings.SAFE_SOURCES:
continue
if supplementary_data:
if "source" in supplementary_data:
if supplementary_data["source"] in settings.SAFE_SOURCES:
continue
if key in settings.SEARCH_FIELDS_DENY:
if not user.has_perm("core.bypass_hashing"):
data_dict[key] = SearchDenied(key=key, value=data_dict[key])
denied.append(data_dict[key])
if (
key not in settings.WHITELIST_FIELDS
and key not in settings.NO_OBFUSCATE_PARAMS
):
if not value:
continue
# hashes = re.findall("\|([^\|]*)\|", value) # noqa
if isinstance(value, str):
hashes = re.findall("[A-Z0-9]{12,13}", value)
elif isinstance(value, dict):
hashes = []
for key, value in value.items():
if not value:
continue
hashes_iter = re.findall("[A-Z0-9]{12,13}", value)
for h in hashes_iter:
hashes.append(h)
if not hashes:
# Otherwise the user could inject plaintext search queries
if not user.has_perm("core.bypass_hashing"):
data_dict[key] = SearchDenied(key=key, value=data_dict[key])
denied.append(data_dict[key])
continue
else:
# There are hashes here but there shouldn't be!
if key in settings.TAG_SEARCH_DENY:
data_dict[key] = LookupDenied(key=key, value=data_dict[key])
denied.append(data_dict[key])
continue
# def hash_lookup(user, data_dict, supplementary_data=None):
# cache = "cache.hash"
# hash_list = SortedSet()
# denied = []
# for key, value in list(data_dict.items()):
# if "source" in data_dict:
# if data_dict["source"] in settings.SAFE_SOURCES:
# continue
# if "src" in data_dict:
# if data_dict["src"] in settings.SAFE_SOURCES:
# continue
# if supplementary_data:
# if "source" in supplementary_data:
# if supplementary_data["source"] in settings.SAFE_SOURCES:
# continue
# if key in settings.SEARCH_FIELDS_DENY:
# if not user.has_perm("core.bypass_hashing"):
# data_dict[key] = SearchDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# if (
# key not in settings.WHITELIST_FIELDS
# and key not in settings.NO_OBFUSCATE_PARAMS
# ):
# if not value:
# continue
# # hashes = re.findall("\|([^\|]*)\|", value) # noqa
# if isinstance(value, str):
# hashes = re.findall("[A-Z0-9]{12,13}", value)
# elif isinstance(value, dict):
# hashes = []
# for key, value in value.items():
# if not value:
# continue
# hashes_iter = re.findall("[A-Z0-9]{12,13}", value)
# for h in hashes_iter:
# hashes.append(h)
# if not hashes:
# # Otherwise the user could inject plaintext search queries
# if not user.has_perm("core.bypass_hashing"):
# data_dict[key] = SearchDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# continue
# else:
# # There are hashes here but there shouldn't be!
# if key in settings.TAG_SEARCH_DENY:
# data_dict[key] = LookupDenied(key=key, value=data_dict[key])
# denied.append(data_dict[key])
# continue

for hash in hashes:
hash_list.add(hash)
# for hash in hashes:
# hash_list.add(hash)

if hash_list:
values = r.hmget(cache, *hash_list)
if not values:
return
for index, val in enumerate(values):
if val is None:
values[index] = b"ERR"
values = [x.decode() for x in values]
total = dict(zip(hash_list, values))
for key in data_dict.keys():
for hash in total:
if data_dict[key]:
if isinstance(data_dict[key], str):
if hash in data_dict[key]:
data_dict[key] = data_dict[key].replace(
f"{hash}", total[hash]
)
elif isinstance(data_dict[key], dict):
for k2, v2 in data_dict[key].items():
if hash in v2:
data_dict[key][k2] = v2.replace(f"{hash}", total[hash])
return denied
# if hash_list:
# values = r.hmget(cache, *hash_list)
# if not values:
# return
# for index, val in enumerate(values):
# if val is None:
# values[index] = b"ERR"
# values = [x.decode() for x in values]
# total = dict(zip(hash_list, values))
# for key in data_dict.keys():
# for hash in total:
# if data_dict[key]:
# if isinstance(data_dict[key], str):
# if hash in data_dict[key]:
# data_dict[key] = data_dict[key].replace(
# f"{hash}", total[hash]
# )
# elif isinstance(data_dict[key], dict):
# for k2, v2 in data_dict[key].items():
# if hash in v2:
# data_dict[key][k2] = v2.replace(f"{hash}", total[hash])
# return denied

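hash_lookup, disabled above, did the reverse: it pulled 12-13 character base36 tokens out of user-supplied parameters with a regex and substituted the cached plaintext back in before the query ran. A small sketch with a dict standing in for the r.hmget("cache.hash", ...) call (the example token/value pair is invented):

    import re

    cache = {"2DLVNFJ0EPRO2": "secret-channel"}  # assumed example mapping

    def unhash_params(params):
        # Replace any 12-13 character uppercase tokens in string params with cached plaintext.
        for key, value in params.items():
            if not isinstance(value, str):
                continue
            for token in re.findall("[A-Z0-9]{12,13}", value):
                if token in cache:
                    params[key] = params[key].replace(token, cache[token])
        return params

    print(unhash_params({"channel": "2DLVNFJ0EPRO2", "nick": "alice"}))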
def encrypt_list(user, data, secret):
if user.has_perm("core.bypass_encryption"):
return
cipher = Cipher(algorithms.AES(secret), ECB())
for index, item in enumerate(data):
for key, value in item.items():
if key not in settings.WHITELIST_FIELDS:
encryptor = cipher.encryptor()
if isinstance(value, int):
value = str(value)
if isinstance(value, bool):
continue
if value is None:
continue
decoded = value.encode("utf8", "replace")
length = 16 - (len(decoded) % 16)
decoded += bytes([length]) * length
ct = encryptor.update(decoded) + encryptor.finalize()
final_str = b64encode(ct)
data[index][key] = final_str.decode("utf-8", "replace")
# def encrypt_list(user, data, secret):
# if user.has_perm("core.bypass_encryption"):
# return
# cipher = Cipher(algorithms.AES(secret), ECB())
# for index, item in enumerate(data):
# for key, value in item.items():
# if key not in settings.WHITELIST_FIELDS:
# encryptor = cipher.encryptor()
# if isinstance(value, int):
# value = str(value)
# if isinstance(value, bool):
# continue
# if value is None:
# continue
# decoded = value.encode("utf8", "replace")
# length = 16 - (len(decoded) % 16)
# decoded += bytes([length]) * length
# ct = encryptor.update(decoded) + encryptor.finalize()
# final_str = b64encode(ct)
# data[index][key] = final_str.decode("utf-8", "replace")
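The disabled encrypt_list AES-ECB-encrypts field values with manual block padding and returns them base64-encoded. A standalone sketch of that flow (the 16-byte key is an assumed example, not the real settings.ENCRYPTION_KEY):

    from base64 import b64encode
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms
    from cryptography.hazmat.primitives.ciphers.modes import ECB

    SECRET = b"0123456789ABCDEF"  # assumed 16-byte example key

    def encrypt_value(value, secret=SECRET):
        # Pad to a 16-byte boundary (PKCS#7 style), AES-ECB encrypt, then base64 encode.
        encryptor = Cipher(algorithms.AES(secret), ECB()).encryptor()
        raw = value.encode("utf8", "replace")
        pad = 16 - (len(raw) % 16)
        raw += bytes([pad]) * pad
        return b64encode(encryptor.update(raw) + encryptor.finalize()).decode("utf-8")

    print(encrypt_value("hello world"))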
@@ -1,7 +1,6 @@
import json
import urllib
import uuid
from copy import deepcopy

from django.conf import settings
from django.http import HttpResponse, JsonResponse
@@ -20,9 +19,10 @@ from core.lib.threshold import (
get_chans,
get_users,
)
from core.views.helpers import hash_list, hash_lookup, randomise_list
from core.views.ui.tables import DrilldownTable

# from copy import deepcopy


def parse_dates(dates):
spl = dates.split(" - ")
@@ -313,14 +313,14 @@ class DrilldownContextModal(APIView):
query_params[key] = None

# Lookup the hash values but don't disclose them to the user
if settings.HASHING:
if query_params["source"] not in settings.SAFE_SOURCES:
SAFE_PARAMS = deepcopy(query_params)
hash_lookup(request.user, SAFE_PARAMS)
else:
SAFE_PARAMS = deepcopy(query_params)
else:
SAFE_PARAMS = query_params
# if settings.HASHING:
# if query_params["source"] not in settings.SAFE_SOURCES:
# SAFE_PARAMS = deepcopy(query_params)
# hash_lookup(request.user, SAFE_PARAMS)
# else:
# SAFE_PARAMS = deepcopy(query_params)
# else:
# SAFE_PARAMS = query_params

type = None
if request.user.is_superuser:
@@ -328,12 +328,12 @@ class DrilldownContextModal(APIView):
type = query_params["type"]
if type == "znc":
query_params["channel"] = "*status"
SAFE_PARAMS["channel"] = "*status"
# SAFE_PARAMS["channel"] = "*status"

if type in ["query", "notice"]:
nicks_sensitive = [
SAFE_PARAMS["channel"],
SAFE_PARAMS["nick"],
query_params["channel"],
query_params["nick"],
] # UNSAFE
# nicks = [query_params["channel"], query_params["nick"]]
query = True
@@ -344,17 +344,17 @@ class DrilldownContextModal(APIView):
and not type == "query"
):
query_params["index"] = "main"
SAFE_PARAMS["index"] = "main"
# SAFE_PARAMS["index"] = "main"

if query_params["type"] in ["znc", "auth"]:
query = True

if not request.user.is_superuser:
query_params["index"] = "main"
SAFE_PARAMS["index"] = "main"
# SAFE_PARAMS["index"] = "main"

query_params["sorting"] = "desc"
SAFE_PARAMS["sorting"] = "desc"
# SAFE_PARAMS["sorting"] = "desc"

annotate = False
if query_params["source"] == "irc":
@@ -363,10 +363,10 @@ class DrilldownContextModal(APIView):
# Create the query with the context helper
search_query = construct_query(
query_params["index"],
SAFE_PARAMS["net"],
SAFE_PARAMS["channel"],
query_params["net"],
query_params["channel"],
query_params["source"],
SAFE_PARAMS["num"],
query_params["num"],
size,
type=type,
nicks=nicks_sensitive,
@@ -374,7 +374,7 @@ class DrilldownContextModal(APIView):

results = query_results(
request,
SAFE_PARAMS,
query_params,
annotate=annotate,
custom_query=search_query,
reverse=True,
@@ -384,15 +384,15 @@ class DrilldownContextModal(APIView):
if "message" in results:
return render(request, self.template_name, results)

if settings.HASHING: # we probably want to see the tokens
if query_params["source"] not in settings.SAFE_SOURCES:
if not request.user.has_perm("core.bypass_hashing"):
for index, item in enumerate(results["object_list"]):
if "tokens" in item:
results["object_list"][index]["msg"] = results[
"object_list"
][index].pop("tokens")
# item["msg"] = item.pop("tokens")
# if settings.HASHING: # we probably want to see the tokens
# if query_params["source"] not in settings.SAFE_SOURCES:
# if not request.user.has_perm("core.bypass_hashing"):
# for index, item in enumerate(results["object_list"]):
# if "tokens" in item:
# results["object_list"][index]["msg"] = results[
# "object_list"
# ][index].pop("tokens")
# # item["msg"] = item.pop("tokens")

# Make the time nicer
# for index, item in enumerate(results["object_list"]):
@@ -446,37 +446,35 @@ class ThresholdInfoModal(APIView):

# SAFE BLOCK #
# Lookup the hash values but don't disclose them to the user
if settings.HASHING:
SAFE_PARAMS = request.data.dict()
hash_lookup(request.user, SAFE_PARAMS)
safe_net = SAFE_PARAMS["net"]
safe_nick = SAFE_PARAMS["nick"]
safe_channel = SAFE_PARAMS["channel"]
channels = get_chans(safe_net, [safe_nick])
users = get_users(safe_net, [safe_channel])
num_users = annotate_num_users(safe_net, channels)
num_chans = annotate_num_chans(safe_net, users)
# if settings.HASHING:
# SAFE_PARAMS = request.data.dict()
# hash_lookup(request.user, SAFE_PARAMS)

channels = get_chans(net, [nick])
users = get_users(net, [nick])
num_users = annotate_num_users(net, channels)
num_chans = annotate_num_chans(net, users)
if channels:
inter_users = get_users(safe_net, channels)
inter_users = get_users(net, channels)
else:
inter_users = []
if users:
inter_chans = get_chans(safe_net, users)
inter_chans = get_chans(net, users)
else:
inter_chans = []
if settings.HASHING:
hash_list(request.user, inter_chans)
hash_list(request.user, inter_users)
# if settings.HASHING:
# hash_list(request.user, inter_chans)
# hash_list(request.user, inter_users)

hash_list(request.user, num_chans, hash_keys=True)
hash_list(request.user, num_users, hash_keys=True)
# hash_list(request.user, num_chans, hash_keys=True)
# hash_list(request.user, num_users, hash_keys=True)

hash_list(request.user, channels)
hash_list(request.user, users)
# hash_list(request.user, channels)
# hash_list(request.user, users)

if settings.RANDOMISATION:
randomise_list(request.user, num_chans)
randomise_list(request.user, num_users)
# if settings.RANDOMISATION:
# randomise_list(request.user, num_chans)
# randomise_list(request.user, num_users)

# SAFE BLOCK END #