Harden security
@@ -3,6 +3,7 @@ from __future__ import annotations
 from dataclasses import dataclass, field
 from datetime import date, datetime, timezone as dt_timezone
 from decimal import Decimal, InvalidOperation
+import re
 from typing import Any, Callable
 from urllib.parse import urlencode

@@ -20,6 +21,51 @@ from mixins.views import ObjectList
 
 from core.models import Group, Manipulation, Message, Person, PersonIdentifier, Persona
 
+_QUERY_MAX_LEN = 400
+_QUERY_ALLOWED_PATTERN = re.compile(r"[\w\s@\-\+\.:,#/]+", re.UNICODE)
+
+
+def _sanitize_search_query(value: str) -> str:
+    raw = str(value or "").strip()
+    if not raw:
+        return ""
+    trimmed = raw[:_QUERY_MAX_LEN]
+    cleaned = "".join(_QUERY_ALLOWED_PATTERN.findall(trimmed)).strip()
+    return cleaned
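+
+# Illustrative behaviour, worked by hand from the pattern above:
+#   _sanitize_search_query("  report: Q3 #12  ") -> "report: Q3 #12"
+#   _sanitize_search_query("<script>alert(1)</script>") -> "scriptalert1/script"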
+
+
+def _safe_page_number(value: Any) -> int:
+    try:
+        page_value = int(value)
+    except (TypeError, ValueError):
+        return 1
+    return max(1, page_value)
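+
+# Illustrative: _safe_page_number("3") -> 3, _safe_page_number("abc") -> 1,
+# _safe_page_number(-5) -> 1 (page numbers are clamped to a 1-based minimum).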
+
+
+def _safe_query_param(request, key: str, default: str = "") -> str:
+    raw = request.GET.get(key, default)
+    return str(raw or default).strip()
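+
+# Illustrative: a missing, None, or empty parameter falls back to the default,
+# e.g. _safe_query_param(request, "sort_mode", "relevance") -> "relevance".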
+
+
+def _sanitize_query_state(raw: dict[str, Any]) -> dict[str, str]:
+    cleaned: dict[str, str] = {}
+    for key, value in (raw or {}).items():
+        key_text = str(key or "").strip()
+        if not key_text or len(key_text) > 80:
+            continue
+        value_text = str(value or "").strip()
+        if not value_text:
+            continue
+        if key_text in {"q", "query"}:
+            value_text = _sanitize_search_query(value_text)
+        elif key_text == "page":
+            value_text = str(_safe_page_number(value_text))
+        else:
+            value_text = value_text[:200]
+        if value_text:
+            cleaned[key_text] = value_text
+    return cleaned
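+
+# Illustrative: _sanitize_query_state({"q": "a<b>c", "page": "0", "": "x"})
+# -> {"q": "abc", "page": "1"} (empty keys dropped, page clamped, query cleaned).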
+
+
+def _context_type(request_type: str) -> str:
+    return "modal" if request_type == "page" else request_type

@@ -561,12 +607,14 @@ class OSINTListBase(ObjectList):
         return lookups
 
     def _query_dict(self) -> dict[str, Any]:
-        return {k: v for k, v in self.request.GET.items() if v not in {"", None}}
+        return _sanitize_query_state(
+            {k: v for k, v in self.request.GET.items() if v not in {"", None}}
+        )
 
     def _apply_list_search(
         self, queryset: models.QuerySet, scope: OsintScopeConfig
     ) -> models.QuerySet:
-        query = self.request.GET.get("q", "").strip()
+        query = _sanitize_search_query(self.request.GET.get("q", ""))
         if not query:
             return queryset

@@ -721,14 +769,16 @@ class OSINTListBase(ObjectList):
         }
 
         if page_obj.has_previous():
+            previous_page = _safe_page_number(page_obj.previous_page_number())
             pagination["previous_url"] = _url_with_query(
                 list_url,
-                _merge_query(query_state, page=page_obj.previous_page_number()),
+                {"page": previous_page},
             )
         if page_obj.has_next():
+            next_page = _safe_page_number(page_obj.next_page_number())
             pagination["next_url"] = _url_with_query(
                 list_url,
-                _merge_query(query_state, page=page_obj.next_page_number()),
+                {"page": next_page},
             )
 
         for entry in page_obj.paginator.get_elided_page_range(page_obj.number):

@@ -742,7 +792,7 @@ class OSINTListBase(ObjectList):
                     "current": entry == page_obj.number,
                     "url": _url_with_query(
                         list_url,
-                        _merge_query(query_state, page=entry),
+                        {"page": _safe_page_number(entry)},
                     ),
                 }
             )

@@ -834,7 +884,7 @@ class OSINTSearch(LoginRequiredMixin, View):
         S - Size, I - Index, Q - Query, T - Tags, S - Source, R - Ranges,
         S - Sort, S - Sentiment, A - Annotate, D - Dedup, R - Reverse.
         """
-        query = str(request.GET.get("q") or "").strip()
+        query = _sanitize_search_query(_safe_query_param(request, "q", ""))
         tags = tuple(
             token[4:].strip()
             for token in query.split()
@@ -845,15 +895,16 @@ class OSINTSearch(LoginRequiredMixin, View):
             index=self._scope_key(request.GET.get("scope")),
             query=query,
             tags=tags,
-            source=str(request.GET.get("source") or "all").strip().lower() or "all",
-            date_from=str(request.GET.get("date_from") or "").strip(),
-            date_to=str(request.GET.get("date_to") or "").strip(),
-            sort_mode=str(request.GET.get("sort_mode") or "relevance").strip().lower(),
-            sentiment_min=str(request.GET.get("sentiment_min") or "").strip(),
-            sentiment_max=str(request.GET.get("sentiment_max") or "").strip(),
-            annotate=str(request.GET.get("annotate") or "1").strip() not in {"0", "false", "off"},
-            dedup=str(request.GET.get("dedup") or "").strip() in {"1", "true", "on"},
-            reverse=str(request.GET.get("reverse") or "").strip() in {"1", "true", "on"},
+            source=_safe_query_param(request, "source", "all").lower() or "all",
+            date_from=_safe_query_param(request, "date_from", ""),
+            date_to=_safe_query_param(request, "date_to", ""),
+            sort_mode=_safe_query_param(request, "sort_mode", "relevance").lower(),
+            sentiment_min=_safe_query_param(request, "sentiment_min", ""),
+            sentiment_max=_safe_query_param(request, "sentiment_max", ""),
+            annotate=_safe_query_param(request, "annotate", "1")
+            not in {"0", "false", "off"},
+            dedup=_safe_query_param(request, "dedup", "") in {"1", "true", "on"},
+            reverse=_safe_query_param(request, "reverse", "") in {"1", "true", "on"},
         )
 
     def _parse_date_boundaries(self, plan: "OSINTSearch.SearchPlan") -> tuple[datetime | None, datetime | None]:
@@ -1069,7 +1120,9 @@ class OSINTSearch(LoginRequiredMixin, View):
         return "all"
 
     def _query_state(self, request) -> dict[str, Any]:
-        return {k: v for k, v in request.GET.items() if v not in {None, ""}}
+        return _sanitize_query_state(
+            {k: v for k, v in request.GET.items() if v not in {None, ""}}
+        )
 
     def _apply_common_filters(
         self,
@@ -1359,14 +1412,16 @@ class OSINTSearch(LoginRequiredMixin, View):
         }
 
         if page_obj.has_previous():
+            previous_page = _safe_page_number(page_obj.previous_page_number())
             pagination["previous_url"] = _url_with_query(
                 list_url,
-                _merge_query(query_state, page=page_obj.previous_page_number()),
+                {"page": previous_page},
             )
         if page_obj.has_next():
+            next_page = _safe_page_number(page_obj.next_page_number())
             pagination["next_url"] = _url_with_query(
                 list_url,
-                _merge_query(query_state, page=page_obj.next_page_number()),
+                {"page": next_page},
             )
 
         for entry in page_obj.paginator.get_elided_page_range(page_obj.number):

@@ -1380,7 +1435,7 @@ class OSINTSearch(LoginRequiredMixin, View):
                     "current": entry == page_obj.number,
                     "url": _url_with_query(
                         list_url,
-                        _merge_query(query_state, page=entry),
+                        {"page": _safe_page_number(entry)},
                     ),
                 }
             )

@@ -17,6 +17,28 @@ from core.presence import latest_state_for_people
 from core.views.manage.permissions import SuperUserRequiredMixin
 
 
+def _safe_json_list(text_value):
+    try:
+        payload = orjson.loads(text_value)
+    except orjson.JSONDecodeError:
+        return []
+    return payload if isinstance(payload, list) else []
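+
+# Illustrative: _safe_json_list('[1, "a"]') -> [1, "a"], while a non-list
+# payload or a parse error fails closed: _safe_json_list('{"a": 1}') -> [].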
+
+
+def _sanitize_signal_rows(rows):
+    safe_rows = []
+    for row in rows:
+        if not isinstance(row, dict):
+            continue
+        safe_row = {}
+        for key, value in row.items():
+            if isinstance(key, str) and len(key) <= 100:
+                if isinstance(value, (str, int, float, bool)) or value is None:
+                    safe_row[key] = value
+        safe_rows.append(safe_row)
+    return safe_rows
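+
+# Illustrative: _sanitize_signal_rows([{"name": "a", "meta": {"x": 1}}, "junk"])
+# -> [{"name": "a"}]: non-dict rows are dropped, nested values are stripped.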
+
+
 class CustomObjectRead(ObjectRead):
     def post(self, request, *args, **kwargs):
         self.request = request
@@ -171,21 +193,28 @@ class SignalContactsList(SuperUserRequiredMixin, ObjectList):
     list_url_args = ["type", "pk"]
 
     def get_queryset(self, *args, **kwargs):
         # url = signal:8080/v1/accounts
         # /v1/configuration/{number}/settings
         # /v1/identities/{number}
         # /v1/contacts/{number}
         # response = requests.get(
         #     f"http://signal:8080/v1/configuration/{self.kwargs['pk']}/settings"
         # )
         # config = orjson.loads(response.text)
 
         base = getattr(settings, "SIGNAL_HTTP_URL", "http://signal:8080").rstrip("/")
-        response = requests.get(f"{base}/v1/identities/{self.kwargs['pk']}")
-        identities = orjson.loads(response.text)
+        try:
+            response = requests.get(
+                f"{base}/v1/identities/{self.kwargs['pk']}", timeout=15
+            )
+            response.raise_for_status()
+            identities = _sanitize_signal_rows(response.json() or [])
+        except requests.RequestException:
+            identities = []
+        except ValueError:
+            identities = []
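+        # Fail closed: timeouts, HTTP errors, and malformed JSON all yield [].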
 
-        response = requests.get(f"{base}/v1/contacts/{self.kwargs['pk']}")
-        contacts = orjson.loads(response.text)
+        try:
+            response = requests.get(
+                f"{base}/v1/contacts/{self.kwargs['pk']}", timeout=15
+            )
+            response.raise_for_status()
+            contacts = _sanitize_signal_rows(response.json() or [])
+        except requests.RequestException:
+            contacts = []
+        except ValueError:
+            contacts = []
 
         # add identities to contacts
         for contact in contacts: