Work on fixing bugs and reformatting

2026-02-16 16:01:17 +00:00
parent 8ca1695fab
commit 3f82c27ab9
32 changed files with 1100 additions and 442 deletions

View File

@@ -4,8 +4,9 @@ import hashlib
import json
import re
import time
from datetime import datetime
from datetime import timezone as dt_timezone
from difflib import SequenceMatcher
from datetime import datetime, timezone as dt_timezone
from urllib.parse import quote_plus, urlencode, urlparse
from asgiref.sync import async_to_sync
@@ -40,7 +41,11 @@ from core.models import (
WorkspaceConversation,
)
from core.realtime.typing_state import get_person_typing_state
from core.views.workspace import INSIGHT_METRICS, _build_engage_payload, _parse_draft_options
from core.views.workspace import (
INSIGHT_METRICS,
_build_engage_payload,
_parse_draft_options,
)
COMPOSE_WS_TOKEN_SALT = "compose-ws"
COMPOSE_ENGAGE_TOKEN_SALT = "compose-engage"
@@ -129,7 +134,9 @@ def _extract_urls(text_value: str) -> list[str]:
def _is_url_only_text(text_value: str) -> bool:
lines = [line.strip() for line in str(text_value or "").splitlines() if line.strip()]
lines = [
line.strip() for line in str(text_value or "").splitlines() if line.strip()
]
if not lines:
return False
return all(bool(URL_PATTERN.fullmatch(line)) for line in lines)
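A quick usage sketch for the helper above, assuming URL_PATTERN fully matches one absolute URL per line:

print(_is_url_only_text("https://a.example\nhttps://b.example"))  # True
print(_is_url_only_text("look: https://a.example"))               # False
print(_is_url_only_text(""))                                      # False: no lines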
@@ -150,10 +157,14 @@ def _is_xmpp_share_url(url_value: str) -> bool:
return False
parsed = urlparse(url_value)
host = str(parsed.netloc or "").strip().lower()
configured = str(
getattr(settings, "XMPP_UPLOAD_SERVICE", "")
or getattr(settings, "XMPP_UPLOAD_JID", "")
).strip().lower()
configured = (
str(
getattr(settings, "XMPP_UPLOAD_SERVICE", "")
or getattr(settings, "XMPP_UPLOAD_JID", "")
)
.strip()
.lower()
)
if not configured:
return False
configured_host = configured
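The check ultimately compares the share URL's host against the host derived from the configured upload service. A minimal sketch of that comparison, with the configured value standing in for settings.XMPP_UPLOAD_SERVICE / XMPP_UPLOAD_JID (an assumption, since the remaining normalisation lines are not shown in this hunk):

from urllib.parse import urlparse

configured_host = "upload.example.org"  # assumed settings value
host = str(urlparse("https://upload.example.org/f/abc.png").netloc).strip().lower()
print(host == configured_host)  # True -> treated as an XMPP share URL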
@@ -200,15 +211,21 @@ def _extract_attachment_image_urls(blob) -> list[str]:
return urls
if isinstance(blob, dict):
content_type = str(
blob.get("content_type")
or blob.get("contentType")
or blob.get("mime_type")
or blob.get("mimetype")
or ""
).strip().lower()
content_type = (
str(
blob.get("content_type")
or blob.get("contentType")
or blob.get("mime_type")
or blob.get("mimetype")
or ""
)
.strip()
.lower()
)
filename = str(blob.get("filename") or blob.get("fileName") or "").strip()
image_hint = content_type.startswith("image/") or _looks_like_image_name(filename)
image_hint = content_type.startswith("image/") or _looks_like_image_name(
filename
)
direct_urls = []
for key in ("url", "source_url", "download_url", "proxy_url", "href", "uri"):
@@ -264,7 +281,9 @@ def _attachment_image_urls_by_message(messages):
).order_by("ts")
for event in linked_events:
legacy_id = str((event.raw_payload_ref or {}).get("legacy_message_id") or "").strip()
legacy_id = str(
(event.raw_payload_ref or {}).get("legacy_message_id") or ""
).strip()
if not legacy_id:
continue
urls = _uniq_ordered(
@@ -296,9 +315,7 @@ def _attachment_image_urls_by_message(messages):
continue
msg_ts = int(msg.ts or 0)
candidates = [
event
for event in fallback_list
if abs(int(event.ts or 0) - msg_ts) <= 3000
event for event in fallback_list if abs(int(event.ts or 0) - msg_ts) <= 3000
]
if not candidates:
continue
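The fallback pairs attachment events to messages by timestamp proximity, keeping only events within 3 seconds. The window check in isolation:

msg_ts = 1_700_000_010_000
event_ts_list = [1_700_000_008_500, 1_700_000_014_000]
candidates = [ts for ts in event_ts_list if abs(ts - msg_ts) <= 3000]
print(candidates)  # keeps the event 1.5 s away; drops the one 4 s away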
@@ -322,8 +339,51 @@ def _serialize_message(msg: Message) -> dict:
and _is_url_only_text(text_value)
and all(_looks_like_image_url(url) for url in image_urls)
)
display_text = text_value if text_value.strip() else ("(no text)" if not image_url else "")
display_text = (
text_value if text_value.strip() else ("(no text)" if not image_url else "")
)
author = str(msg.custom_author or "").strip()
is_outgoing = _is_outgoing(msg)
# Determine source service for display: prefer explicit session identifier service
source_service = "web"
try:
if getattr(msg, "session", None) and getattr(msg.session, "identifier", None):
svc = str(msg.session.identifier.service or "").strip().lower()
if svc:
source_service = svc
except Exception:
pass
from core.util import logs as util_logs
logger = util_logs.get_logger("compose")
logger.info(
f"[serialize_message] id={msg.id} author={author} is_outgoing={is_outgoing} source_service={source_service}"
)
# For outgoing messages sent from web UI, label as "Web Chat".
# For incoming messages, use the session's service name (Xmpp, Signal, Whatsapp, etc).
# But if source_service is still "web" and message is incoming, it may be a data issue—
# don't label it as "Web Chat" since that's misleading.
if is_outgoing:
source_label = "Web Chat"
else:
# Incoming message: use service-specific labels
service_labels = {
"xmpp": "XMPP",
"whatsapp": "WhatsApp",
"signal": "Signal",
"instagram": "Instagram",
"web": "External", # Fallback if service not identified
}
source_label = service_labels.get(
source_service, source_service.title() if source_service else "Unknown"
)
# Ensure source_label is never empty for UI rendering
if not source_label:
source_label = "Unknown"
delivered_ts = int(msg.delivered_ts or 0)
read_ts = int(msg.read_ts or 0)
delivered_display = _format_ts_label(int(delivered_ts)) if delivered_ts else ""
@@ -331,6 +391,17 @@ def _serialize_message(msg: Message) -> dict:
ts_val = int(msg.ts or 0)
delivered_delta = int(delivered_ts - ts_val) if delivered_ts and ts_val else None
read_delta = int(read_ts - ts_val) if read_ts and ts_val else None
# Human friendly delta strings
delivered_delta_display = (
_format_gap_duration(delivered_delta) if delivered_delta is not None else ""
)
read_delta_display = (
_format_gap_duration(read_delta) if read_delta is not None else ""
)
# Receipt payload and metadata
receipt_payload = msg.receipt_payload or {}
read_source_service = str(msg.read_source_service or "").strip()
read_by_identifier = str(msg.read_by_identifier or "").strip()
return {
"id": str(msg.id),
@@ -343,12 +414,19 @@ def _serialize_message(msg: Message) -> dict:
"hide_text": hide_text,
"author": author,
"outgoing": _is_outgoing(msg),
"source_service": source_service,
"source_label": source_label,
"delivered_ts": delivered_ts,
"read_ts": read_ts,
"delivered_display": delivered_display,
"read_display": read_display,
"delivered_delta": delivered_delta,
"read_delta": read_delta,
"delivered_delta_display": delivered_delta_display,
"read_delta_display": read_delta_display,
"receipt_payload": receipt_payload,
"read_source_service": read_source_service,
"read_by_identifier": read_by_identifier,
}
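The new delta fields are millisecond gaps between the send timestamp and the delivery/read receipts, rendered through _format_gap_duration. That helper's body is not part of this diff; a minimal sketch under the assumption that it renders a millisecond gap as a short label:

def _format_gap_duration(delta_ms: int) -> str:
    # Hypothetical body; the real helper is referenced but not shown here.
    seconds = max(int(delta_ms), 0) // 1000
    if seconds < 60:
        return f"{seconds}s"
    minutes, seconds = divmod(seconds, 60)
    if minutes < 60:
        return f"{minutes}m {seconds}s"
    hours, minutes = divmod(minutes, 60)
    return f"{hours}h {minutes}m"

ts_val, delivered_ts = 1_700_000_000_000, 1_700_000_004_500
print(_format_gap_duration(delivered_ts - ts_val))  # "4s" for a 4500 ms gap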
@@ -510,9 +588,8 @@ def _workspace_conversation_for_person(user, person):
def _counterpart_identifiers_for_person(user, person):
if person is None:
return set()
values = (
PersonIdentifier.objects.filter(user=user, person=person)
.values_list("identifier", flat=True)
values = PersonIdentifier.objects.filter(user=user, person=person).values_list(
"identifier", flat=True
)
return {str(value or "").strip() for value in values if str(value or "").strip()}
@@ -598,13 +675,17 @@ def _build_thread_metric_fragments(conversation):
def _build_gap_fragment(is_outgoing_reply, lag_ms, snapshot):
metric_slug = "outbound_response_score" if is_outgoing_reply else "inbound_response_score"
metric_slug = (
"outbound_response_score" if is_outgoing_reply else "inbound_response_score"
)
copy = _metric_copy(metric_slug, "Response Score")
score_value = None
if snapshot is not None:
score_value = getattr(
snapshot,
"outbound_response_score" if is_outgoing_reply else "inbound_response_score",
"outbound_response_score"
if is_outgoing_reply
else "inbound_response_score",
None,
)
if score_value is None:
@@ -651,7 +732,9 @@ def _serialize_messages_with_artifacts(
item["metric_fragments"] = []
counterpart_identifiers = set(counterpart_identifiers or [])
snapshot = conversation.metric_snapshots.first() if conversation is not None else None
snapshot = (
conversation.metric_snapshots.first() if conversation is not None else None
)
prev_msg = seed_previous
prev_ts = int(prev_msg.ts or 0) if prev_msg is not None else None
@@ -663,7 +746,9 @@ def _serialize_messages_with_artifacts(
for idx, msg in enumerate(rows):
current_ts = int(msg.ts or 0)
current_outgoing = _message_is_outgoing_for_analysis(msg, counterpart_identifiers)
current_outgoing = _message_is_outgoing_for_analysis(
msg, counterpart_identifiers
)
if (
prev_msg is not None
and prev_ts is not None
@@ -680,7 +765,9 @@ def _serialize_messages_with_artifacts(
prev_outgoing = current_outgoing
if serialized:
serialized[-1]["metric_fragments"] = _build_thread_metric_fragments(conversation)
serialized[-1]["metric_fragments"] = _build_thread_metric_fragments(
conversation
)
return serialized
@@ -770,12 +857,7 @@ def _build_glance_items(serialized_messages, person_id=None):
def _owner_name(user) -> str:
return (
user.first_name
or user.get_full_name().strip()
or user.username
or "Me"
)
return user.first_name or user.get_full_name().strip() or user.username or "Me"
def _compose_ws_token(user_id, service, identifier, person_id):
@@ -789,7 +871,9 @@ def _compose_ws_token(user_id, service, identifier, person_id):
return signing.dumps(payload, salt=COMPOSE_WS_TOKEN_SALT)
def _compose_ai_cache_key(kind, user_id, service, identifier, person_id, last_ts, limit):
def _compose_ai_cache_key(
kind, user_id, service, identifier, person_id, last_ts, limit
):
raw = "|".join(
[
str(kind or ""),
@@ -825,7 +909,9 @@ def _engage_body_only(value):
def _messages_for_ai(user, person_identifier, limit):
if person_identifier is None:
return []
session, _ = ChatSession.objects.get_or_create(user=user, identifier=person_identifier)
session, _ = ChatSession.objects.get_or_create(
user=user, identifier=person_identifier
)
rows = list(
Message.objects.filter(user=user, session=session)
.select_related("session", "session__identifier", "session__identifier__person")
@@ -949,7 +1035,9 @@ def _trend_meta(current, previous, higher_is_better=True):
improves = is_up if higher_is_better else not is_up
return {
"direction": "up" if is_up else "down",
"icon": "fa-solid fa-arrow-trend-up" if is_up else "fa-solid fa-arrow-trend-down",
"icon": "fa-solid fa-arrow-trend-up"
if is_up
else "fa-solid fa-arrow-trend-down",
"class_name": "has-text-success" if improves else "has-text-danger",
"meaning": "Improving signal" if improves else "Risk signal",
}
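A usage sketch for _trend_meta, assuming is_up reflects current > previous (that comparison is not shown in this hunk):

meta = _trend_meta(current=62, previous=55, higher_is_better=True)
# expected: direction="up", icon="fa-solid fa-arrow-trend-up",
#          class_name="has-text-success", meaning="Improving signal"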
@@ -1443,7 +1531,9 @@ def _manual_contact_rows(user):
if key in seen:
return
seen.add(key)
urls = _compose_urls(service_key, identifier_value, person.id if person else None)
urls = _compose_urls(
service_key, identifier_value, person.id if person else None
)
linked_person_name = person.name if person else ""
detected = _clean_detected_name(detected_name or account or "")
person_name = linked_person_name or detected or identifier_value
@@ -1502,7 +1592,9 @@ def _manual_contact_rows(user):
person=(linked.person if linked else None),
source="signal_chat",
account=str(chat.account or ""),
detected_name=_clean_detected_name(chat.source_name or chat.account or ""),
detected_name=_clean_detected_name(
chat.source_name or chat.account or ""
),
)
whatsapp_links = {
@@ -1529,7 +1621,9 @@ def _manual_contact_rows(user):
continue
if _normalize_contact_key(candidate) in wa_account_keys:
continue
detected_name = _clean_detected_name(item.get("name") or item.get("chat") or "")
detected_name = _clean_detected_name(
item.get("name") or item.get("chat") or ""
)
if detected_name.lower() == "linked account":
continue
linked = whatsapp_links.get(candidate)
@@ -1572,7 +1666,10 @@ def _recent_manual_contacts(
current_person_id = str(current_person.id) if current_person else ""
row_by_key = {
(str(row.get("service") or "").strip().lower(), str(row.get("identifier") or "").strip()): row
(
str(row.get("service") or "").strip().lower(),
str(row.get("identifier") or "").strip(),
): row
for row in all_rows
}
by_person_service = {}
@@ -1716,8 +1813,12 @@ def _recent_manual_contacts(
seen_unknown.add(unknown_key)
row["service_label"] = _service_label(service_key)
for svc in ("signal", "whatsapp", "instagram", "xmpp"):
row[f"{svc}_identifier"] = identifier_value if svc == service_key else ""
row[f"{svc}_compose_url"] = row.get("compose_url") if svc == service_key else ""
row[f"{svc}_identifier"] = (
identifier_value if svc == service_key else ""
)
row[f"{svc}_compose_url"] = (
row.get("compose_url") if svc == service_key else ""
)
row[f"{svc}_compose_widget_url"] = (
row.get("compose_widget_url") if svc == service_key else ""
)
@@ -1855,7 +1956,9 @@ def _panel_context(
for service_key in sorted(by_service.keys(), key=_service_order):
identifier_value = by_service[service_key]
option_urls = _compose_urls(service_key, identifier_value, base["person"].id)
option_urls = _compose_urls(
service_key, identifier_value, base["person"].id
)
platform_options.append(
{
"service": service_key,
@@ -2122,7 +2225,9 @@ class ComposeContactMatch(LoginRequiredMixin, View):
row.save(update_fields=["person"])
message = f"Re-linked {identifier} ({service}) to {person.name}."
else:
message = f"{identifier} ({service}) is already linked to {person.name}."
message = (
f"{identifier} ({service}) is already linked to {person.name}."
)
linked_companions = 0
skipped_companions = 0
@@ -2247,7 +2352,8 @@ class ComposeThread(LoginRequiredMixin, View):
user=request.user,
identifier=base["person_identifier"],
)
session_ids = list({*session_ids, int(session.id)})
# Don't convert UUIDs to int; keep them as UUIDs for the filter query
session_ids = list({*session_ids, session.id})
if session_ids:
base_queryset = Message.objects.filter(
user=request.user,
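The UUID comment above is the crux of the fix: casting a UUID primary key to int produces a value that no longer compares equal to the UUID, so the set and the session filter stop matching. A small illustration, assuming ChatSession.id is a uuid.UUID:

import uuid

u = uuid.uuid4()
print(int(u) == u)       # False: the int form never equals the UUID itself
print(len({u, int(u)}))  # 2: mixing forms duplicates one session id in the set
session_ids = list({u})  # uniform UUIDs feed cleanly into session_id__in=...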
@@ -2264,8 +2370,7 @@ class ComposeThread(LoginRequiredMixin, View):
"session",
"session__identifier",
"session__identifier__person",
)
.order_by("ts")[:limit]
).order_by("ts")[:limit]
)
newest = (
Message.objects.filter(
@@ -2328,6 +2433,7 @@ class ComposeHistorySync(LoginRequiredMixin, View):
values.add(local)
return [value for value in values if value]
@classmethod
def _session_ids_for_scope(
cls,
@@ -2370,12 +2476,13 @@ class ComposeHistorySync(LoginRequiredMixin, View):
unique_ids.append(row_id)
if not unique_ids:
return []
return list(
result = list(
ChatSession.objects.filter(
user=user,
identifier_id__in=unique_ids,
).values_list("id", flat=True)
)
return result
@staticmethod
def _reconcile_duplicate_messages(user, session_ids):
@@ -2417,7 +2524,11 @@ class ComposeHistorySync(LoginRequiredMixin, View):
person = get_object_or_404(Person, id=person_id, user=request.user)
if not identifier and person is None:
return JsonResponse(
{"ok": False, "message": "Missing contact identifier.", "level": "danger"}
{
"ok": False,
"message": "Missing contact identifier.",
"level": "danger",
}
)
base = _context_base(request.user, service, identifier, person)
@@ -2575,6 +2686,44 @@ class ComposeHistorySync(LoginRequiredMixin, View):
)
class ComposeCancelSend(LoginRequiredMixin, View):
def post(self, request):
service = _default_service(request.POST.get("service"))
identifier = str(request.POST.get("identifier") or "").strip()
command_id = str(request.POST.get("command_id") or "").strip()
if not identifier:
return JsonResponse({"ok": False, "error": "missing_identifier"})
# If a specific command_id is supplied, cancel that command only.
if command_id:
ok = transport.cancel_runtime_command(service, command_id)
return JsonResponse({"ok": True, "cancelled": [command_id] if ok else []})
cancelled = transport.cancel_runtime_commands_for_recipient(service, identifier)
return JsonResponse({"ok": True, "cancelled": cancelled})
class ComposeCommandResult(LoginRequiredMixin, View):
"""Return the runtime command result for a queued send (if available).
GET parameters: `service`, `command_id`.
Returns JSON: if pending -> {"pending": True}, else returns the result dict.
"""
def get(self, request):
service = _default_service(request.GET.get("service"))
command_id = str(request.GET.get("command_id") or "").strip()
if not command_id:
return JsonResponse(
{"ok": False, "error": "missing_command_id"}, status=400
)
# Non-blocking check for runtime command result
result = async_to_sync(transport.wait_runtime_command_result)(
service, command_id, timeout=0.1
)
if result is None:
return JsonResponse({"pending": True})
return JsonResponse({"pending": False, "result": result})
class ComposeMediaBlob(LoginRequiredMixin, View):
"""
Serve cached media blobs for authenticated compose image previews.
@@ -2773,21 +2922,23 @@ class ComposeQuickInsights(LoginRequiredMixin, View):
"thread": "",
"last_event": "",
"last_ai_run": "",
"workspace_created": "",
"snapshot_count": 0,
"platform_docs": _metric_copy("platform", "Platform"),
"state_docs": _metric_copy("stability_state", "Participant State"),
"thread_docs": _metric_copy("thread", "Thread"),
"snapshot_docs": {
"calculation": (
"Count of stored workspace metric snapshots for this person."
),
"psychology": (
"More points improve trend reliability; sparse points are "
"best treated as directional signals."
"workspace_created": "",
"snapshot_count": 0,
"platform_docs": _metric_copy("platform", "Platform"),
"state_docs": _metric_copy(
"stability_state", "Participant State"
),
"thread_docs": _metric_copy("thread", "Thread"),
"snapshot_docs": {
"calculation": (
"Count of stored workspace metric snapshots for this person."
),
"psychology": (
"More points improve trend reliability; sparse points are "
"best treated as directional signals."
),
},
},
},
"rows": [],
"docs": [
"Quick Insights needs at least one workspace conversation snapshot.",
@@ -2935,7 +3086,9 @@ class ComposeEngagePreview(LoginRequiredMixin, View):
)
preview = str(payload.get("preview") or "").strip()
outbound = _engage_body_only(payload.get("outbound") or "")
artifact_label = f"{source_kind.title()}: {getattr(source_obj, 'title', '')}"
artifact_label = (
f"{source_kind.title()}: {getattr(source_obj, 'title', '')}"
)
else:
ai_obj = AI.objects.filter(user=request.user).first()
if ai_obj is not None:
@@ -3062,6 +3215,11 @@ class ComposeSend(LoginRequiredMixin, View):
"panel_id": str(panel_id or ""),
}
}
# Optional: include command id to allow client-side cancellation UI
if hasattr(request, "_compose_command_id") and request._compose_command_id:
trigger_payload["composeSendCommandId"] = {
"command_id": str(request._compose_command_id)
}
if ok:
trigger_payload["composeMessageSent"] = {"panel_id": str(panel_id or "")}
response["HX-Trigger"] = json.dumps(trigger_payload)
@@ -3104,12 +3262,48 @@ class ComposeSend(LoginRequiredMixin, View):
)
base = _context_base(request.user, service, identifier, person)
ts = async_to_sync(transport.send_message_raw)(
base["service"],
base["identifier"],
text=text,
attachments=[],
from core.util import logs as util_logs
logger = util_logs.get_logger("compose")
log_prefix = (
f"[ComposeSend] service={base['service']} identifier={base['identifier']}"
)
logger.info(f"{log_prefix} text_len={len(text)} attempting send")
# If runtime is out-of-process, enqueue command and return immediately (non-blocking).
# Expose command id for cancellation so the client can cancel or poll later.
runtime_client = transport.get_runtime_client(base["service"]) or None
logger.info(
f"{log_prefix} runtime_client={type(runtime_client).__name__ if runtime_client else 'None (queued)'}"
)
ts = None
command_id = None
if runtime_client is None:
logger.info(f"{log_prefix} enqueuing runtime command (out-of-process)")
command_id = transport.enqueue_runtime_command(
base["service"],
"send_message_raw",
{"recipient": base["identifier"], "text": text, "attachments": []},
)
logger.info(
f"{log_prefix} command_id={command_id} enqueued, returning immediately"
)
# attach command id to request so _response can include it in HX-Trigger
request._compose_command_id = command_id
# Do NOT wait here — return immediately so the UI doesn't block.
# Record a pending message locally so the thread shows the outgoing message.
ts = int(time.time() * 1000)
else:
# In-process runtime can perform the send synchronously and return a timestamp.
logger.info(f"{log_prefix} calling in-process send_message_raw (blocking)")
ts = async_to_sync(transport.send_message_raw)(
base["service"],
base["identifier"],
text=text,
attachments=[],
)
logger.info(f"{log_prefix} in-process send returned ts={ts}")
# For queued sends we set `ts` to a local timestamp; for in-process sends ts may be False.
if not ts:
return self._response(
request,
@@ -3124,15 +3318,34 @@ class ComposeSend(LoginRequiredMixin, View):
user=request.user,
identifier=base["person_identifier"],
)
Message.objects.create(
logger.info(f"{log_prefix} session_id={session.id}")
# For in-process sends (Signal, etc), ts is a timestamp or True.
# For queued sends (WhatsApp/UR), ts is a local timestamp.
# Set delivered_ts only if we got a real timestamp OR if it's an in-process sync send.
msg_ts = int(ts) if str(ts).isdigit() else int(time.time() * 1000)
delivered_ts = msg_ts if runtime_client is not None else None
msg = Message.objects.create(
user=request.user,
session=session,
sender_uuid="",
text=text,
ts=int(ts) if str(ts).isdigit() else int(time.time() * 1000),
delivered_ts=int(ts) if str(ts).isdigit() else None,
ts=msg_ts,
delivered_ts=delivered_ts,
custom_author="USER",
)
logger.info(
f"{log_prefix} created message id={msg.id} ts={msg_ts} delivered_ts={delivered_ts} custom_author=USER"
)
# If we enqueued, inform the client the message is queued and include command id.
if runtime_client is None:
return self._response(
request,
ok=True,
message="Message queued for sending.",
level="info",
panel_id=panel_id,
)
return self._response(
request,
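The branching above reduces to a single dispatch: enqueue and stamp locally when the runtime is out-of-process, otherwise block on the in-process send. A condensed sketch; the transport helper names come from the diff, but the simplified signatures are assumptions:

import time
from asgiref.sync import async_to_sync
from core.clients import transport

def dispatch_send(service: str, identifier: str, text: str):
    runtime_client = transport.get_runtime_client(service)
    if runtime_client is None:
        # Out-of-process runtime: enqueue and return a local timestamp.
        command_id = transport.enqueue_runtime_command(
            service,
            "send_message_raw",
            {"recipient": identifier, "text": text, "attachments": []},
        )
        return int(time.time() * 1000), command_id  # pollable command id
    # In-process runtime: blocking send returns the transport timestamp.
    ts = async_to_sync(transport.send_message_raw)(
        service, identifier, text=text, attachments=[]
    )
    return ts, None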

View File

@@ -3,8 +3,8 @@ from mixins.views import ObjectCreate, ObjectDelete, ObjectUpdate
from core.forms import GroupForm
from core.models import Group
from core.views.osint import OSINTListBase
from core.util import logs
from core.views.osint import OSINTListBase
log = logs.get_logger(__name__)

View File

@@ -3,8 +3,8 @@ from mixins.views import ObjectCreate, ObjectDelete, ObjectUpdate
from core.forms import ManipulationForm
from core.models import Manipulation
from core.views.osint import OSINTListBase
from core.util import logs
from core.views.osint import OSINTListBase
log = logs.get_logger(__name__)

View File

@@ -17,7 +17,7 @@ from django.urls import reverse
from django.views import View
from mixins.views import ObjectList
from core.models import Group, Manipulation, Persona, Person
from core.models import Group, Manipulation, Person, Persona
def _context_type(request_type: str) -> str:
@@ -82,9 +82,7 @@ def _url_with_query(base_url: str, query: dict[str, Any]) -> str:
return f"{base_url}?{urlencode(params)}"
def _merge_query(
current_query: dict[str, Any], **updates: Any
) -> dict[str, Any]:
def _merge_query(current_query: dict[str, Any], **updates: Any) -> dict[str, Any]:
merged = dict(current_query)
for key, value in updates.items():
if value is None or str(value).strip() == "":
@@ -695,9 +693,7 @@ class OSINTSearch(LoginRequiredMixin, View):
per_page_default = 20
per_page_max = 100
def _field_options(
self, model_cls: type[models.Model]
) -> list[dict[str, str]]:
def _field_options(self, model_cls: type[models.Model]) -> list[dict[str, str]]:
options = []
for field in model_cls._meta.get_fields():
# Skip reverse/accessor relations (e.g. ManyToManyRel) that are not
@@ -768,16 +764,18 @@ class OSINTSearch(LoginRequiredMixin, View):
if isinstance(field, models.ForeignKey):
related_text_field = _preferred_related_text_field(field.related_model)
if related_text_field:
return Q(
**{f"{field_name}__{related_text_field}__icontains": query}
), False
return (
Q(**{f"{field_name}__{related_text_field}__icontains": query}),
False,
)
return Q(**{f"{field_name}__id__icontains": query}), False
if isinstance(field, models.ManyToManyField):
related_text_field = _preferred_related_text_field(field.related_model)
if related_text_field:
return Q(
**{f"{field_name}__{related_text_field}__icontains": query}
), True
return (
Q(**{f"{field_name}__{related_text_field}__icontains": query}),
True,
)
return Q(**{f"{field_name}__id__icontains": query}), True
return None, False

View File

@@ -3,8 +3,8 @@ from mixins.views import ObjectCreate, ObjectDelete, ObjectUpdate
from core.forms import PersonForm
from core.models import Person
from core.views.osint import OSINTListBase
from core.util import logs
from core.views.osint import OSINTListBase
log = logs.get_logger(__name__)

View File

@@ -3,8 +3,8 @@ from mixins.views import ObjectCreate, ObjectDelete, ObjectUpdate
from core.forms import PersonaForm
from core.models import Persona
from core.views.osint import OSINTListBase
from core.util import logs
from core.views.osint import OSINTListBase
log = logs.get_logger(__name__)

View File

@@ -1,9 +1,10 @@
from urllib.parse import urlencode
import orjson
import requests
from django.conf import settings
from django.shortcuts import render
from django.urls import reverse
from urllib.parse import urlencode
from django.views import View
from mixins.views import ObjectList, ObjectRead

View File

@@ -19,8 +19,8 @@ from core.models import (
PatternMitigationPlan,
PatternMitigationRule,
Person,
PersonIdentifier,
Persona,
PersonIdentifier,
QueuedMessage,
WorkspaceConversation,
WorkspaceMetricSnapshot,
@@ -37,7 +37,9 @@ class SystemSettings(SuperUserRequiredMixin, View):
"messages": Message.objects.filter(user=user).count(),
"queued_messages": QueuedMessage.objects.filter(user=user).count(),
"message_events": MessageEvent.objects.filter(user=user).count(),
"workspace_conversations": WorkspaceConversation.objects.filter(user=user).count(),
"workspace_conversations": WorkspaceConversation.objects.filter(
user=user
).count(),
"workspace_snapshots": WorkspaceMetricSnapshot.objects.filter(
conversation__user=user
).count(),
@@ -57,7 +59,9 @@ class SystemSettings(SuperUserRequiredMixin, View):
"mitigation_auto_settings": PatternMitigationAutoSettings.objects.filter(
user=user
).count(),
"mitigation_exports": PatternArtifactExport.objects.filter(user=user).count(),
"mitigation_exports": PatternArtifactExport.objects.filter(
user=user
).count(),
"osint_people": Person.objects.filter(user=user).count(),
"osint_identifiers": PersonIdentifier.objects.filter(user=user).count(),
"osint_groups": Group.objects.filter(user=user).count(),
@@ -77,7 +81,9 @@ class SystemSettings(SuperUserRequiredMixin, View):
deleted += AIResult.objects.filter(user=user).delete()[0]
deleted += AIRequest.objects.filter(user=user).delete()[0]
deleted += MemoryItem.objects.filter(user=user).delete()[0]
deleted += WorkspaceMetricSnapshot.objects.filter(conversation__user=user).delete()[0]
deleted += WorkspaceMetricSnapshot.objects.filter(
conversation__user=user
).delete()[0]
deleted += MessageEvent.objects.filter(user=user).delete()[0]
deleted += Message.objects.filter(user=user).delete()[0]
deleted += QueuedMessage.objects.filter(user=user).delete()[0]

View File

@@ -1,15 +1,16 @@
import time
from urllib.parse import urlencode
from django.shortcuts import render
from django.urls import reverse
from urllib.parse import urlencode
from django.views import View
from mixins.views import ObjectList, ObjectRead
from core.clients import transport
from core.models import ChatSession, Message, PersonIdentifier
from core.util import logs
from core.views.compose import _compose_urls, _service_icon_class
from core.views.manage.permissions import SuperUserRequiredMixin
from core.util import logs
import time
log = logs.get_logger("whatsapp_view")
@@ -32,16 +33,13 @@ class WhatsApp(SuperUserRequiredMixin, View):
)
def delete(self, request, *args, **kwargs):
account = (
str(request.GET.get("account") or "").strip()
or next(
(
str(item or "").strip()
for item in transport.list_accounts("whatsapp")
if str(item or "").strip()
),
"",
)
account = str(request.GET.get("account") or "").strip() or next(
(
str(item or "").strip()
for item in transport.list_accounts("whatsapp")
if str(item or "").strip()
),
"",
)
if account:
transport.unlink_account("whatsapp", account)
@@ -381,9 +379,7 @@ class WhatsAppAccountAdd(SuperUserRequiredMixin, ObjectRead):
def _detail_context(self, kwargs, obj):
detail_url_args = {
arg: kwargs[arg]
for arg in self.detail_url_args
if arg in kwargs
arg: kwargs[arg] for arg in self.detail_url_args if arg in kwargs
}
return {
"object": obj,
@@ -410,7 +406,9 @@ class WhatsAppAccountAdd(SuperUserRequiredMixin, ObjectRead):
sqlite_scanned = int(state.get("history_sqlite_scanned") or 0)
on_demand_requested = bool(state.get("history_on_demand_requested"))
on_demand_error = str(state.get("history_on_demand_error") or "").strip() or "-"
on_demand_anchor = str(state.get("history_on_demand_anchor") or "").strip() or "-"
on_demand_anchor = (
str(state.get("history_on_demand_anchor") or "").strip() or "-"
)
history_running = bool(state.get("history_sync_running"))
return [
f"connected={bool(state.get('connected'))}",

View File

@@ -147,8 +147,7 @@ INSIGHT_METRICS = {
"group": "stability",
"history_field": "stability_score",
"calculation": (
"0.35*reciprocity + 0.25*continuity + 0.20*response + "
"0.20*volatility."
"0.35*reciprocity + 0.25*continuity + 0.20*response + " "0.20*volatility."
),
"psychology": (
"Higher values suggest consistent mutual engagement patterns; falling "
@@ -176,9 +175,7 @@ INSIGHT_METRICS = {
"100 * min(1, distinct_sample_days / span_days). Higher means steadier "
"day-to-day continuity."
),
"psychology": (
"Drops can signal communication becoming episodic or reactive."
),
"psychology": ("Drops can signal communication becoming episodic or reactive."),
},
"response_score": {
"title": "Response Component",
@@ -232,8 +229,7 @@ INSIGHT_METRICS = {
"history_field": "stability_sample_days",
"calculation": "Count of distinct calendar days represented in the sample.",
"psychology": (
"Coverage across days better captures rhythm, not just intensity "
"bursts."
"Coverage across days better captures rhythm, not just intensity " "bursts."
),
},
"stability_computed": {
@@ -250,9 +246,7 @@ INSIGHT_METRICS = {
"title": "Commit In",
"group": "commitment",
"history_field": "commitment_inbound_score",
"calculation": (
"0.60*inbound_response_score + 0.40*inbound_balance_score."
),
"calculation": ("0.60*inbound_response_score + 0.40*inbound_balance_score."),
"psychology": (
"Estimates counterpart follow-through and reciprocity toward the user."
),
@@ -261,9 +255,7 @@ INSIGHT_METRICS = {
"title": "Commit Out",
"group": "commitment",
"history_field": "commitment_outbound_score",
"calculation": (
"0.60*outbound_response_score + 0.40*outbound_balance_score."
),
"calculation": ("0.60*outbound_response_score + 0.40*outbound_balance_score."),
"psychology": (
"Estimates user follow-through and consistency toward the counterpart."
),
@@ -931,16 +923,22 @@ def _metric_psychological_read(metric_slug, conversation):
if score is None:
return "Calibrating: collect more interaction data before interpreting."
if score >= 70:
return "Pattern suggests low relational friction and resilient repair cycles."
return (
"Pattern suggests low relational friction and resilient repair cycles."
)
if score >= 50:
return "Pattern suggests moderate strain; monitor for repeated escalation loops."
return "Pattern suggests high friction risk; prioritise safety and repair pacing."
return (
"Pattern suggests high friction risk; prioritise safety and repair pacing."
)
if metric_slug == "stability_confidence":
conf = conversation.stability_confidence or 0.0
if conf < 0.25:
return "Low certainty: treat this as a weak signal, not a conclusion."
if conf < 0.6:
return "Moderate certainty: useful directional cue, still context-dependent."
return (
"Moderate certainty: useful directional cue, still context-dependent."
)
return "High certainty: trend interpretation is likely reliable."
if metric_slug in {"commitment_inbound", "commitment_outbound"}:
inbound = conversation.commitment_inbound_score
@@ -3119,7 +3117,7 @@ def _ai_detect_violations(user, plan, person, recent_rows, metric_context=None):
"clarification": "proactive correction mapped to an artifact",
"severity": "low|medium|high",
}
]
],
},
}
prompt = [
@@ -3673,7 +3671,9 @@ class AIWorkspaceInformation(LoginRequiredMixin, View):
latest_snapshot = conversation.metric_snapshots.first()
directionality = _commitment_directionality_payload(conversation)
commitment_graph_cards = [
card for card in _all_graph_payload(conversation) if card["group"] == "commitment"
card
for card in _all_graph_payload(conversation)
if card["group"] == "commitment"
]
graph_refs = []