# NOTE: removed duplicated extraction-artifact metadata header ("3444 lines / 120 KiB / Python")
from __future__ import annotations
|
|
|
|
import hashlib
|
|
import json
|
|
import re
|
|
import time
|
|
from datetime import datetime
|
|
from datetime import timezone as dt_timezone
|
|
from difflib import SequenceMatcher
|
|
from urllib.parse import quote_plus, urlencode, urlparse
|
|
|
|
from asgiref.sync import async_to_sync
|
|
from django.conf import settings
|
|
from django.contrib.auth.mixins import LoginRequiredMixin
|
|
from django.core import signing
|
|
from django.core.cache import cache
|
|
from django.http import (
|
|
HttpResponse,
|
|
HttpResponseBadRequest,
|
|
HttpResponseNotFound,
|
|
JsonResponse,
|
|
)
|
|
from django.shortcuts import get_object_or_404, render
|
|
from django.urls import NoReverseMatch, reverse
|
|
from django.utils import timezone as dj_timezone
|
|
from django.views import View
|
|
|
|
from core.clients import transport
|
|
from core.messaging import ai as ai_runner
|
|
from core.messaging import media_bridge
|
|
from core.messaging.utils import messages_to_string
|
|
from core.models import (
|
|
AI,
|
|
Chat,
|
|
ChatSession,
|
|
Message,
|
|
MessageEvent,
|
|
PatternMitigationPlan,
|
|
Person,
|
|
PersonIdentifier,
|
|
WorkspaceConversation,
|
|
)
|
|
from core.realtime.typing_state import get_person_typing_state
|
|
from core.views.workspace import (
|
|
INSIGHT_METRICS,
|
|
_build_engage_payload,
|
|
_parse_draft_options,
|
|
)
|
|
|
|
# Salt for signed tokens authorizing the compose WebSocket connection.
COMPOSE_WS_TOKEN_SALT = "compose-ws"
# Salt for signed tokens authorizing compose "engage" actions.
COMPOSE_ENGAGE_TOKEN_SALT = "compose-engage"
# Cache lifetime (seconds) for AI draft/summary responses: 30 minutes.
COMPOSE_AI_CACHE_TTL = 60 * 30
# Matches http(s) URLs up to the first whitespace, angle bracket, quote or backslash.
URL_PATTERN = re.compile(r"https?://[^\s<>'\"\\]+")
# Canonical 8-4-4-4-12 hex UUID form (case-insensitive), used for Signal sender ids.
SIGNAL_UUID_PATTERN = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
    re.IGNORECASE,
)
# File extensions (lowercase) treated as inline-renderable images.
IMAGE_EXTENSIONS = (
    ".png",
    ".jpg",
    ".jpeg",
    ".gif",
    ".webp",
    ".bmp",
    ".avif",
    ".svg",
)
# Message bodies that count as "no text" (includes platform placeholder strings).
EMPTY_TEXT_VALUES = {
    "",
    "[No Body]",
    "[no body]",
    "(no text)",
}
|
|
|
|
|
|
def _uniq_ordered(values):
    """Clean each candidate URL and return them deduplicated, first-seen order."""
    known = set()
    result = []
    for raw in values:
        url = _clean_url(raw)
        if not url or url in known:
            continue
        known.add(url)
        result.append(url)
    return result
|
|
|
|
|
|
def _default_service(service: str | None) -> str:
|
|
value = str(service or "").strip().lower()
|
|
if value in {"signal", "whatsapp", "instagram", "xmpp"}:
|
|
return value
|
|
return "signal"
|
|
|
|
|
|
def _safe_limit(raw) -> int:
|
|
try:
|
|
value = int(raw or 40)
|
|
except (TypeError, ValueError):
|
|
value = 40
|
|
return max(10, min(value, 200))
|
|
|
|
|
|
def _safe_after_ts(raw) -> int:
|
|
try:
|
|
value = int(raw or 0)
|
|
except (TypeError, ValueError):
|
|
value = 0
|
|
return max(0, value)
|
|
|
|
|
|
def _format_ts_label(ts_value: int) -> str:
    """Render an epoch-milliseconds value as a local "HH:MM" label.

    Any conversion failure falls back to str(ts_value), or "" for falsy input.
    """
    try:
        moment = datetime.fromtimestamp(int(ts_value) / 1000, tz=dt_timezone.utc)
        return dj_timezone.localtime(moment).strftime("%H:%M")
    except Exception:
        return str(ts_value or "")
|
|
|
|
|
|
def _is_outgoing(msg: Message) -> bool:
|
|
return str(msg.custom_author or "").upper() in {"USER", "BOT"}
|
|
|
|
|
|
def _clean_url(candidate: str) -> str:
|
|
return str(candidate or "").strip().rstrip(".,);:!?\"'")
|
|
|
|
|
|
def _extract_urls(text_value: str) -> list[str]:
    """Return cleaned http(s) URLs found in *text_value*, deduplicated in order."""
    results: list[str] = []
    for raw in URL_PATTERN.findall(str(text_value or "")):
        url = _clean_url(raw)
        if url and url not in results:
            results.append(url)
    return results
|
|
|
|
|
|
def _is_url_only_text(text_value: str) -> bool:
    """True when every non-blank line of *text_value* is exactly one URL."""
    candidates = [ln.strip() for ln in str(text_value or "").splitlines()]
    lines = [ln for ln in candidates if ln]
    if not lines:
        return False
    return all(URL_PATTERN.fullmatch(ln) is not None for ln in lines)
|
|
|
|
|
|
def _looks_like_image_url(url_value: str) -> bool:
    """Heuristic: URL path carries an image extension, or is our blob-proxy endpoint."""
    if not url_value:
        return False
    path = str(urlparse(url_value).path or "").lower()
    return path.endswith("/compose/media/blob/") or path.endswith(IMAGE_EXTENSIONS)
|
|
|
|
|
|
def _is_xmpp_share_url(url_value: str) -> bool:
    """True when *url_value* is hosted on the configured XMPP upload service.

    The configured value (XMPP_UPLOAD_SERVICE or XMPP_UPLOAD_JID) may be a bare
    host, a JID (user@host), or a full URL; it is reduced to a host name before
    comparison against the URL's host.
    """
    if not url_value:
        return False
    configured = (
        str(
            getattr(settings, "XMPP_UPLOAD_SERVICE", "")
            or getattr(settings, "XMPP_UPLOAD_JID", "")
        )
        .strip()
        .lower()
    )
    if not configured:
        return False
    target = configured
    if "://" in configured:
        # Full URL: take its network location when present.
        target = (urlparse(configured).netloc or target).lower()
    if "@" in target:
        # JID form: keep only the domain part.
        target = target.split("@", 1)[-1]
    # Drop any trailing path component.
    target = target.split("/", 1)[0]
    host = str(urlparse(url_value).netloc or "").strip().lower()
    return host == target
|
|
|
|
|
|
def _image_url_from_text(text_value: str) -> str:
    """Return the first image URL found in *text_value*, or "" when none."""
    candidates = _image_urls_from_text(text_value)
    if not candidates:
        return ""
    return candidates[0]
|
|
|
|
|
|
def _image_urls_from_text(text_value: str) -> list[str]:
    """Extract image URLs from free text.

    Confident matches (image extension / blob proxy path) win. Otherwise, when
    the text consists of nothing but URLs, all of them are assumed to be images
    (extensionless XMPP uploads); in any other case no URLs are returned.
    """
    urls = _uniq_ordered(_extract_urls(text_value))
    if not urls:
        return []
    confident = [u for u in urls if _looks_like_image_url(u)]
    if confident:
        return confident
    # Fallback: some XMPP upload URLs have no file extension.
    return urls if _is_url_only_text(text_value) else []
|
|
|
|
|
|
def _looks_like_image_name(name_value: str) -> bool:
    """True when the filename (case-insensitive) carries an image extension."""
    name = str(name_value or "").strip().lower()
    if not name:
        return False
    return name.endswith(IMAGE_EXTENSIONS)
|
|
|
|
|
|
def _extract_attachment_image_urls(blob) -> list[str]:
    """Recursively pull image URLs out of an attachment payload.

    *blob* may be a URL string, an attachment dict, or a list of either.
    A dict counts as an image when its content type starts with "image/" or
    its filename has an image extension; nested "attachments" lists are
    walked recursively. Returns URLs in discovery order (may contain
    duplicates; callers dedupe via _uniq_ordered).
    """
    urls = []
    if isinstance(blob, str):
        # Bare string: accept it only when the URL itself looks like an image.
        normalized = _clean_url(blob)
        if normalized and _looks_like_image_url(normalized):
            urls.append(normalized)
        return urls

    if isinstance(blob, dict):
        # Content-type key spelling varies by source system; check them all.
        content_type = (
            str(
                blob.get("content_type")
                or blob.get("contentType")
                or blob.get("mime_type")
                or blob.get("mimetype")
                or ""
            )
            .strip()
            .lower()
        )
        filename = str(blob.get("filename") or blob.get("fileName") or "").strip()
        image_hint = content_type.startswith("image/") or _looks_like_image_name(
            filename
        )

        # Collect URLs from the common direct-link keys; accept each when the
        # dict metadata hints at an image, the URL looks like an image, or it
        # is hosted on the configured XMPP share service.
        direct_urls = []
        for key in ("url", "source_url", "download_url", "proxy_url", "href", "uri"):
            normalized = _clean_url(blob.get(key))
            if not normalized:
                continue
            if (
                image_hint
                or _looks_like_image_url(normalized)
                or _is_xmpp_share_url(normalized)
            ):
                direct_urls.append(normalized)
        urls.extend(direct_urls)
        blob_key = str(blob.get("blob_key") or "").strip()
        # Prefer source-hosted URLs (for example share.zm.is) and use blob fallback only
        # when no usable direct URL exists.
        if blob_key and image_hint and not direct_urls:
            urls.append(f"/compose/media/blob/?key={quote_plus(blob_key)}")

        nested = blob.get("attachments")
        if isinstance(nested, list):
            for row in nested:
                urls.extend(_extract_attachment_image_urls(row))
        return urls

    if isinstance(blob, list):
        for row in blob:
            urls.extend(_extract_attachment_image_urls(row))
    # Any other type contributes nothing.
    return urls
|
|
|
|
|
|
def _attachment_image_urls_by_message(messages):
    """Recover image URLs for messages whose bodies are empty placeholders.

    Returns a dict mapping str(message id) -> list of image URLs, built in
    two passes over MessageEvent rows:
      1. events explicitly linked via raw_payload_ref["legacy_message_id"];
      2. for still-unmatched messages, XMPP events carrying attachments whose
         timestamp falls within +/-3000 ms of the message timestamp.

    NOTE(review): the event lookups filter on rows[0].user — this assumes all
    supplied messages belong to the same user; confirm at call sites.
    """
    rows = list(messages or [])
    if not rows:
        return {}

    by_message = {}
    unresolved = []

    # Only messages with no usable text need attachment recovery.
    for msg in rows:
        text_value = str(msg.text or "").strip()
        if text_value and text_value not in EMPTY_TEXT_VALUES:
            continue
        unresolved.append(msg)

    if not unresolved:
        return by_message

    # Pass 1: events that carry an explicit legacy-message-id back-link.
    legacy_ids = [str(msg.id) for msg in unresolved]
    linked_events = MessageEvent.objects.filter(
        user=rows[0].user,
        raw_payload_ref__legacy_message_id__in=legacy_ids,
    ).order_by("ts")

    for event in linked_events:
        legacy_id = str(
            (event.raw_payload_ref or {}).get("legacy_message_id") or ""
        ).strip()
        if not legacy_id:
            continue
        urls = _uniq_ordered(
            _extract_attachment_image_urls(event.attachments)
            + _extract_attachment_image_urls(event.raw_payload_ref or {})
        )
        if urls:
            # setdefault keeps the earliest (ts-ordered) event's URLs per id.
            by_message.setdefault(legacy_id, urls)

    missing = [msg for msg in unresolved if str(msg.id) not in by_message]
    if not missing:
        return by_message

    # Pass 2: heuristic time-window match (+/-3000 ms) against XMPP events
    # that carry attachments. One query covers the whole span, filtered
    # per-message in Python below.
    min_ts = min(int(msg.ts or 0) for msg in missing) - 3000
    max_ts = max(int(msg.ts or 0) for msg in missing) + 3000
    fallback_events = (
        MessageEvent.objects.filter(
            user=rows[0].user,
            source_system="xmpp",
            ts__gte=min_ts,
            ts__lte=max_ts,
        )
        .exclude(attachments=[])
        .order_by("ts")
    )
    fallback_list = list(fallback_events)
    for msg in missing:
        if str(msg.id) in by_message:
            continue
        msg_ts = int(msg.ts or 0)
        candidates = [
            event for event in fallback_list if abs(int(event.ts or 0) - msg_ts) <= 3000
        ]
        if not candidates:
            continue
        # Earliest candidate inside the window wins.
        event = candidates[0]
        urls = _uniq_ordered(
            _extract_attachment_image_urls(event.attachments)
            + _extract_attachment_image_urls(event.raw_payload_ref or {})
        )
        if urls:
            by_message[str(msg.id)] = urls

    return by_message
|
|
|
|
|
|
def _serialize_message(msg: Message) -> dict:
    """Convert a Message row into the dict the compose thread UI renders.

    Covers text/image presentation, source-service labeling, and
    delivery/read receipt timing (absolute HH:MM labels plus humanized
    deltas relative to the send timestamp).
    """
    text_value = str(msg.text or "")
    image_urls = _image_urls_from_text(text_value)
    image_url = image_urls[0] if image_urls else ""
    # Hide the raw text only when it is nothing but confident image URLs;
    # otherwise keep it visible alongside any image preview.
    hide_text = bool(
        image_urls
        and _is_url_only_text(text_value)
        and all(_looks_like_image_url(url) for url in image_urls)
    )
    display_text = (
        text_value if text_value.strip() else ("(no text)" if not image_url else "")
    )
    author = str(msg.custom_author or "").strip()
    is_outgoing = _is_outgoing(msg)

    # Determine source service for display: prefer explicit session identifier service
    source_service = "web"
    try:
        if getattr(msg, "session", None) and getattr(msg.session, "identifier", None):
            svc = str(msg.session.identifier.service or "").strip().lower()
            if svc:
                source_service = svc
    except Exception:
        pass
    # A sender_uuid literally equal to "xmpp" overrides the session service.
    sender_uuid_value = str(getattr(msg, "sender_uuid", "") or "").strip()
    if sender_uuid_value.lower() == "xmpp":
        source_service = "xmpp"

    # Outgoing messages created by the web compose UI should be labeled Web Chat.
    # Outgoing messages originating from platform runtimes (Signal sync, etc.)
    # should keep their service label.
    service_labels = {
        "xmpp": "XMPP",
        "whatsapp": "WhatsApp",
        "signal": "Signal",
        "instagram": "Instagram",
        "web": "Web Chat",
    }
    if is_outgoing:
        # No sender_uuid means the message was created locally by compose.
        source_label = (
            "Web Chat"
            if not sender_uuid_value
            else service_labels.get(
                source_service, source_service.title() if source_service else "Unknown"
            )
        )
    else:
        source_label = service_labels.get(
            source_service, source_service.title() if source_service else "Unknown"
        )

    # Ensure source_label is never empty for UI rendering
    if not source_label:
        source_label = "Unknown"
    delivered_ts = int(msg.delivered_ts or 0)
    read_ts = int(msg.read_ts or 0)
    delivered_display = _format_ts_label(int(delivered_ts)) if delivered_ts else ""
    read_display = _format_ts_label(int(read_ts)) if read_ts else ""
    ts_val = int(msg.ts or 0)
    # Deltas are ms between send and delivery/read; None when either side is 0.
    delivered_delta = int(delivered_ts - ts_val) if delivered_ts and ts_val else None
    read_delta = int(read_ts - ts_val) if read_ts and ts_val else None
    # Human friendly delta strings
    delivered_delta_display = (
        _format_gap_duration(delivered_delta) if delivered_delta is not None else ""
    )
    read_delta_display = (
        _format_gap_duration(read_delta) if read_delta is not None else ""
    )
    # Receipt payload and metadata
    receipt_payload = msg.receipt_payload or {}
    read_source_service = str(msg.read_source_service or "").strip()
    read_by_identifier = str(msg.read_by_identifier or "").strip()

    return {
        "id": str(msg.id),
        "ts": int(msg.ts or 0),
        "display_ts": _format_ts_label(int(msg.ts or 0)),
        "text": text_value,
        "display_text": display_text,
        "image_url": image_url,
        "image_urls": image_urls,
        "hide_text": hide_text,
        "author": author,
        "outgoing": _is_outgoing(msg),
        "source_service": source_service,
        "source_label": source_label,
        "delivered_ts": delivered_ts,
        "read_ts": read_ts,
        "delivered_display": delivered_display,
        "read_display": read_display,
        "delivered_delta": delivered_delta,
        "read_delta": read_delta,
        "delivered_delta_display": delivered_delta_display,
        "read_delta_display": read_delta_display,
        "receipt_payload": receipt_payload,
        "read_source_service": read_source_service,
        "read_by_identifier": read_by_identifier,
    }
|
|
|
|
|
|
# Ordered specs for the metric fragments appended to the last thread message.
# "source" selects where the value is read from — the WorkspaceConversation
# itself ("conversation") or its latest metric snapshot ("snapshot");
# "precision" feeds _format_metric_fragment_value.
THREAD_METRIC_FRAGMENT_SPECS = (
    {"slug": "stability_score", "title": "Stability Score", "source": "conversation", "field": "stability_score", "precision": 2},
    {"slug": "stability_confidence", "title": "Stability Confidence", "source": "conversation", "field": "stability_confidence", "precision": 3},
    {"slug": "sample_messages", "title": "Sample Messages", "source": "conversation", "field": "stability_sample_messages", "precision": 0},
    {"slug": "sample_days", "title": "Sample Days", "source": "conversation", "field": "stability_sample_days", "precision": 0},
    {"slug": "commitment_inbound", "title": "Commit In", "source": "conversation", "field": "commitment_inbound_score", "precision": 2},
    {"slug": "commitment_outbound", "title": "Commit Out", "source": "conversation", "field": "commitment_outbound_score", "precision": 2},
    {"slug": "commitment_confidence", "title": "Commit Confidence", "source": "conversation", "field": "commitment_confidence", "precision": 3},
    {"slug": "inbound_messages", "title": "Inbound Messages", "source": "snapshot", "field": "inbound_messages", "precision": 0},
    {"slug": "outbound_messages", "title": "Outbound Messages", "source": "snapshot", "field": "outbound_messages", "precision": 0},
    {"slug": "reciprocity_score", "title": "Reciprocity", "source": "snapshot", "field": "reciprocity_score", "precision": 2},
    {"slug": "continuity_score", "title": "Continuity", "source": "snapshot", "field": "continuity_score", "precision": 2},
    {"slug": "response_score", "title": "Response", "source": "snapshot", "field": "response_score", "precision": 2},
    {"slug": "volatility_score", "title": "Volatility", "source": "snapshot", "field": "volatility_score", "precision": 2},
    {"slug": "inbound_response_score", "title": "Inbound Response", "source": "snapshot", "field": "inbound_response_score", "precision": 2},
    {"slug": "outbound_response_score", "title": "Outbound Response", "source": "snapshot", "field": "outbound_response_score", "precision": 2},
    {"slug": "balance_inbound_score", "title": "Inbound Balance", "source": "snapshot", "field": "balance_inbound_score", "precision": 2},
    {"slug": "balance_outbound_score", "title": "Outbound Balance", "source": "snapshot", "field": "balance_outbound_score", "precision": 2},
)

# Per-slug copy overrides used by _metric_copy when the generic
# INSIGHT_METRICS text does not fit the thread context.
THREAD_METRIC_COPY_OVERRIDES = {
    "inbound_messages": {
        "calculation": (
            "Count of counterpart-to-user messages in the sampled analysis window."
        ),
        "psychology": (
            "Lower counts can indicate reduced reach-back or temporary withdrawal."
        ),
    },
    "outbound_messages": {
        "calculation": (
            "Count of user-to-counterpart messages in the sampled analysis window."
        ),
        "psychology": (
            "Large imbalances can reflect chasing or over-functioning dynamics."
        ),
    },
}
|
|
|
|
|
|
def _workspace_conversation_for_person(user, person):
    """Most recently active WorkspaceConversation involving *person*, or None."""
    if person is None:
        return None
    matches = WorkspaceConversation.objects.filter(user=user, participants=person)
    return matches.order_by("-last_event_ts", "-created_at").first()
|
|
|
|
|
|
def _counterpart_identifiers_for_person(user, person):
    """Set of non-blank identifier strings linked to *person* for *user*."""
    if person is None:
        return set()
    raw_values = PersonIdentifier.objects.filter(
        user=user, person=person
    ).values_list("identifier", flat=True)
    identifiers = set()
    for value in raw_values:
        text = str(value or "").strip()
        if text:
            identifiers.add(text)
    return identifiers
|
|
|
|
|
|
def _message_is_outgoing_for_analysis(msg, counterpart_identifiers):
    """Outgoing check that trusts sender identity over author labels.

    A message whose sender_uuid is a known counterpart identifier is inbound
    regardless of how custom_author was recorded; otherwise fall back to the
    author-based check.
    """
    sender = str(getattr(msg, "sender_uuid", "") or "").strip()
    if sender and sender in counterpart_identifiers:
        return False
    return _is_outgoing(msg)
|
|
|
|
|
|
def _format_gap_duration(ms_value):
|
|
value = max(0, int(ms_value or 0))
|
|
seconds = value // 1000
|
|
if seconds < 60:
|
|
return f"{seconds}s"
|
|
minutes = seconds // 60
|
|
if minutes < 60:
|
|
return f"{minutes}m"
|
|
hours = minutes // 60
|
|
rem_minutes = minutes % 60
|
|
if rem_minutes == 0:
|
|
return f"{hours}h"
|
|
return f"{hours}h {rem_minutes}m"
|
|
|
|
|
|
def _score_from_lag_for_thread(lag_ms, target_hours=4):
|
|
if lag_ms is None:
|
|
return 50.0
|
|
target_ms = max(1, target_hours) * 60 * 60 * 1000
|
|
return max(0.0, min(100.0, 100.0 / (1.0 + (lag_ms / target_ms))))
|
|
|
|
|
|
def _metric_copy(slug, fallback_title):
    """Merge global insight metric copy with per-thread overrides for *slug*."""
    base = INSIGHT_METRICS.get(slug) or {}
    extra = THREAD_METRIC_COPY_OVERRIDES.get(slug) or {}
    return {
        "title": base.get("title") or fallback_title,
        "calculation": extra.get("calculation") or base.get("calculation") or "",
        "psychology": extra.get("psychology") or base.get("psychology") or "",
    }
|
|
|
|
|
|
def _format_metric_fragment_value(value, precision):
|
|
if value is None:
|
|
return "-"
|
|
try:
|
|
number = float(value)
|
|
except (TypeError, ValueError):
|
|
return str(value)
|
|
if int(precision or 0) <= 0:
|
|
return str(int(round(number)))
|
|
rounded = round(number, int(precision))
|
|
if float(rounded).is_integer():
|
|
return str(int(rounded))
|
|
return f"{rounded:.{int(precision)}f}"
|
|
|
|
|
|
def _build_thread_metric_fragments(conversation):
    """Build the metric fragment list for a thread from its conversation and
    latest metric snapshot; returns [] when there is no conversation."""
    if conversation is None:
        return []
    snapshot = conversation.metric_snapshots.first()
    fragments = []
    for spec in THREAD_METRIC_FRAGMENT_SPECS:
        source_obj = snapshot if spec["source"] == "snapshot" else conversation
        if source_obj is None:
            # No snapshot yet: skip snapshot-backed metrics entirely.
            continue
        copy = _metric_copy(spec["slug"], spec["title"])
        raw_value = getattr(source_obj, spec["field"], None)
        fragments.append(
            {
                "slug": spec["slug"],
                "title": copy["title"],
                "value": _format_metric_fragment_value(
                    raw_value, spec.get("precision", 2)
                ),
                "calculation": copy["calculation"],
                "psychology": copy["psychology"],
            }
        )
    return fragments
|
|
|
|
|
|
def _build_gap_fragment(is_outgoing_reply, lag_ms, snapshot):
    """Describe an unseen reply gap.

    The score comes from the relevant snapshot response metric when available,
    otherwise it is derived directly from the lag; either way it is clamped to
    0-100.
    """
    metric_slug = (
        "outbound_response_score" if is_outgoing_reply else "inbound_response_score"
    )
    copy = _metric_copy(metric_slug, "Response Score")
    score = None
    if snapshot is not None:
        # metric_slug doubles as the snapshot attribute name.
        score = getattr(snapshot, metric_slug, None)
    if score is None:
        score = _score_from_lag_for_thread(lag_ms)
    score = min(100.0, max(0.0, float(score)))
    focus = "Your reply delay" if is_outgoing_reply else "Counterpart reply delay"
    return {
        "slug": metric_slug,
        "title": "Unseen Gap",
        "focus": focus,
        "lag": _format_gap_duration(lag_ms),
        "lag_ms": int(lag_ms or 0),
        "score_value": round(score, 2),
        "score": _format_metric_fragment_value(score, 2),
        "calculation": copy["calculation"],
        "psychology": copy["psychology"],
    }
|
|
|
|
|
|
def _serialize_messages_with_artifacts(
    messages,
    counterpart_identifiers=None,
    conversation=None,
    seed_previous=None,
):
    """Serialize a message list and attach analysis artifacts.

    On top of _serialize_message output this adds: images recovered from
    linked MessageEvents for placeholder-only bodies, per-turn "gap"
    fragments (reply-delay annotations emitted whenever the speaking side
    flips), and a metric-fragment block on the final message only.
    *seed_previous* lets callers pass the last message of the previous page
    so the first gap is computed across the page boundary.
    """
    rows = list(messages or [])
    serialized = [_serialize_message(msg) for msg in rows]
    # Recover images for placeholder-only messages from linked MessageEvents.
    attachment_images = _attachment_image_urls_by_message(rows)
    for idx, msg in enumerate(rows):
        item = serialized[idx]
        if item.get("image_urls"):
            continue
        recovered = _uniq_ordered(attachment_images.get(str(msg.id)) or [])
        if not recovered:
            continue
        item["image_urls"] = recovered
        item["image_url"] = recovered[0]
        text_value = str(msg.text or "").strip()
        if text_value in EMPTY_TEXT_VALUES:
            # A placeholder body adds nothing once an image is shown.
            item["hide_text"] = True
            item["display_text"] = ""

    for item in serialized:
        item["gap_fragments"] = []
        item["metric_fragments"] = []

    counterpart_identifiers = set(counterpart_identifiers or [])
    snapshot = (
        conversation.metric_snapshots.first() if conversation is not None else None
    )

    # Seed the previous-message state (possibly from the prior page).
    prev_msg = seed_previous
    prev_ts = int(prev_msg.ts or 0) if prev_msg is not None else None
    prev_outgoing = (
        _message_is_outgoing_for_analysis(prev_msg, counterpart_identifiers)
        if prev_msg is not None
        else None
    )

    for idx, msg in enumerate(rows):
        current_ts = int(msg.ts or 0)
        current_outgoing = _message_is_outgoing_for_analysis(
            msg, counterpart_identifiers
        )
        # Emit a gap fragment only when direction flips (i.e. this message is
        # a reply) and the timestamps are ordered sanely.
        if (
            prev_msg is not None
            and prev_ts is not None
            and prev_outgoing is not None
            and current_outgoing != prev_outgoing
            and current_ts >= prev_ts
        ):
            lag_ms = current_ts - prev_ts
            serialized[idx]["gap_fragments"].append(
                _build_gap_fragment(current_outgoing, lag_ms, snapshot)
            )
        prev_msg = msg
        prev_ts = current_ts
        prev_outgoing = current_outgoing

    if serialized:
        serialized[-1]["metric_fragments"] = _build_thread_metric_fragments(
            conversation
        )

    return serialized
|
|
|
|
|
|
def _insight_detail_url(person_id, metric_slug):
    """Reverse the workspace insight-detail page URL; "" when unresolvable."""
    if not person_id or not metric_slug:
        return ""
    url_kwargs = {
        "type": "page",
        "person_id": person_id,
        "metric": str(metric_slug),
    }
    try:
        return reverse("ai_workspace_insight_detail", kwargs=url_kwargs)
    except NoReverseMatch:
        return ""
|
|
|
|
|
|
def _glance_items_from_state(gap_fragment=None, metric_fragments=None, person_id=None):
    """Build up to three at-a-glance items from a gap fragment plus metrics.

    Each item carries a label, a short display value, a pipe-joined tooltip,
    and a deep link into the insight detail page for its metric. The gap item
    (when present) comes first, followed by at most two metric items.
    """
    items = []
    if gap_fragment:
        tooltip_parts = [
            f"{gap_fragment.get('focus') or 'Response delay'}",
            f"Delay {gap_fragment.get('lag') or '-'}",
            f"Score {gap_fragment.get('score') or '-'}",
        ]
        if gap_fragment.get("calculation"):
            tooltip_parts.append(
                f"How it is calculated: {gap_fragment.get('calculation')}"
            )
        if gap_fragment.get("psychology"):
            tooltip_parts.append(
                f"Psychological interpretation: {gap_fragment.get('psychology')}"
            )
        items.append(
            {
                "label": "Response Delay",
                "value": f"{gap_fragment.get('lag') or '-'} · {gap_fragment.get('score') or '-'}",
                "tooltip": " | ".join(tooltip_parts),
                "url": _insight_detail_url(
                    person_id,
                    gap_fragment.get("slug") or "inbound_response_score",
                ),
            }
        )

    # At most two metric items so the row never exceeds three entries total.
    metric_fragments = list(metric_fragments or [])
    for metric in metric_fragments[:2]:
        tooltip_parts = []
        if metric.get("calculation"):
            tooltip_parts.append(f"How it is calculated: {metric.get('calculation')}")
        if metric.get("psychology"):
            tooltip_parts.append(
                f"Psychological interpretation: {metric.get('psychology')}"
            )
        items.append(
            {
                "label": str(metric.get("title") or "Metric"),
                "value": str(metric.get("value") or "-"),
                "tooltip": " | ".join(tooltip_parts),
                "url": _insight_detail_url(person_id, metric.get("slug")),
            }
        )
    return items[:3]
|
|
|
|
|
|
def _build_glance_items(serialized_messages, person_id=None):
    """Pick the newest gap fragment and newest metric fragments from the
    serialized thread, then turn them into at-a-glance items."""
    rows = list(serialized_messages or [])
    newest_metrics = []
    newest_gap = None
    # Walk newest-to-oldest and stop once both artifact kinds are found.
    for row in reversed(rows):
        if not newest_metrics:
            metrics = list(row.get("metric_fragments") or [])
            if metrics:
                newest_metrics = metrics
        if newest_gap is None:
            gaps = list(row.get("gap_fragments") or [])
            if gaps:
                newest_gap = gaps[0]
        if newest_metrics and newest_gap:
            break
    return _glance_items_from_state(
        newest_gap,
        newest_metrics,
        person_id=person_id,
    )
|
|
|
|
|
|
def _owner_name(user) -> str:
|
|
return user.first_name or user.get_full_name().strip() or user.username or "Me"
|
|
|
|
|
|
def _compose_ws_token(user_id, service, identifier, person_id):
    """Sign a short-lived payload authorizing a compose WebSocket connection.

    The "exp" field carries an explicit 12-hour expiry timestamp alongside the
    signature's own timestamp.
    """
    expires_at = int(time.time()) + 12 * 60 * 60
    payload = {
        "u": int(user_id),
        "s": str(service or ""),
        "i": str(identifier or ""),
        "p": str(person_id) if person_id else "",
        "exp": expires_at,
    }
    return signing.dumps(payload, salt=COMPOSE_WS_TOKEN_SALT)
|
|
|
|
|
|
def _compose_ai_cache_key(
|
|
kind, user_id, service, identifier, person_id, last_ts, limit
|
|
):
|
|
raw = "|".join(
|
|
[
|
|
str(kind or ""),
|
|
str(user_id),
|
|
str(service or ""),
|
|
str(identifier or ""),
|
|
str(person_id or ""),
|
|
str(last_ts or 0),
|
|
str(limit or 0),
|
|
]
|
|
)
|
|
digest = hashlib.sha1(raw.encode("utf-8")).hexdigest()
|
|
return f"compose:{kind}:{digest}"
|
|
|
|
|
|
def _plain_text(value):
|
|
cleaned = re.sub(r"\s+", " ", str(value or "").strip())
|
|
cleaned = re.sub(r"^\s*#{1,6}\s*", "", cleaned)
|
|
cleaned = re.sub(r"\*\*(.*?)\*\*", r"\1", cleaned)
|
|
cleaned = re.sub(r"`(.*?)`", r"\1", cleaned)
|
|
return cleaned.strip()
|
|
|
|
|
|
def _engage_body_only(value):
    """Strip a leading bold title line and a "Guidance:" label, then flatten."""
    lines = [ln.strip() for ln in str(value or "").splitlines() if ln.strip()]
    if lines and lines[0].startswith("**"):
        del lines[0]
    if lines and lines[0].lower() == "guidance:":
        del lines[0]
    return _plain_text(" ".join(lines))
|
|
|
|
|
|
def _messages_for_ai(user, person_identifier, limit):
    """Latest *limit* messages for the identifier's chat session, oldest first.

    The ChatSession is created on first use so brand-new threads still work.
    """
    if person_identifier is None:
        return []
    session, _created = ChatSession.objects.get_or_create(
        user=user, identifier=person_identifier
    )
    newest_first = list(
        Message.objects.filter(user=user, session=session)
        .select_related("session", "session__identifier", "session__identifier__person")
        .order_by("-ts")[:limit]
    )
    return newest_first[::-1]
|
|
|
|
|
|
def _fallback_drafts():
|
|
return [
|
|
{
|
|
"label": "Soft",
|
|
"text": "I want us to stay connected. I am listening and I want to understand your perspective clearly.",
|
|
},
|
|
{
|
|
"label": "Neutral",
|
|
"text": "I hear your point. Let us clarify what each of us means so we can move forward constructively.",
|
|
},
|
|
{
|
|
"label": "Firm",
|
|
"text": "I want to resolve this respectfully. I will continue when we can keep the conversation constructive.",
|
|
},
|
|
]
|
|
|
|
|
|
def _build_draft_prompt(owner_name, person_name, transcript):
|
|
return [
|
|
{
|
|
"role": "system",
|
|
"content": (
|
|
"Generate exactly three short reply drafts for a chat. "
|
|
"Return labels Soft, Neutral, Firm. "
|
|
"Format:\nSoft: ...\nNeutral: ...\nFirm: ...\n"
|
|
"Each draft must be one to two sentences, plain text, no markdown."
|
|
),
|
|
},
|
|
{
|
|
"role": "user",
|
|
"content": (
|
|
f"Me: {owner_name}\n"
|
|
f"Other: {person_name}\n"
|
|
f"Conversation:\n{transcript}"
|
|
),
|
|
},
|
|
]
|
|
|
|
|
|
def _build_summary_prompt(owner_name, person_name, transcript):
|
|
return [
|
|
{
|
|
"role": "system",
|
|
"content": (
|
|
"Create a concise conversation summary with three sections. "
|
|
"Use this exact structure:\n"
|
|
"Headlines:\n- ...\n"
|
|
"Patterns:\n- ...\n"
|
|
"Suggested Next Message:\n- ...\n"
|
|
"Keep each bullet practical and specific."
|
|
),
|
|
},
|
|
{
|
|
"role": "user",
|
|
"content": (
|
|
f"Me: {owner_name}\n"
|
|
f"Other: {person_name}\n"
|
|
f"Conversation:\n{transcript}"
|
|
),
|
|
},
|
|
]
|
|
|
|
|
|
def _to_float(value):
|
|
if value is None:
|
|
return None
|
|
try:
|
|
return float(value)
|
|
except (TypeError, ValueError):
|
|
return None
|
|
|
|
|
|
def _format_number(value, precision=2):
    """Display-format a number: "-" for non-numeric, no decimals when whole."""
    number = _to_float(value)
    if number is None:
        return "-"
    rounded = round(number, precision)
    if float(rounded).is_integer():
        return str(int(rounded))
    return f"{rounded:.{precision}f}"
|
|
|
|
|
|
def _percent_change(current, previous):
    """Percent change from *previous* to *current*; None when undefined.

    Undefined means either value is non-numeric or the baseline is ~zero.
    """
    now_val = _to_float(current)
    base = _to_float(previous)
    if now_val is None or base is None or abs(base) < 1e-9:
        return None
    return (now_val - base) / abs(base) * 100.0
|
|
|
|
|
|
def _trend_meta(current, previous, higher_is_better=True):
    """Icon/class/meaning describing a metric's movement between snapshots."""
    now_val = _to_float(current)
    prev_val = _to_float(previous)
    if now_val is None or prev_val is None:
        return {
            "direction": "unknown",
            "icon": "fa-solid fa-minus",
            "class_name": "has-text-grey",
            "meaning": "No comparison yet",
        }
    delta = now_val - prev_val
    if abs(delta) < 1e-9:
        return {
            "direction": "flat",
            "icon": "fa-solid fa-minus",
            "class_name": "has-text-grey",
            "meaning": "No meaningful change",
        }
    moved_up = delta > 0
    # An upward move is an improvement only when higher values are better.
    improves = moved_up if higher_is_better else not moved_up
    return {
        "direction": "up" if moved_up else "down",
        "icon": (
            "fa-solid fa-arrow-trend-up"
            if moved_up
            else "fa-solid fa-arrow-trend-down"
        ),
        "class_name": "has-text-success" if improves else "has-text-danger",
        "meaning": "Improving signal" if improves else "Risk signal",
    }
|
|
|
|
|
|
def _emotion_meta(metric_kind, value):
    """Icon/class/label summarizing a metric value.

    "confidence" values (0-1) are rescaled to 0-100 first; "count" metrics get
    data-volume labels (80/30 thresholds); everything else gets sentiment
    faces (75/50 thresholds). Non-numeric values read as Unknown.
    """
    score = _to_float(value)
    if score is None:
        return {
            "icon": "fa-regular fa-face-meh-blank",
            "class_name": "has-text-grey",
            "label": "Unknown",
        }
    if metric_kind == "confidence":
        score = score * 100.0
    if metric_kind == "count":
        if score >= 80:
            tier = ("fa-solid fa-chart-column", "has-text-success", "Rich Data")
        elif score >= 30:
            tier = ("fa-solid fa-chart-simple", "has-text-warning", "Moderate Data")
        else:
            tier = ("fa-solid fa-chart-line", "has-text-danger", "Sparse Data")
    elif score >= 75:
        tier = ("fa-regular fa-face-smile", "has-text-success", "Positive")
    elif score >= 50:
        tier = ("fa-regular fa-face-meh", "has-text-warning", "Mixed")
    else:
        tier = ("fa-regular fa-face-frown", "has-text-danger", "Strained")
    icon, class_name, label = tier
    return {"icon": icon, "class_name": class_name, "label": label}
|
|
|
|
|
|
def _quick_insights_rows(conversation):
    """Build the quick-insights rows for a workspace conversation.

    Each row combines the current value, previous-snapshot delta, trend
    metadata, an "emotion" descriptor and metric documentation copy for
    one metric. Returns rows plus snapshot_count and the timestamp of
    the latest snapshot.
    """
    snapshots = conversation.metric_snapshots
    latest = snapshots.first()
    previous = (
        snapshots.order_by("-computed_at")[1:2].first()
        if snapshots.count() > 1
        else None
    )
    # (key, label, doc_slug, field, source, kind, icon) — every metric in
    # this table treats "higher" as the improving direction.
    spec_table = (
        ("stability_score", "Stability Score", "stability_score",
         "stability_score", "conversation", "score", "fa-solid fa-heart-pulse"),
        ("stability_confidence", "Stability Confidence", "stability_confidence",
         "stability_confidence", "conversation", "confidence", "fa-solid fa-shield-check"),
        ("sample_messages", "Sample Messages", "sample_messages",
         "stability_sample_messages", "conversation", "count", "fa-solid fa-message"),
        ("sample_days", "Sample Days", "sample_days",
         "stability_sample_days", "conversation", "count", "fa-solid fa-calendar-days"),
        ("commitment_inbound", "Commit In", "commitment_inbound",
         "commitment_inbound_score", "conversation", "score", "fa-solid fa-inbox"),
        ("commitment_outbound", "Commit Out", "commitment_outbound",
         "commitment_outbound_score", "conversation", "score", "fa-solid fa-paper-plane"),
        ("commitment_confidence", "Commit Confidence", "commitment_confidence",
         "commitment_confidence", "conversation", "confidence", "fa-solid fa-badge-check"),
        ("reciprocity", "Reciprocity", "reciprocity_score",
         "reciprocity_score", "snapshot", "score", "fa-solid fa-right-left"),
        ("continuity", "Continuity", "continuity_score",
         "continuity_score", "snapshot", "score", "fa-solid fa-link"),
        ("response", "Response", "response_score",
         "response_score", "snapshot", "score", "fa-solid fa-gauge-high"),
        ("volatility", "Volatility", "volatility_score",
         "volatility_score", "snapshot", "score", "fa-solid fa-wave-square"),
        ("inbound_messages", "Inbound Messages", "inbound_messages",
         "inbound_messages", "snapshot", "count", "fa-solid fa-arrow-down"),
        ("outbound_messages", "Outbound Messages", "outbound_messages",
         "outbound_messages", "snapshot", "count", "fa-solid fa-arrow-up"),
    )
    rows = []
    for key, label, doc_slug, field_name, source, kind, icon in spec_table:
        metric_copy = _metric_copy(doc_slug or key, label)
        # "conversation" metrics live on the conversation row itself;
        # everything else is read off the latest snapshot.
        source_obj = conversation if source == "conversation" else latest
        current = (
            getattr(source_obj, field_name, None) if source_obj is not None else None
        )
        # NOTE(review): the previous value is always read off the prior
        # snapshot, even for conversation-sourced fields — getattr falls
        # back to None when the snapshot lacks the attribute. Confirm this
        # is intended against the snapshot model.
        previous_value = getattr(previous, field_name, None) if previous else None
        trend = _trend_meta(current, previous_value, higher_is_better=True)
        delta_pct = _percent_change(current, previous_value)
        point_count = snapshots.exclude(**{f"{field_name}__isnull": True}).count()
        rows.append(
            {
                "key": key,
                "label": label,
                "icon": icon,
                "value": current,
                "display_value": _format_number(
                    current,
                    3 if kind == "confidence" else 2,
                ),
                "delta_pct": delta_pct,
                "delta_label": f"{delta_pct:+.2f}%" if delta_pct is not None else "n/a",
                "point_count": point_count,
                "trend": trend,
                "emotion": _emotion_meta(kind, current),
                "calculation": metric_copy.get("calculation") or "",
                "psychology": metric_copy.get("psychology") or "",
            }
        )
    return {
        "rows": rows,
        "snapshot_count": snapshots.count(),
        "latest_computed_at": latest.computed_at if latest else None,
    }
|
|
|
|
|
|
def _participant_feedback_state_label(conversation, person):
|
|
payload = conversation.participant_feedback or {}
|
|
if not isinstance(payload, dict) or person is None:
|
|
return ""
|
|
raw = payload.get(str(person.id)) or {}
|
|
if not isinstance(raw, dict):
|
|
return ""
|
|
state_key = str(raw.get("state") or "").strip().lower()
|
|
return {
|
|
"withdrawing": "Withdrawing",
|
|
"overextending": "Overextending",
|
|
"balanced": "Balanced",
|
|
}.get(state_key, "")
|
|
|
|
|
|
def _build_engage_prompt(owner_name, person_name, transcript):
|
|
return [
|
|
{
|
|
"role": "system",
|
|
"content": (
|
|
"Write one short de-escalating outreach in shared framing. "
|
|
"Use 'we/us/our' only. No names. One or two sentences."
|
|
),
|
|
},
|
|
{
|
|
"role": "user",
|
|
"content": (
|
|
f"Me: {owner_name}\n"
|
|
f"Other: {person_name}\n"
|
|
f"Conversation:\n{transcript}"
|
|
),
|
|
},
|
|
]
|
|
|
|
|
|
def _latest_plan_for_person(user, person):
    """Return the most recently updated mitigation plan involving *person*.

    Returns None when no person is given or no plan exists.
    """
    if person is None:
        return None
    plan = (
        PatternMitigationPlan.objects.filter(
            user=user,
            conversation__participants=person,
        )
        .select_related("conversation")
        .order_by("-updated_at")
        .first()
    )
    return plan
|
|
|
|
|
|
def _best_engage_source(plan):
|
|
if plan is None:
|
|
return (None, "")
|
|
correction = plan.corrections.order_by("-created_at").first()
|
|
if correction:
|
|
return (correction, "correction")
|
|
rule = plan.rules.order_by("-created_at").first()
|
|
if rule:
|
|
return (rule, "rule")
|
|
game = plan.games.order_by("-created_at").first()
|
|
if game:
|
|
return (game, "game")
|
|
return (None, "")
|
|
|
|
|
|
def _engage_source_options(plan):
|
|
if plan is None:
|
|
return []
|
|
options = []
|
|
for rule in plan.rules.order_by("created_at"):
|
|
options.append(
|
|
{
|
|
"value": f"rule:{rule.id}",
|
|
"label": f"Rule: {rule.title}",
|
|
}
|
|
)
|
|
for game in plan.games.order_by("created_at"):
|
|
options.append(
|
|
{
|
|
"value": f"game:{game.id}",
|
|
"label": f"Game: {game.title}",
|
|
}
|
|
)
|
|
for correction in plan.corrections.order_by("created_at"):
|
|
options.append(
|
|
{
|
|
"value": f"correction:{correction.id}",
|
|
"label": f"Correction: {correction.title}",
|
|
}
|
|
)
|
|
return options
|
|
|
|
|
|
def _engage_source_from_ref(plan, source_ref):
|
|
if plan is None:
|
|
return (None, "", "")
|
|
ref = str(source_ref or "").strip()
|
|
if ":" not in ref:
|
|
return (None, "", "")
|
|
kind, raw_id = ref.split(":", 1)
|
|
kind = kind.strip().lower()
|
|
raw_id = raw_id.strip()
|
|
model_by_kind = {
|
|
"rule": plan.rules,
|
|
"game": plan.games,
|
|
"correction": plan.corrections,
|
|
}
|
|
queryset = model_by_kind.get(kind)
|
|
if queryset is None:
|
|
return (None, "", "")
|
|
obj = queryset.filter(id=raw_id).first()
|
|
if obj is None:
|
|
return (None, "", "")
|
|
return (obj, kind, f"{kind}:{obj.id}")
|
|
|
|
|
|
def _context_base(user, service, identifier, person):
    """Resolve the canonical (service, identifier, person) triple for compose views.

    Looks up a PersonIdentifier with progressively looser filters: exact
    (person, service, identifier), then any identifier for (person, service),
    then any identifier for the person at all; if only an identifier was
    supplied (no person), a (service, identifier) lookup. When a row is
    found, its stored values override the caller-supplied ones.
    """
    person_identifier = None
    if person is not None:
        if identifier:
            # Exact match for the requested service + identifier on this person.
            person_identifier = PersonIdentifier.objects.filter(
                user=user,
                person=person,
                service=service,
                identifier=identifier,
            ).first()
        if person_identifier is None:
            # Fall back: any identifier on the same service, then any at all.
            person_identifier = (
                PersonIdentifier.objects.filter(
                    user=user,
                    person=person,
                    service=service,
                ).first()
                or PersonIdentifier.objects.filter(user=user, person=person).first()
            )
    if person_identifier is None and identifier and person is None:
        # No person supplied: find who owns this service/identifier pair.
        person_identifier = PersonIdentifier.objects.filter(
            user=user,
            service=service,
            identifier=identifier,
        ).first()

    if person_identifier:
        # The stored link is authoritative; normalize all three values from it.
        service = person_identifier.service
        identifier = person_identifier.identifier
        person = person_identifier.person

    return {
        "person_identifier": person_identifier,
        "service": service,
        "identifier": identifier,
        "person": person,
    }
|
|
|
|
|
|
def _compose_urls(service, identifier, person_id):
    """Build the compose page/widget/workspace URLs for one contact.

    ``person_id`` is optional; when falsy it is omitted from the query.
    """
    params = {"service": service, "identifier": identifier}
    if person_id:
        params["person"] = str(person_id)
    query = urlencode(params)
    routes = (
        ("page_url", "compose_page"),
        ("widget_url", "compose_widget"),
        ("workspace_url", "compose_workspace"),
    )
    return {name: f"{reverse(route)}?{query}" for name, route in routes}
|
|
|
|
|
|
def _service_icon_class(service: str) -> str:
|
|
key = str(service or "").strip().lower()
|
|
if key == "signal":
|
|
return "fa-solid fa-signal"
|
|
if key == "whatsapp":
|
|
return "fa-brands fa-whatsapp"
|
|
if key == "instagram":
|
|
return "fa-brands fa-instagram"
|
|
if key == "xmpp":
|
|
return "fa-solid fa-comments"
|
|
return "fa-solid fa-address-card"
|
|
|
|
|
|
def _service_label(service: str) -> str:
|
|
key = str(service or "").strip().lower()
|
|
labels = {
|
|
"signal": "Signal",
|
|
"whatsapp": "WhatsApp",
|
|
"instagram": "Instagram",
|
|
"xmpp": "XMPP",
|
|
}
|
|
return labels.get(key, key.title() if key else "Unknown")
|
|
|
|
|
|
def _service_order(service: str) -> int:
|
|
key = str(service or "").strip().lower()
|
|
order = {
|
|
"signal": 0,
|
|
"whatsapp": 1,
|
|
"instagram": 2,
|
|
"xmpp": 3,
|
|
}
|
|
return order.get(key, 99)
|
|
|
|
|
|
def _signal_identifier_shape(value: str) -> str:
|
|
raw = str(value or "").strip()
|
|
if not raw:
|
|
return "unknown"
|
|
if SIGNAL_UUID_PATTERN.fullmatch(raw):
|
|
return "uuid"
|
|
digits = re.sub(r"[^0-9]", "", raw)
|
|
if digits and raw.replace("+", "").replace(" ", "").replace("-", "").isdigit():
|
|
return "phone"
|
|
if digits and raw.isdigit():
|
|
return "phone"
|
|
return "other"
|
|
|
|
|
|
def _manual_contact_rows(user):
    """Collect de-duplicated compose contact rows for *user*.

    Merges three sources, in priority order (first add wins dedupe):
      1. explicitly linked PersonIdentifier rows,
      2. recent Signal chats (uuid and number forms),
      3. the WhatsApp runtime contact list.
    Rows are sorted with linked people first, then name/service/identifier.
    """
    rows = []
    seen = set()  # (service, identifier) pairs already emitted
    identifiers = (
        PersonIdentifier.objects.filter(user=user)
        .select_related("person")
        .order_by("person__name", "service", "identifier")
    )

    def _normalize_contact_key(value: str) -> str:
        # Compare JID-like values by their local part only.
        raw = str(value or "").strip().lower()
        if "@" in raw:
            raw = raw.split("@", 1)[0]
        return raw

    def _clean_detected_name(value: str) -> str:
        # Treat placeholder-looking names ("~", "-", "_") as no name.
        text = str(value or "").strip()
        if not text:
            return ""
        if text in {"~", "-", "_"}:
            return ""
        return text

    def add_row(
        *,
        service,
        identifier,
        person=None,
        source="linked",
        account="",
        detected_name="",
    ):
        # Append one normalized row per unique (service, identifier).
        service_key = _default_service(service)
        identifier_value = str(identifier or "").strip()
        if not identifier_value:
            return
        key = (service_key, identifier_value)
        if key in seen:
            return
        seen.add(key)
        urls = _compose_urls(
            service_key, identifier_value, person.id if person else None
        )
        linked_person_name = person.name if person else ""
        detected = _clean_detected_name(detected_name or account or "")
        # Display preference: linked name > detected name > raw identifier.
        person_name = linked_person_name or detected or identifier_value
        rows.append(
            {
                "person_name": person_name,
                "linked_person_name": linked_person_name,
                "person_id": str(person.id) if person else "",
                "detected_name": detected,
                "service": service_key,
                "service_icon_class": _service_icon_class(service_key),
                "identifier": identifier_value,
                "compose_url": urls["page_url"],
                "compose_widget_url": urls["widget_url"],
                "linked_person": bool(person),
                "source": source,
                "match_url": (
                    f"{reverse('compose_contact_match')}?"
                    f"{urlencode({'service': service_key, 'identifier': identifier_value})}"
                ),
            }
        )

    # 1) Linked identifiers first — they win the (service, identifier) dedupe.
    for row in identifiers:
        add_row(
            service=row.service,
            identifier=row.identifier,
            person=row.person,
            source="linked",
        )

    # 2) Recent Signal chats; map uuid/number candidates back to linked people.
    signal_links = {
        str(row.identifier): row
        for row in (
            PersonIdentifier.objects.filter(user=user, service="signal")
            .select_related("person")
            .order_by("id")
        )
    }
    # NOTE(review): Chat is not filtered by user here — confirm Chat rows
    # are per-deployment rather than per-user.
    signal_chats = Chat.objects.all().order_by("-id")[:500]
    for chat in signal_chats:
        uuid_candidate = str(chat.source_uuid or "").strip()
        number_candidate = str(chat.source_number or "").strip()
        # If either form of the chat's source is linked, reuse that person
        # for the other form as well.
        fallback_linked = None
        if uuid_candidate:
            fallback_linked = signal_links.get(uuid_candidate)
        if fallback_linked is None and number_candidate:
            fallback_linked = signal_links.get(number_candidate)
        for candidate in (uuid_candidate, number_candidate):
            if not candidate:
                continue
            linked = signal_links.get(candidate) or fallback_linked
            add_row(
                service="signal",
                identifier=candidate,
                person=(linked.person if linked else None),
                source="signal_chat",
                account=str(chat.account or ""),
                detected_name=_clean_detected_name(
                    chat.source_name or chat.account or ""
                ),
            )

    # 3) WhatsApp runtime contacts, excluding the user's own account JIDs.
    whatsapp_links = {
        str(row.identifier): row
        for row in (
            PersonIdentifier.objects.filter(user=user, service="whatsapp")
            .select_related("person")
            .order_by("id")
        )
    }
    wa_contacts = transport.get_runtime_state("whatsapp").get("contacts") or []
    wa_accounts = transport.get_runtime_state("whatsapp").get("accounts") or []
    wa_account_keys = {
        _normalize_contact_key(value)
        for value in wa_accounts
        if str(value or "").strip()
    }
    if isinstance(wa_contacts, list):
        for item in wa_contacts:
            if not isinstance(item, dict):
                continue
            candidate = str(item.get("identifier") or item.get("jid") or "").strip()
            if not candidate:
                continue
            # Skip the user's own accounts.
            if _normalize_contact_key(candidate) in wa_account_keys:
                continue
            detected_name = _clean_detected_name(
                item.get("name") or item.get("chat") or ""
            )
            if detected_name.lower() == "linked account":
                continue
            linked = whatsapp_links.get(candidate)
            if linked is None and "@" in candidate:
                # Retry the lookup using the bare local part of the JID.
                linked = whatsapp_links.get(candidate.split("@", 1)[0])
            add_row(
                service="whatsapp",
                identifier=candidate,
                person=(linked.person if linked else None),
                source="whatsapp_runtime",
                account=detected_name,
                detected_name=detected_name,
            )

    # Linked people first, then alphabetical name, service rank, identifier.
    rows.sort(
        key=lambda row: (
            0 if row.get("linked_person") else 1,
            row["person_name"].lower(),
            _service_order(row.get("service")),
            row["identifier"],
        )
    )
    return rows
|
|
|
|
|
|
def _recent_manual_contacts(
    user,
    *,
    current_service: str,
    current_identifier: str,
    current_person: Person | None,
    limit: int = 12,
):
    """Build the "recent contacts" strip for the compose panel.

    Orders contacts by most recent message activity (current contact
    pinned first), de-duplicates by person (or by service/identifier for
    unlinked contacts), and decorates each row with per-service compose
    URLs and an ``is_active`` flag for the currently open contact.
    """
    all_rows = _manual_contact_rows(user)
    if not all_rows:
        return []

    current_service_key = _default_service(current_service)
    current_identifier_value = str(current_identifier or "").strip()
    current_person_id = str(current_person.id) if current_person else ""

    # Fast lookup of known contact rows by (service, identifier).
    row_by_key = {
        (
            str(row.get("service") or "").strip().lower(),
            str(row.get("identifier") or "").strip(),
        ): row
        for row in all_rows
    }
    # person_id -> {service -> {identifier, person_name}} (first link wins).
    by_person_service = {}
    person_links = (
        PersonIdentifier.objects.filter(user=user)
        .select_related("person")
        .order_by("person__name", "service", "identifier")
    )
    for link in person_links:
        person_id = str(link.person_id or "")
        if not person_id:
            continue
        service_key = _default_service(link.service)
        identifier_value = str(link.identifier or "").strip()
        if not identifier_value:
            continue
        by_person_service.setdefault(person_id, {})
        if service_key not in by_person_service[person_id]:
            by_person_service[person_id][service_key] = {
                "identifier": identifier_value,
                "person_name": str(link.person.name or "").strip() or identifier_value,
            }
    # Most-recently-messaged (service, identifier) keys, newest first.
    ordered_keys = []
    seen_keys = set()
    recent_values = (
        Message.objects.filter(
            user=user,
            session__identifier__isnull=False,
        )
        .values_list(
            "session__identifier__service",
            "session__identifier__identifier",
        )
        .order_by("-ts", "-id")[:1000]
    )
    for service_value, identifier_value in recent_values:
        key = (
            _default_service(service_value),
            str(identifier_value or "").strip(),
        )
        if not key[1] or key in seen_keys:
            continue
        seen_keys.add(key)
        ordered_keys.append(key)
        if len(ordered_keys) >= limit:
            break

    # Always pin the currently open contact to the front.
    current_key = (current_service_key, current_identifier_value)
    if current_key[1]:
        if current_key in ordered_keys:
            ordered_keys.remove(current_key)
        ordered_keys.insert(0, current_key)

    rows = []
    seen_people = set()
    seen_unknown = set()
    for service_key, identifier_value in ordered_keys:
        # Copy the known row so mutations below don't leak into row_by_key.
        row = dict(row_by_key.get((service_key, identifier_value)) or {})
        if not row:
            # Unknown contact: synthesize a minimal row.
            urls = _compose_urls(
                service_key,
                identifier_value,
                current_person.id if current_person else None,
            )
            row = {
                "person_name": identifier_value,
                "linked_person_name": "",
                "detected_name": "",
                "service": service_key,
                "service_icon_class": _service_icon_class(service_key),
                "identifier": identifier_value,
                "compose_url": urls["page_url"],
                "compose_widget_url": urls["widget_url"],
                "linked_person": False,
                "source": "recent",
            }
        row["person_id"] = str(row.get("person_id") or "")
        person_id = row["person_id"]
        if person_id:
            # Linked contact: one row per person.
            if person_id in seen_people:
                continue
            seen_people.add(person_id)
            service_map = dict(by_person_service.get(person_id) or {})
            if service_key not in service_map and identifier_value:
                service_map[service_key] = {
                    "identifier": identifier_value,
                    "person_name": str(row.get("person_name") or "").strip()
                    or identifier_value,
                }

            # Pick which service/identifier to feature on the row:
            # the current selection for the open person, else a fallback
            # in preference order.
            selected_service = service_key
            selected_identifier = identifier_value
            if person_id == current_person_id and current_service_key in service_map:
                selected_service = current_service_key
                selected_identifier = str(
                    (service_map.get(current_service_key) or {}).get("identifier") or ""
                ).strip()
            elif selected_service not in service_map:
                for fallback_service in ("whatsapp", "signal", "instagram", "xmpp"):
                    if fallback_service in service_map:
                        selected_service = fallback_service
                        selected_identifier = str(
                            (service_map.get(fallback_service) or {}).get("identifier")
                            or ""
                        ).strip()
                        break
            selected_identifier = selected_identifier or identifier_value
            selected_urls = _compose_urls(
                selected_service,
                selected_identifier,
                person_id,
            )

            row["service"] = selected_service
            row["service_label"] = _service_label(selected_service)
            row["identifier"] = selected_identifier
            row["compose_url"] = selected_urls["page_url"]
            row["compose_widget_url"] = selected_urls["widget_url"]
            row["person_name"] = (
                str(row.get("linked_person_name") or "").strip()
                or str(row.get("person_name") or "").strip()
                or selected_identifier
            )

            # Per-service identifier + compose URLs for the switcher UI.
            for svc in ("signal", "whatsapp", "instagram", "xmpp"):
                svc_identifier = str(
                    (service_map.get(svc) or {}).get("identifier") or ""
                ).strip()
                row[f"{svc}_identifier"] = svc_identifier
                if svc_identifier:
                    svc_urls = _compose_urls(svc, svc_identifier, person_id)
                    row[f"{svc}_compose_url"] = svc_urls["page_url"]
                    row[f"{svc}_compose_widget_url"] = svc_urls["widget_url"]
                else:
                    row[f"{svc}_compose_url"] = ""
                    row[f"{svc}_compose_widget_url"] = ""
        else:
            # Unlinked contact: dedupe by (service, identifier) instead.
            unknown_key = (service_key, identifier_value)
            if unknown_key in seen_unknown:
                continue
            seen_unknown.add(unknown_key)
            row["service_label"] = _service_label(service_key)
            for svc in ("signal", "whatsapp", "instagram", "xmpp"):
                row[f"{svc}_identifier"] = (
                    identifier_value if svc == service_key else ""
                )
                row[f"{svc}_compose_url"] = (
                    row.get("compose_url") if svc == service_key else ""
                )
                row[f"{svc}_compose_widget_url"] = (
                    row.get("compose_widget_url") if svc == service_key else ""
                )

        row["is_active"] = (
            row.get("service") == current_service_key
            and str(row.get("identifier") or "").strip() == current_identifier_value
        )
        rows.append(row)
        if len(rows) >= limit:
            break
    return rows
|
|
|
|
|
|
def _name_for_match(value: str) -> str:
|
|
lowered = re.sub(r"[^a-z0-9]+", " ", str(value or "").strip().lower())
|
|
return re.sub(r"\s+", " ", lowered).strip()
|
|
|
|
|
|
def _suggest_people_for_candidate(candidate: dict, people: list[Person]) -> list[dict]:
|
|
if not people:
|
|
return []
|
|
base_name = str(candidate.get("detected_name") or "").strip()
|
|
if not base_name:
|
|
return []
|
|
base_norm = _name_for_match(base_name)
|
|
if not base_norm:
|
|
return []
|
|
|
|
scored = []
|
|
base_tokens = {token for token in base_norm.split(" ") if token}
|
|
for person in people:
|
|
person_norm = _name_for_match(person.name)
|
|
if not person_norm:
|
|
continue
|
|
ratio = SequenceMatcher(None, base_norm, person_norm).ratio()
|
|
person_tokens = {token for token in person_norm.split(" ") if token}
|
|
overlap = 0.0
|
|
if base_tokens and person_tokens:
|
|
overlap = len(base_tokens & person_tokens) / max(
|
|
len(base_tokens), len(person_tokens)
|
|
)
|
|
score = max(ratio, overlap)
|
|
if score < 0.62:
|
|
continue
|
|
scored.append(
|
|
{
|
|
"person": person,
|
|
"score": score,
|
|
}
|
|
)
|
|
|
|
scored.sort(key=lambda item: item["score"], reverse=True)
|
|
return scored[:3]
|
|
|
|
|
|
def _load_messages(user, person_identifier, limit):
    """Fetch the chat session plus its most recent *limit* messages.

    Messages are returned oldest-first. Creates the session on demand;
    returns an empty bundle when no identifier is linked.
    """
    if person_identifier is None:
        return {"session": None, "messages": []}

    session, _created = ChatSession.objects.get_or_create(
        user=user,
        identifier=person_identifier,
    )
    newest_first = (
        Message.objects.filter(user=user, session=session)
        .select_related("session", "session__identifier", "session__identifier__person")
        .order_by("-ts")[:limit]
    )
    return {"session": session, "messages": list(newest_first)[::-1]}
|
|
|
|
|
|
def _panel_context(
    request,
    service: str,
    identifier: str,
    person: Person | None,
    render_mode: str,
    notice: str = "",
    level: str = "success",
):
    """Assemble the full template context for a compose panel render.

    Resolves the canonical contact, loads and serializes the message
    history, builds compose/AI/workspace URLs, the optional websocket
    URL, per-platform switcher options, typing state, and the recent
    contacts strip. ``render_mode`` and the notice are passed through
    to the template unchanged.
    """
    base = _context_base(request.user, service, identifier, person)
    limit = _safe_limit(request.GET.get("limit") or request.POST.get("limit"))
    session_bundle = _load_messages(request.user, base["person_identifier"], limit)
    conversation = _workspace_conversation_for_person(request.user, base["person"])
    counterpart_identifiers = _counterpart_identifiers_for_person(
        request.user, base["person"]
    )
    serialized_messages = _serialize_messages_with_artifacts(
        session_bundle["messages"],
        counterpart_identifiers=counterpart_identifiers,
        conversation=conversation,
    )
    glance_items = _build_glance_items(
        serialized_messages,
        person_id=(base["person"].id if base["person"] else None),
    )
    # Timestamp of the newest loaded message (messages are oldest-first).
    last_ts = 0
    if session_bundle["messages"]:
        last_ts = int(session_bundle["messages"][-1].ts or 0)
    urls = _compose_urls(
        base["service"],
        base["identifier"],
        base["person"].id if base["person"] else None,
    )
    ws_token = _compose_ws_token(
        user_id=request.user.id,
        service=base["service"],
        identifier=base["identifier"],
        person_id=base["person"].id if base["person"] else None,
    )
    # Websocket URL only when the feature flag is enabled in settings.
    ws_url = ""
    if bool(getattr(settings, "COMPOSE_WS_ENABLED", False)):
        ws_url = f"/ws/compose/thread/?{urlencode({'token': ws_token})}"

    # Platform switcher: one option per linked service (current selection
    # overrides the stored identifier for its service).
    platform_options = []
    if base["person"] is not None:
        linked_identifiers = list(
            PersonIdentifier.objects.filter(
                user=request.user,
                person=base["person"],
            ).order_by("service", "id")
        )
        by_service = {}
        for row in linked_identifiers:
            service_key = _default_service(row.service)
            identifier_value = str(row.identifier or "").strip()
            if not identifier_value:
                continue
            if service_key not in by_service:
                by_service[service_key] = identifier_value
        if base["service"] and base["identifier"]:
            by_service[base["service"]] = base["identifier"]

        for service_key in sorted(by_service.keys(), key=_service_order):
            identifier_value = by_service[service_key]
            option_urls = _compose_urls(
                service_key, identifier_value, base["person"].id
            )
            platform_options.append(
                {
                    "service": service_key,
                    "service_label": _service_label(service_key),
                    "identifier": identifier_value,
                    "person_id": str(base["person"].id),
                    "page_url": option_urls["page_url"],
                    "widget_url": option_urls["widget_url"],
                    "is_active": (
                        service_key == base["service"]
                        and identifier_value == base["identifier"]
                    ),
                }
            )
    elif base["identifier"]:
        # No linked person: offer only the current service/identifier.
        option_urls = _compose_urls(base["service"], base["identifier"], None)
        platform_options.append(
            {
                "service": base["service"],
                "service_label": _service_label(base["service"]),
                "identifier": base["identifier"],
                "person_id": "",
                "page_url": option_urls["page_url"],
                "widget_url": option_urls["widget_url"],
                "is_active": True,
            }
        )

    # Unique DOM id per render (time_ns makes repeated renders distinct).
    unique_raw = (
        f"{base['service']}|{base['identifier']}|{request.user.id}|{time.time_ns()}"
    )
    unique = hashlib.sha1(unique_raw.encode("utf-8")).hexdigest()[:12]
    typing_state = get_person_typing_state(
        user_id=request.user.id,
        person_id=base["person"].id if base["person"] else None,
    )
    recent_contacts = _recent_manual_contacts(
        request.user,
        current_service=base["service"],
        current_identifier=base["identifier"],
        current_person=base["person"],
        limit=12,
    )

    return {
        "service": base["service"],
        "identifier": base["identifier"],
        "person": base["person"],
        "person_identifier": base["person_identifier"],
        "session": session_bundle["session"],
        "messages": session_bundle["messages"],
        "serialized_messages": serialized_messages,
        "glance_items": glance_items,
        "glance_items_json": json.dumps(glance_items),
        "last_ts": last_ts,
        "limit": limit,
        "notice_message": notice,
        "notice_level": level,
        "render_mode": render_mode,
        "compose_page_url": urls["page_url"],
        "compose_widget_url": urls["widget_url"],
        "compose_workspace_url": (
            f"{urls['workspace_url']}&{urlencode({'limit': limit})}"
        ),
        "compose_drafts_url": reverse("compose_drafts"),
        "compose_summary_url": reverse("compose_summary"),
        "compose_engage_preview_url": reverse("compose_engage_preview"),
        "compose_engage_send_url": reverse("compose_engage_send"),
        "compose_quick_insights_url": reverse("compose_quick_insights"),
        "compose_history_sync_url": reverse("compose_history_sync"),
        "compose_ws_url": ws_url,
        "ai_workspace_url": (
            f"{reverse('ai_workspace')}?person={base['person'].id}"
            if base["person"]
            else reverse("ai_workspace")
        ),
        "ai_workspace_widget_url": (
            (
                f"{reverse('ai_workspace_person', kwargs={'type': 'widget', 'person_id': base['person'].id})}"
                f"?{urlencode({'limit': limit})}"
            )
            if base["person"]
            else ""
        ),
        "manual_icon_class": "fa-solid fa-paper-plane",
        "panel_id": f"compose-panel-{unique}",
        "typing_state_json": json.dumps(typing_state),
        "platform_options": platform_options,
        "recent_contacts": recent_contacts,
    }
|
|
|
|
|
|
class ComposeContactsDropdown(LoginRequiredMixin, View):
    """Render the navbar contacts dropdown (10-row preview or full list)."""

    def get(self, request):
        wants_all = str(request.GET.get("all") or "").strip().lower() in {
            "1",
            "true",
            "yes",
            "y",
            "all",
        }
        contact_rows = _manual_contact_rows(request.user)
        items = contact_rows if wants_all else contact_rows[:10]
        context = {
            "items": items,
            "manual_icon_class": "fa-solid fa-paper-plane",
            "is_preview": not wants_all,
            "fetch_contacts_url": f"{reverse('compose_contacts_dropdown')}?all=1",
            "match_url": reverse("compose_contact_match"),
        }
        return render(request, "partials/nav-contacts-dropdown.html", context)
|
|
|
|
|
|
class ComposeWorkspace(LoginRequiredMixin, View):
    """Full-page compose workspace: contacts widget plus optional open thread."""

    template_name = "pages/compose-workspace.html"

    def get(self, request):
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person_id = request.GET.get("person")
        person = (
            Person.objects.filter(id=person_id, user=request.user).first()
            if person_id
            else None
        )
        limit = _safe_limit(request.GET.get("limit") or 40)

        # Preload a thread widget when the caller already selected a contact.
        initial_widget_url = ""
        if identifier or person is not None:
            base = _context_base(request.user, service, identifier, person)
            if base["identifier"]:
                urls = _compose_urls(
                    base["service"],
                    base["identifier"],
                    base["person"].id if base["person"] else None,
                )
                initial_widget_url = (
                    f"{urls['widget_url']}&{urlencode({'limit': limit})}"
                )

        contacts_widget_url = (
            f"{reverse('compose_workspace_contacts_widget')}"
            f"?{urlencode({'limit': limit})}"
        )
        return render(
            request,
            self.template_name,
            {
                "contacts_widget_url": contacts_widget_url,
                "initial_widget_url": initial_widget_url,
            },
        )
|
|
|
|
|
|
class ComposeWorkspaceContactsWidget(LoginRequiredMixin, View):
    """Render the workspace contacts list as a window-manager widget."""

    def get(self, request):
        limit = _safe_limit(request.GET.get("limit") or 40)
        context = {
            "title": "Manual Workspace",
            "unique": "compose-workspace-contacts",
            "window_content": "partials/compose-workspace-contacts-widget.html",
            "widget_options": 'gs-w="4" gs-h="14" gs-x="0" gs-y="0" gs-min-w="3"',
            "contact_rows": _manual_contact_rows(request.user),
            "limit": limit,
            "limit_options": [20, 40, 60, 100, 200],
            "manual_icon_class": "fa-solid fa-paper-plane",
        }
        return render(request, "mixins/wm/widget.html", context)
|
|
|
|
|
|
class ComposeContactMatch(LoginRequiredMixin, View):
|
|
template_name = "pages/compose-contact-match.html"
|
|
|
|
def _service_choices(self):
|
|
return [
|
|
("signal", "Signal"),
|
|
("whatsapp", "WhatsApp"),
|
|
("instagram", "Instagram"),
|
|
("xmpp", "XMPP"),
|
|
]
|
|
|
|
    def _context(self, request, notice="", level="info"):
        """Build the match-page context: people plus candidate contact rows.

        Unlinked candidate rows are decorated with fuzzy person
        suggestions; linked rows get an empty suggestion list.
        """
        people_qs = (
            Person.objects.filter(user=request.user)
            .prefetch_related("personidentifier_set")
            .order_by("name")
        )
        people = list(people_qs)
        candidates = _manual_contact_rows(request.user)
        for row in candidates:
            row["suggestions"] = []
            if row.get("linked_person"):
                # Already linked — no suggestions needed.
                continue
            row["suggestions"] = _suggest_people_for_candidate(row, people)
        return {
            "people": people,
            "candidates": candidates,
            "service_choices": self._service_choices(),
            "notice_message": notice,
            "notice_level": level,
            "prefill_service": _default_service(request.GET.get("service")),
            "prefill_identifier": str(request.GET.get("identifier") or "").strip(),
        }
|
|
|
|
def get(self, request):
|
|
return render(request, self.template_name, self._context(request))
|
|
|
|
def _signal_companion_identifiers(self, identifier: str) -> set[str]:
|
|
value = str(identifier or "").strip()
|
|
if not value:
|
|
return set()
|
|
source_shape = _signal_identifier_shape(value)
|
|
companions = set()
|
|
signal_rows = Chat.objects.filter(source_uuid=value) | Chat.objects.filter(
|
|
source_number=value
|
|
)
|
|
for chat in signal_rows.order_by("-id")[:1000]:
|
|
for candidate in (chat.source_uuid, chat.source_number):
|
|
cleaned = str(candidate or "").strip()
|
|
if not cleaned or cleaned == value:
|
|
continue
|
|
# Keep auto-linking conservative: only same-shape companions.
|
|
if source_shape != "other":
|
|
candidate_shape = _signal_identifier_shape(cleaned)
|
|
if candidate_shape != source_shape:
|
|
continue
|
|
companions.add(cleaned)
|
|
return companions
|
|
|
|
def post(self, request):
|
|
person_id = str(request.POST.get("person_id") or "").strip()
|
|
person_name = str(request.POST.get("person_name") or "").strip()
|
|
service = _default_service(request.POST.get("service"))
|
|
identifier = str(request.POST.get("identifier") or "").strip()
|
|
if not identifier:
|
|
return render(
|
|
request,
|
|
self.template_name,
|
|
self._context(request, "Identifier is required.", "warning"),
|
|
)
|
|
person = None
|
|
if person_id:
|
|
person = Person.objects.filter(id=person_id, user=request.user).first()
|
|
if person is None and person_name:
|
|
person = Person.objects.create(user=request.user, name=person_name)
|
|
if person is None:
|
|
return render(
|
|
request,
|
|
self.template_name,
|
|
self._context(request, "Select a person or create one.", "warning"),
|
|
)
|
|
|
|
row = PersonIdentifier.objects.filter(
|
|
user=request.user,
|
|
service=service,
|
|
identifier=identifier,
|
|
).first()
|
|
if row is None:
|
|
PersonIdentifier.objects.create(
|
|
user=request.user,
|
|
person=person,
|
|
service=service,
|
|
identifier=identifier,
|
|
)
|
|
message = f"Linked {identifier} ({service}) to {person.name}."
|
|
else:
|
|
if row.person_id != person.id:
|
|
row.person = person
|
|
row.save(update_fields=["person"])
|
|
message = f"Re-linked {identifier} ({service}) to {person.name}."
|
|
else:
|
|
message = (
|
|
f"{identifier} ({service}) is already linked to {person.name}."
|
|
)
|
|
|
|
linked_companions = 0
|
|
skipped_companions = 0
|
|
if service == "signal":
|
|
companions = self._signal_companion_identifiers(identifier)
|
|
for candidate in companions:
|
|
existing = PersonIdentifier.objects.filter(
|
|
user=request.user,
|
|
service="signal",
|
|
identifier=candidate,
|
|
).first()
|
|
if existing is None:
|
|
PersonIdentifier.objects.create(
|
|
user=request.user,
|
|
person=person,
|
|
service="signal",
|
|
identifier=candidate,
|
|
)
|
|
linked_companions += 1
|
|
continue
|
|
if existing.person_id != person.id:
|
|
skipped_companions += 1
|
|
if linked_companions:
|
|
message = (
|
|
f"{message} Added {linked_companions} companion Signal identifier"
|
|
f"{'' if linked_companions == 1 else 's'}."
|
|
)
|
|
if skipped_companions:
|
|
message = (
|
|
f"{message} Skipped {skipped_companions} companion identifier"
|
|
f"{'' if skipped_companions == 1 else 's'} already linked to another person."
|
|
)
|
|
return render(
|
|
request,
|
|
self.template_name,
|
|
self._context(request, message, "success"),
|
|
)
|
|
|
|
|
|
class ComposePage(LoginRequiredMixin, View):
    """Full-page manual compose view for one contact."""

    template_name = "pages/compose.html"

    def get(self, request):
        """Resolve the target contact from query params and render the page."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person_param = request.GET.get("person")
        person = (
            get_object_or_404(Person, id=person_param, user=request.user)
            if person_param
            else None
        )
        # The contact must be addressable by raw identifier or linked person.
        if person is None and not identifier:
            return HttpResponseBadRequest("Missing contact identifier.")

        page_context = _panel_context(
            request=request,
            service=service,
            identifier=identifier,
            person=person,
            render_mode="page",
        )
        return render(request, self.template_name, page_context)
|
|
|
|
|
|
class ComposeWidget(LoginRequiredMixin, View):
    """Manual compose panel wrapped as a window-manager widget."""

    def get(self, request):
        """Resolve the contact, build the panel context, and render the widget."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person_param = request.GET.get("person")
        person = (
            get_object_or_404(Person, id=person_param, user=request.user)
            if person_param
            else None
        )
        if person is None and not identifier:
            return HttpResponseBadRequest("Missing contact identifier.")

        panel_context = _panel_context(
            request=request,
            service=service,
            identifier=identifier,
            person=person,
            render_mode="widget",
        )
        linked_person = panel_context["person"]
        if linked_person is not None:
            title_name = linked_person.name
        else:
            title_name = panel_context["identifier"]
        # NOTE: `**panel_context` is spread last on purpose, so any keys it
        # carries win over the widget chrome defaults below.
        widget_context = {
            "title": f"Manual Chat: {title_name}",
            "unique": f"compose-{panel_context['panel_id']}",
            "window_content": "partials/compose-panel.html",
            "widget_options": 'gs-w="6" gs-h="12" gs-x="0" gs-y="0" gs-min-w="4"',
            **panel_context,
        }
        return render(request, "mixins/wm/widget.html", widget_context)
|
|
|
|
|
|
class ComposeThread(LoginRequiredMixin, View):
    """JSON endpoint returning the message thread (and typing state) for a contact."""

    def get(self, request):
        """Return serialized messages newer than ``after_ts`` (with a small
        look-back window for out-of-order timestamps), the newest known
        timestamp, and the counterpart's typing state."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person = None
        person_id = request.GET.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return HttpResponseBadRequest("Missing contact identifier.")

        limit = _safe_limit(request.GET.get("limit") or 60)
        after_ts = _safe_after_ts(request.GET.get("after_ts"))
        base = _context_base(request.user, service, identifier, person)
        latest_ts = after_ts
        messages = []
        seed_previous = None
        # Gather every ChatSession that could hold messages for this scope
        # (person, service, identifier variants).
        session_ids = ComposeHistorySync._session_ids_for_scope(
            user=request.user,
            person=base["person"],
            service=base["service"],
            person_identifier=base["person_identifier"],
            explicit_identifier=base["identifier"],
        )
        if base["person_identifier"] is not None:
            session, _ = ChatSession.objects.get_or_create(
                user=request.user,
                identifier=base["person_identifier"],
            )
            # Don't convert UUIDs to int; keep them as UUIDs for the filter query
            session_ids = list({*session_ids, session.id})
        if session_ids:
            base_queryset = Message.objects.filter(
                user=request.user,
                session_id__in=session_ids,
            )
            queryset = base_queryset
            if after_ts > 0:
                # Newest message at-or-before the cursor, used to seed
                # day-separator/author-grouping logic on the client.
                seed_previous = (
                    base_queryset.filter(ts__lte=after_ts).order_by("-ts").first()
                )
                # Use a small rolling window to capture late/out-of-order timestamps.
                # Client-side dedupe by message id prevents duplicate rendering.
                window_start = max(0, int(after_ts) - 5 * 60 * 1000)
                queryset = queryset.filter(ts__gte=window_start)
            # Fetch newest-first for an efficient LIMIT, then flip to
            # chronological order for rendering.
            rows_desc = list(
                queryset.select_related(
                    "session",
                    "session__identifier",
                    "session__identifier__person",
                ).order_by("-ts")[:limit]
            )
            rows_desc.reverse()
            messages = rows_desc
            # Advance the client's cursor to the newest known timestamp,
            # even if that message fell outside the returned page.
            newest = (
                Message.objects.filter(
                    user=request.user,
                    session_id__in=session_ids,
                )
                .order_by("-ts")
                .values_list("ts", flat=True)
                .first()
            )
            if newest:
                latest_ts = max(latest_ts, int(newest))
        conversation = _workspace_conversation_for_person(request.user, base["person"])
        counterpart_identifiers = _counterpart_identifiers_for_person(
            request.user, base["person"]
        )
        payload = {
            "messages": _serialize_messages_with_artifacts(
                messages,
                counterpart_identifiers=counterpart_identifiers,
                conversation=conversation,
                seed_previous=seed_previous,
            ),
            "last_ts": latest_ts,
            "typing": get_person_typing_state(
                user_id=request.user.id,
                person_id=base["person"].id if base["person"] else None,
            ),
        }
        return JsonResponse(payload)
|
|
|
|
|
|
class ComposeHistorySync(LoginRequiredMixin, View):
    """Force a runtime history sync (WhatsApp only) and report message deltas."""

    @staticmethod
    def _session_ids_for_identifier(user, person_identifier):
        """Return ChatSession ids bound to one PersonIdentifier (empty if None)."""
        if person_identifier is None:
            return []
        return list(
            ChatSession.objects.filter(
                user=user,
                identifier=person_identifier,
            ).values_list("id", flat=True)
        )

    @staticmethod
    def _identifier_variants(service: str, identifier: str):
        """Expand a raw identifier into equivalent service-specific spellings
        (e.g. WhatsApp digits / +digits / JID; Signal phone variants)."""
        raw = str(identifier or "").strip()
        if not raw:
            return []
        values = {raw}
        if service == "whatsapp":
            digits = re.sub(r"[^0-9]", "", raw)
            if digits:
                values.add(digits)
                values.add(f"+{digits}")
                values.add(f"{digits}@s.whatsapp.net")
            if "@" in raw:
                local = raw.split("@", 1)[0].strip()
                if local:
                    values.add(local)
        elif service == "signal":
            # Signal identifiers can be UUID or phone number
            digits = re.sub(r"[^0-9]", "", raw)
            # Parses as `digits and not (raw.count("-") >= 4)`: skip
            # UUID-looking values (4+ hyphens) when adding phone variants.
            if digits and not raw.count("-") >= 4:
                # Likely a phone number; add variants
                values.add(digits)
                values.add(f"+{digits}")
            # If it looks like a UUID (has hyphens), keep only the original format
            # Signal UUIDs are strict and don't have variants
        return [value for value in values if value]

    @classmethod
    def _session_ids_for_scope(
        cls,
        user,
        person,
        service: str,
        person_identifier,
        explicit_identifier: str,
    ):
        """Collect ChatSession ids for every PersonIdentifier in scope:
        the explicit identifier (and its variants), plus all of the
        person's identifiers on this service."""
        identifiers = []
        if person_identifier is not None:
            identifiers.append(person_identifier)
        if person is not None:
            identifiers.extend(
                list(
                    PersonIdentifier.objects.filter(
                        user=user,
                        person=person,
                        service=service,
                    )
                )
            )
        variants = cls._identifier_variants(service, explicit_identifier)
        if variants:
            variant_qs = PersonIdentifier.objects.filter(
                user=user,
                service=service,
                identifier__in=variants,
            )
            # When a person is known, don't pull in identifiers that belong
            # to someone else.
            if person is not None:
                variant_qs = variant_qs.filter(person=person)
            identifiers.extend(list(variant_qs))
        # De-duplicate by PersonIdentifier id, preserving first-seen order.
        unique_ids = []
        seen = set()
        for row in identifiers:
            row_id = int(row.id)
            if row_id in seen:
                continue
            seen.add(row_id)
            unique_ids.append(row_id)
        if not unique_ids:
            return []
        result = list(
            ChatSession.objects.filter(
                user=user,
                identifier_id__in=unique_ids,
            ).values_list("id", flat=True)
        )
        return result

    @staticmethod
    def _reconcile_duplicate_messages(user, session_ids):
        """Delete duplicate messages (same session/ts/sender/text/author),
        keeping the lowest-id row. Returns the number removed."""
        if not session_ids:
            return 0
        rows = list(
            Message.objects.filter(
                user=user,
                session_id__in=session_ids,
            )
            # Ascending id order makes the earliest row the keeper.
            .order_by("id")
            .values("id", "session_id", "ts", "sender_uuid", "text", "custom_author")
        )
        seen = {}
        duplicate_ids = []
        for row in rows:
            dedupe_key = (
                int(row.get("session_id") or 0),
                int(row.get("ts") or 0),
                str(row.get("sender_uuid") or ""),
                str(row.get("text") or ""),
                str(row.get("custom_author") or ""),
            )
            if dedupe_key in seen:
                duplicate_ids.append(row["id"])
                continue
            seen[dedupe_key] = row["id"]
        if not duplicate_ids:
            return 0
        Message.objects.filter(user=user, id__in=duplicate_ids).delete()
        return len(duplicate_ids)

    def post(self, request):
        """Run a forced history sync for the contact and return a JSON report
        of imported messages and reconciled duplicates."""
        service = _default_service(request.POST.get("service"))
        identifier = str(request.POST.get("identifier") or "").strip()
        person = None
        person_id = request.POST.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return JsonResponse(
                {
                    "ok": False,
                    "message": "Missing contact identifier.",
                    "level": "danger",
                }
            )

        base = _context_base(request.user, service, identifier, person)
        if base["person_identifier"] is None:
            return JsonResponse(
                {
                    "ok": False,
                    "message": "No linked identifier for this contact yet.",
                    "level": "warning",
                }
            )

        # Snapshot the message count before the sync so we can report deltas.
        session_ids = self._session_ids_for_scope(
            user=request.user,
            person=base["person"],
            service=base["service"],
            person_identifier=base["person_identifier"],
            explicit_identifier=base["identifier"],
        )
        before_count = 0
        if session_ids:
            before_count = Message.objects.filter(
                user=request.user,
                session_id__in=session_ids,
            ).count()

        runtime_result = {}
        if base["service"] == "whatsapp":
            # Ask the runtime to sync, then block (up to 25s) for its result.
            command_id = transport.enqueue_runtime_command(
                "whatsapp",
                "force_history_sync",
                {
                    "identifier": base["identifier"],
                    "person_id": str(base["person"].id) if base["person"] else "",
                },
            )
            runtime_result = async_to_sync(transport.wait_runtime_command_result)(
                "whatsapp",
                command_id,
                timeout=25,
            )
            if runtime_result is None:
                return JsonResponse(
                    {
                        "ok": False,
                        "message": (
                            "History sync timed out. Runtime may still be processing; "
                            "watch Runtime Debug and retry."
                        ),
                        "level": "warning",
                    }
                )
            if not runtime_result.get("ok"):
                error_text = str(runtime_result.get("error") or "history_sync_failed")
                return JsonResponse(
                    {
                        "ok": False,
                        "message": f"History sync failed: {error_text}",
                        "level": "danger",
                    }
                )
        else:
            return JsonResponse(
                {
                    "ok": False,
                    "message": (
                        f"Force history sync is only available for WhatsApp right now "
                        f"(current: {base['service']})."
                    ),
                    "level": "warning",
                }
            )

        # Re-resolve sessions: the sync may have created new ones.
        session_ids = self._session_ids_for_scope(
            user=request.user,
            person=base["person"],
            service=base["service"],
            person_identifier=base["person_identifier"],
            explicit_identifier=base["identifier"],
        )
        raw_after_count = 0
        if session_ids:
            raw_after_count = Message.objects.filter(
                user=request.user,
                session_id__in=session_ids,
            ).count()
        dedup_removed = self._reconcile_duplicate_messages(request.user, session_ids)
        after_count = raw_after_count
        # Only re-count when dedupe actually deleted rows.
        if dedup_removed > 0:
            after_count = Message.objects.filter(
                user=request.user,
                session_id__in=session_ids,
            ).count()

        # imported = rows added by the sync (pre-dedupe);
        # net_new = rows remaining after dedupe.
        imported_count = max(0, int(raw_after_count) - int(before_count))
        net_new_count = max(0, int(after_count) - int(before_count))
        delta = max(0, int(after_count) - int(before_count))
        if delta > 0:
            detail = []
            if imported_count:
                detail.append(f"imported {imported_count}")
            if dedup_removed:
                detail.append(f"reconciled {dedup_removed} duplicate(s)")
            suffix = f" ({', '.join(detail)})" if detail else ""
            return JsonResponse(
                {
                    "ok": True,
                    "message": f"History sync complete. Net +{net_new_count} message(s){suffix}.",
                    "level": "success",
                    "new_messages": net_new_count,
                    "imported_messages": imported_count,
                    "reconciled_duplicates": dedup_removed,
                    "before": before_count,
                    "after": after_count,
                    "runtime_result": runtime_result,
                }
            )
        if dedup_removed > 0:
            return JsonResponse(
                {
                    "ok": True,
                    "message": (
                        f"History sync complete. Reconciled {dedup_removed} duplicate message(s)."
                    ),
                    "level": "success",
                    "new_messages": 0,
                    "imported_messages": imported_count,
                    "reconciled_duplicates": dedup_removed,
                    "before": before_count,
                    "after": after_count,
                    "runtime_result": runtime_result,
                }
            )
        # Nothing changed: explain whether the runtime simply cannot expose
        # history yet, or there was genuinely nothing new.
        return JsonResponse(
            {
                "ok": True,
                "message": (
                    (
                        "History sync completed, but this WhatsApp runtime session does not expose "
                        "message text history yet "
                        f"({str(runtime_result.get('sqlite_error') or 'no_message_history_source')}). "
                        "Live incoming/outgoing messages will continue to sync."
                    )
                    if str(runtime_result.get("sqlite_error") or "").strip()
                    else "History sync completed. No new messages were found yet; retry in a few seconds."
                ),
                "level": "info",
                "new_messages": 0,
                "imported_messages": imported_count,
                "reconciled_duplicates": dedup_removed,
                "before": before_count,
                "after": after_count,
                "runtime_result": runtime_result,
            }
        )
|
|
|
|
|
|
class ComposeCancelSend(LoginRequiredMixin, View):
    """Cancel queued outbound runtime commands for a recipient."""

    def post(self, request):
        """Cancel one command (when `command_id` is given) or all queued
        commands addressed to the identifier."""
        service = _default_service(request.POST.get("service"))
        identifier = str(request.POST.get("identifier") or "").strip()
        command_id = str(request.POST.get("command_id") or "").strip()
        if not identifier:
            return JsonResponse({"ok": False, "error": "missing_identifier"})
        # If a specific command_id is supplied, cancel that command only.
        if command_id:
            was_cancelled = transport.cancel_runtime_command(service, command_id)
            cancelled_ids = [command_id] if was_cancelled else []
            return JsonResponse({"ok": True, "cancelled": cancelled_ids})
        cancelled_ids = transport.cancel_runtime_commands_for_recipient(
            service, identifier
        )
        return JsonResponse({"ok": True, "cancelled": cancelled_ids})
|
|
|
|
|
|
class ComposeCommandResult(LoginRequiredMixin, View):
    """Return the runtime command result for a queued send (if available).

    GET parameters: `service`, `command_id`.
    Returns JSON: if pending -> {"pending": True}, else returns the result dict.
    """

    def get(self, request):
        """Poll a runtime command without blocking; render an HTMX status
        partial or a JSON payload depending on the caller."""
        # Commands older than this with no result are reported as timed out.
        timeout_s = 30.0
        # ?format=json forces JSON even for HTMX requests.
        force_json = str(request.GET.get("format") or "").strip().lower() == "json"
        is_hx_request = (
            str(request.headers.get("HX-Request") or "").strip().lower() == "true"
        ) and not force_json
        service = _default_service(request.GET.get("service"))
        command_id = str(request.GET.get("command_id") or "").strip()
        if not command_id:
            if is_hx_request:
                return render(
                    request,
                    "partials/compose-send-status.html",
                    {
                        "notice_message": "Missing command id.",
                        "notice_level": "warning",
                    },
                )
            return JsonResponse(
                {"ok": False, "error": "missing_command_id"}, status=400
            )
        # Non-blocking check for runtime command result
        result = async_to_sync(transport.wait_runtime_command_result)(
            service, command_id, timeout=0.1
        )
        if result is None:
            # No result yet: treat old commands as timed out, otherwise
            # answer 204 so HTMX pollers keep waiting.
            age_s = transport.runtime_command_age_seconds(service, command_id)
            if age_s is not None and age_s >= timeout_s:
                timeout_result = {
                    "ok": False,
                    "error": f"runtime_command_timeout:{int(timeout_s)}s",
                }
                if is_hx_request:
                    return render(
                        request,
                        "partials/compose-send-status.html",
                        {
                            "notice_message": str(
                                timeout_result.get("error") or "Send failed."
                            ),
                            "notice_level": "danger",
                        },
                    )
                return JsonResponse({"pending": False, "result": timeout_result})
            return HttpResponse(status=204)
        if is_hx_request:
            # Defensive: the runtime result should be a dict, but don't trust it.
            ok = bool(result.get("ok")) if isinstance(result, dict) else False
            message = "" if ok else str((result or {}).get("error") or "Send failed.")
            level = "success" if ok else "danger"
            return render(
                request,
                "partials/compose-send-status.html",
                {
                    "notice_message": message,
                    "notice_level": level,
                },
            )
        return JsonResponse({"pending": False, "result": result})
|
|
|
|
|
|
class ComposeMediaBlob(LoginRequiredMixin, View):
    """
    Serve cached media blobs for authenticated compose image previews.
    """

    def get(self, request):
        """Return the cached blob identified by ``?key=`` as an inline response.

        Responds 400 when the key is missing and 404 when the media bridge
        has no blob for it.
        """
        blob_key = str(request.GET.get("key") or "").strip()
        if not blob_key:
            return HttpResponseBadRequest("Missing blob key.")

        row = media_bridge.get_blob(blob_key)
        if not row:
            return HttpResponseNotFound("Blob not found.")

        content = row.get("content") or b""
        content_type = str(row.get("content_type") or "application/octet-stream")
        filename = str(row.get("filename") or "attachment.bin")
        # Strip characters that would terminate or inject into the quoted
        # Content-Disposition filename parameter.
        safe_filename = (
            filename.replace("\\", "")
            .replace('"', "")
            .replace("\r", "")
            .replace("\n", "")
        )
        response = HttpResponse(content, content_type=content_type)
        response["Content-Length"] = str(len(content))
        # Bug fix: the header previously hard-coded the literal "(unknown)"
        # and the computed filename was never used.
        response["Content-Disposition"] = f'inline; filename="{safe_filename}"'
        return response
|
|
|
|
|
|
class ComposeDrafts(LoginRequiredMixin, View):
    """JSON endpoint producing AI-suggested reply drafts (cached per thread state)."""

    def get(self, request):
        """Return drafts for the contact, serving from cache when the thread
        has not advanced; falls back to canned drafts when AI is unavailable."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person = None
        person_id = request.GET.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return JsonResponse({"ok": False, "error": "Missing contact identifier."})

        base = _context_base(request.user, service, identifier, person)
        limit = _safe_limit(request.GET.get("limit") or 60)
        messages = _messages_for_ai(request.user, base["person_identifier"], limit)
        if not messages:
            # Nothing to summarize: serve canned drafts without caching.
            return JsonResponse(
                {
                    "ok": True,
                    "cached": False,
                    "drafts": _fallback_drafts(),
                }
            )

        # The cache key includes the newest message ts, so any new message
        # naturally invalidates the cached drafts.
        last_ts = int(messages[-1].ts or 0)
        cache_key = _compose_ai_cache_key(
            "drafts",
            request.user.id,
            base["service"],
            base["identifier"],
            base["person"].id if base["person"] else "",
            last_ts,
            limit,
        )
        cached = cache.get(cache_key)
        if cached:
            return JsonResponse({"ok": True, "cached": True, "drafts": cached})

        ai_obj = AI.objects.filter(user=request.user).first()
        transcript = messages_to_string(
            messages,
            author_rewrites={
                "USER": _owner_name(request.user),
                "BOT": "Assistant",
            },
        )
        # Best-effort AI generation: any failure silently keeps the fallback.
        drafts = _fallback_drafts()
        if ai_obj is not None:
            try:
                result = async_to_sync(ai_runner.run_prompt)(
                    _build_draft_prompt(
                        owner_name=_owner_name(request.user),
                        person_name=base["person"].name if base["person"] else "Other",
                        transcript=transcript,
                    ),
                    ai_obj,
                )
                parsed = _parse_draft_options(result)
                if parsed:
                    drafts = parsed
            except Exception:
                pass

        cache.set(cache_key, drafts, timeout=COMPOSE_AI_CACHE_TTL)
        return JsonResponse({"ok": True, "cached": False, "drafts": drafts})
|
|
|
|
|
|
class ComposeSummary(LoginRequiredMixin, View):
    """JSON endpoint producing an AI conversation summary (cached per thread state)."""

    def get(self, request):
        """Return a cached or freshly generated summary of the thread."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person = None
        person_id = request.GET.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return JsonResponse({"ok": False, "error": "Missing contact identifier."})

        base = _context_base(request.user, service, identifier, person)
        limit = _safe_limit(request.GET.get("limit") or 60)
        messages = _messages_for_ai(request.user, base["person_identifier"], limit)
        if not messages:
            return JsonResponse({"ok": True, "cached": False, "summary": ""})

        # Key on the newest message ts so new activity invalidates the cache.
        last_ts = int(messages[-1].ts or 0)
        cache_key = _compose_ai_cache_key(
            "summary",
            request.user.id,
            base["service"],
            base["identifier"],
            base["person"].id if base["person"] else "",
            last_ts,
            limit,
        )
        cached = cache.get(cache_key)
        if cached:
            return JsonResponse({"ok": True, "cached": True, "summary": cached})

        ai_obj = AI.objects.filter(user=request.user).first()
        transcript = messages_to_string(
            messages,
            author_rewrites={
                "USER": _owner_name(request.user),
                "BOT": "Assistant",
            },
        )
        if ai_obj is None:
            # Static placeholder keeps the widget populated when no AI is
            # configured; cached so repeat polls stay cheap.
            fallback = (
                "Headlines:\n"
                "- Conversation loaded.\n"
                "Patterns:\n"
                "- Not enough AI context configured yet.\n"
                "Suggested Next Message:\n"
                "- I want us to keep this clear and constructive."
            )
            cache.set(cache_key, fallback, timeout=COMPOSE_AI_CACHE_TTL)
            return JsonResponse({"ok": True, "cached": False, "summary": fallback})

        try:
            summary = async_to_sync(ai_runner.run_prompt)(
                _build_summary_prompt(
                    owner_name=_owner_name(request.user),
                    person_name=base["person"].name if base["person"] else "Other",
                    transcript=transcript,
                ),
                ai_obj,
            )
        except Exception as exc:
            # Surface the AI failure to the client instead of caching it.
            return JsonResponse({"ok": False, "error": str(exc)})

        summary = str(summary or "").strip()
        cache.set(cache_key, summary, timeout=COMPOSE_AI_CACHE_TTL)
        return JsonResponse({"ok": True, "cached": False, "summary": summary})
|
|
|
|
|
|
class ComposeQuickInsights(LoginRequiredMixin, View):
    """JSON metric summary for the most recent workspace conversation of a person."""

    def get(self, request):
        """Return the insight summary and metric rows; requires a linked person."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person = None
        person_id = request.GET.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return JsonResponse({"ok": False, "error": "Missing contact identifier."})

        base = _context_base(request.user, service, identifier, person)
        # Prefer the resolved person from the context base (may differ from
        # the raw query param when only an identifier was supplied).
        person = base["person"]
        if person is None:
            return JsonResponse(
                {
                    "ok": False,
                    "error": "Quick Insights needs a linked person.",
                }
            )

        # Latest conversation this person participates in.
        conversation = (
            WorkspaceConversation.objects.filter(
                user=request.user,
                participants=person,
            )
            .order_by("-last_event_ts", "-created_at")
            .first()
        )
        if conversation is None:
            # Empty payload mirrors the full payload shape so the client
            # widget can render placeholders without special-casing.
            return JsonResponse(
                {
                    "ok": True,
                    "empty": True,
                    "summary": {
                        "person_name": person.name,
                        "platform": "",
                        "state": "Calibrating",
                        "thread": "",
                        "last_event": "",
                        "last_ai_run": "",
                        "workspace_created": "",
                        "snapshot_count": 0,
                        "platform_docs": _metric_copy("platform", "Platform"),
                        "state_docs": _metric_copy(
                            "stability_state", "Participant State"
                        ),
                        "thread_docs": _metric_copy("thread", "Thread"),
                        "snapshot_docs": {
                            "calculation": (
                                "Count of stored workspace metric snapshots for this person."
                            ),
                            "psychology": (
                                "More points improve trend reliability; sparse points are "
                                "best treated as directional signals."
                            ),
                        },
                    },
                    "rows": [],
                    "docs": [
                        "Quick Insights needs at least one workspace conversation snapshot.",
                        "Run AI operations in AI Workspace to generate the first data points.",
                    ],
                }
            )

        payload = _quick_insights_rows(conversation)
        participant_state = _participant_feedback_state_label(conversation, person)
        selected_platform_label = _service_label(base["service"])
        return JsonResponse(
            {
                "ok": True,
                "empty": False,
                "summary": {
                    "person_name": person.name,
                    "platform": selected_platform_label,
                    "platform_scope": "All linked platforms",
                    # Participant feedback label wins over the model's
                    # stability-state display when available.
                    "state": participant_state
                    or conversation.get_stability_state_display(),
                    "stability_state": conversation.get_stability_state_display(),
                    "thread": conversation.platform_thread_id or "",
                    "last_event": _format_ts_label(conversation.last_event_ts or 0)
                    if conversation.last_event_ts
                    else "",
                    "last_ai_run": (
                        dj_timezone.localtime(conversation.last_ai_run_at).strftime(
                            "%Y-%m-%d %H:%M"
                        )
                        if conversation.last_ai_run_at
                        else ""
                    ),
                    "workspace_created": dj_timezone.localtime(
                        conversation.created_at
                    ).strftime("%Y-%m-%d %H:%M"),
                    "snapshot_count": payload["snapshot_count"],
                    "platform_docs": _metric_copy("platform", "Platform"),
                    "state_docs": _metric_copy("stability_state", "Participant State"),
                    "thread_docs": _metric_copy("thread", "Thread"),
                    "snapshot_docs": {
                        "calculation": (
                            "Count of stored workspace metric snapshots for this person."
                        ),
                        "psychology": (
                            "More points improve trend reliability; sparse points are "
                            "best treated as directional signals."
                        ),
                    },
                },
                "rows": payload["rows"],
                "docs": [
                    "Each row shows current value, percent change vs previous point, and data-point count.",
                    "Arrow color indicates improving or risk direction for that metric.",
                    "State uses participant feedback (Withdrawing/Overextending/Balanced) when available.",
                    "Values are computed from all linked platform messages for this person.",
                    "Face indicator maps value range to positive, mixed, or strained climate.",
                    "Use this card for fast triage; open AI Workspace for full graphs and details.",
                ],
            }
        )
|
|
|
|
|
|
class ComposeEngagePreview(LoginRequiredMixin, View):
    """Build an engage-message preview plus a signed token authorizing its send."""

    def get(self, request):
        """Resolve the engage source (custom / explicit ref / auto fallback),
        build the preview and outbound text, and sign a short-lived send token."""
        service = _default_service(request.GET.get("service"))
        identifier = str(request.GET.get("identifier") or "").strip()
        person = None
        person_id = request.GET.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return JsonResponse({"ok": False, "error": "Missing contact identifier."})

        base = _context_base(request.user, service, identifier, person)
        limit = _safe_limit(request.GET.get("limit") or 60)
        messages = _messages_for_ai(request.user, base["person_identifier"], limit)
        transcript = messages_to_string(
            messages,
            author_rewrites={
                "USER": _owner_name(request.user),
                "BOT": "Assistant",
            },
        )

        owner_name = _owner_name(request.user)
        recipient_name = base["person"].name if base["person"] else "Other"
        plan = _latest_plan_for_person(request.user, base["person"])
        # Dropdown options: "Auto" first, plan-derived sources, then "Custom".
        source_options = _engage_source_options(plan)
        source_options_with_custom = (
            [{"value": "auto", "label": "Auto"}]
            + source_options
            + [{"value": "custom", "label": "Custom"}]
        )
        source_ref = str(request.GET.get("source_ref") or "auto").strip().lower()
        custom_text = str(request.GET.get("custom_text") or "").strip()

        source_obj = None
        source_kind = ""
        selected_source = source_ref if source_ref else "auto"
        if selected_source == "custom":
            # No-op assignment: custom sources need no plan lookup.
            selected_source = "custom"
        else:
            if selected_source == "auto":
                fallback_obj, fallback_kind = _best_engage_source(plan)
                if fallback_obj is not None:
                    source_obj = fallback_obj
                    source_kind = fallback_kind
            else:
                source_obj, source_kind, explicit_ref = _engage_source_from_ref(
                    plan,
                    selected_source,
                )
                if source_obj is None:
                    # Unknown explicit ref: fall back to the best auto source.
                    selected_source = "auto"
                    fallback_obj, fallback_kind = _best_engage_source(plan)
                    if fallback_obj is not None:
                        source_obj = fallback_obj
                        source_kind = fallback_kind
                else:
                    # Echo back the normalized ref so the UI keeps it selected.
                    selected_source = explicit_ref

        preview = ""
        outbound = ""
        artifact_label = "AI-generated"
        if selected_source == "custom":
            outbound = _plain_text(custom_text)
            if outbound:
                preview = f"**Custom Engage** (Correction)\n\nGuidance:\n{outbound}"
                artifact_label = "Custom"
            else:
                preview = (
                    "**Custom Engage** (Correction)\n\nGuidance:\n"
                    "Enter your custom engagement text to preview."
                )
        elif source_obj is not None:
            # Plan-backed source: render via the shared engage payload builder.
            payload = _build_engage_payload(
                source_obj=source_obj,
                source_kind=source_kind,
                share_target="other",
                framing="shared",
                context_note="",
                owner_name=owner_name,
                recipient_name=recipient_name,
            )
            preview = str(payload.get("preview") or "").strip()
            outbound = _engage_body_only(payload.get("outbound") or "")
            artifact_label = (
                f"{source_kind.title()}: {getattr(source_obj, 'title', '')}"
            )
        else:
            # No plan source available: generate via AI, with a static
            # last-resort message if that fails or no AI is configured.
            ai_obj = AI.objects.filter(user=request.user).first()
            if ai_obj is not None:
                try:
                    generated = async_to_sync(ai_runner.run_prompt)(
                        _build_engage_prompt(owner_name, recipient_name, transcript),
                        ai_obj,
                    )
                    outbound = _plain_text(generated)
                except Exception:
                    outbound = ""
            if not outbound:
                outbound = (
                    "We should slow down, clarify what we mean, and respond with care."
                )
            preview = f"**Shared Engage** (Correction)\n\nGuidance:\n{outbound}"

        token = ""
        if outbound:
            # Short-lived (10 min) signed token binds this exact outbound text
            # to the user/contact so ComposeEngageSend can verify it.
            token = signing.dumps(
                {
                    "u": request.user.id,
                    "s": base["service"],
                    "i": base["identifier"],
                    "p": str(base["person"].id) if base["person"] else "",
                    "outbound": outbound,
                    "exp": int(time.time()) + (60 * 10),
                },
                salt=COMPOSE_ENGAGE_TOKEN_SALT,
            )
        return JsonResponse(
            {
                "ok": True,
                "preview": preview,
                "outbound": outbound,
                "token": token,
                "artifact": artifact_label,
                "options": source_options_with_custom,
                "selected_source": selected_source,
                "custom_text": custom_text,
            }
        )
|
|
|
|
|
|
class ComposeEngageSend(LoginRequiredMixin, View):
    """Send a previously previewed "engage" message to a contact.

    Expects a short-lived signed token minted by the preview endpoint.  The
    token carries the outbound text plus the contact context it was generated
    for; the send is additionally gated behind an explicit two-step
    confirmation (``failsafe_arm`` + ``failsafe_confirm``) so it cannot fire
    accidentally.  All failures are reported as ``{"ok": False, "error": ...}``
    JSON rather than HTTP error statuses.
    """

    def post(self, request):
        service = _default_service(request.POST.get("service"))
        identifier = str(request.POST.get("identifier") or "").strip()
        person = None
        person_id = request.POST.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        if not identifier and person is None:
            return JsonResponse({"ok": False, "error": "Missing contact identifier."})

        # Both halves of the failsafe must be set; one flag alone is not enough.
        failsafe_arm = str(request.POST.get("failsafe_arm") or "").strip()
        failsafe_confirm = str(request.POST.get("failsafe_confirm") or "").strip()
        if failsafe_arm != "1" or failsafe_confirm != "1":
            return JsonResponse(
                {"ok": False, "error": "Enable send confirmation before sending."}
            )

        token = str(request.POST.get("engage_token") or "").strip()
        if not token:
            return JsonResponse({"ok": False, "error": "Missing engage token."})
        try:
            payload = signing.loads(token, salt=COMPOSE_ENGAGE_TOKEN_SALT)
        except Exception:
            return JsonResponse({"ok": False, "error": "Invalid engage token."})

        if int(payload.get("u") or 0) != int(request.user.id):
            return JsonResponse({"ok": False, "error": "Token does not match user."})
        if int(payload.get("exp") or 0) < int(time.time()):
            return JsonResponse({"ok": False, "error": "Engage token expired."})
        outbound = str(payload.get("outbound") or "").strip()
        if not outbound:
            return JsonResponse({"ok": False, "error": "Empty engage payload."})

        base = _context_base(request.user, service, identifier, person)

        # Bind the token to the contact it was previewed for.  The preview
        # endpoint stores the resolved service ("s") and identifier ("i") in
        # the token; without this check a token minted for one conversation
        # could be replayed to send the same text to a different contact.
        # Only enforced when the token carries the fields, so older tokens
        # (or ones minted without a resolved contact) keep working.
        token_service = str(payload.get("s") or "")
        token_identifier = str(payload.get("i") or "")
        if token_service and token_service != base["service"]:
            return JsonResponse({"ok": False, "error": "Token does not match contact."})
        if token_identifier and token_identifier != base["identifier"]:
            return JsonResponse({"ok": False, "error": "Token does not match contact."})

        ts = async_to_sync(transport.send_message_raw)(
            base["service"],
            base["identifier"],
            text=outbound,
            attachments=[],
        )
        if not ts:
            return JsonResponse({"ok": False, "error": "Send failed."})

        # Mirror the outgoing message locally so the thread shows it
        # immediately, without waiting for any transport echo.
        if base["person_identifier"] is not None:
            session, _ = ChatSession.objects.get_or_create(
                user=request.user,
                identifier=base["person_identifier"],
            )
            # Transport may return a real message timestamp (ms) or a bare
            # truthy flag; fall back to local wall-clock time in the latter
            # case, and only mark delivered when we got a real timestamp.
            ts_value = int(ts) if str(ts).isdigit() else int(time.time() * 1000)
            Message.objects.create(
                user=request.user,
                session=session,
                sender_uuid="",
                text=outbound,
                ts=ts_value,
                delivered_ts=ts_value if str(ts).isdigit() else None,
                custom_author="USER",
            )

        return JsonResponse({"ok": True, "message": "Shared engage sent."})
|
|
|
|
|
|
class ComposeSend(LoginRequiredMixin, View):
    """HTMX endpoint that sends (or queues) a plain-text compose message.

    The response is always a rendered status partial; machine-readable
    results ride along in the ``HX-Trigger`` response header so the client
    can react (show notices, clear the input, offer cancellation) without
    parsing the HTML.
    """

    @staticmethod
    def _response(request, *, ok, message="", level="info", panel_id=""):
        """Render the status partial and attach structured results via HX-Trigger.

        ``ok``/``message``/``level``/``panel_id`` are echoed into the
        ``composeSendResult`` trigger payload; on success a separate
        ``composeMessageSent`` trigger is added so the client can refresh
        the thread for ``panel_id``.
        """
        response = render(
            request,
            "partials/compose-send-status.html",
            {
                "notice_message": message,
                "notice_level": level,
            },
        )
        trigger_payload = {
            "composeSendResult": {
                "ok": bool(ok),
                "message": str(message or ""),
                "level": str(level or "info"),
                "panel_id": str(panel_id or ""),
            }
        }
        # Optional: include command id to allow client-side cancellation UI.
        # `post` stashes the queued-command id on the request object when the
        # send was enqueued rather than performed synchronously.
        if hasattr(request, "_compose_command_id") and request._compose_command_id:
            trigger_payload["composeSendCommandId"] = {
                "command_id": str(request._compose_command_id)
            }
        if ok:
            trigger_payload["composeMessageSent"] = {"panel_id": str(panel_id or "")}
        response["HX-Trigger"] = json.dumps(trigger_payload)
        return response

    def post(self, request):
        """Validate the request, send or enqueue the message, mirror it locally."""
        service = _default_service(request.POST.get("service"))
        identifier = str(request.POST.get("identifier") or "").strip()
        person = None
        person_id = request.POST.get("person")
        if person_id:
            person = get_object_or_404(Person, id=person_id, user=request.user)
        # Normalize the requested render mode to a known value.
        # NOTE(review): render_mode is normalized but not read again below —
        # confirm whether it is still needed or can be dropped.
        render_mode = str(request.POST.get("render_mode") or "page").strip().lower()
        if render_mode not in {"page", "widget"}:
            render_mode = "page"
        panel_id = str(request.POST.get("panel_id") or "").strip()

        if not identifier and person is None:
            return HttpResponseBadRequest("Missing contact identifier.")

        # Two-step confirmation: both flags must be explicitly "1".
        failsafe_arm = str(request.POST.get("failsafe_arm") or "").strip()
        failsafe_confirm = str(request.POST.get("failsafe_confirm") or "").strip()
        if failsafe_arm != "1" or failsafe_confirm != "1":
            return self._response(
                request,
                ok=False,
                message="Enable send confirmation before sending.",
                level="warning",
                panel_id=panel_id,
            )

        text = str(request.POST.get("text") or "").strip()
        if not text:
            return self._response(
                request,
                ok=False,
                message="Message is empty.",
                level="danger",
                panel_id=panel_id,
            )

        base = _context_base(request.user, service, identifier, person)
        from core.util import logs as util_logs

        logger = util_logs.get_logger("compose")
        log_prefix = (
            f"[ComposeSend] service={base['service']} identifier={base['identifier']}"
        )
        logger.debug(f"{log_prefix} text_len={len(text)} attempting send")

        # If runtime is out-of-process, enqueue command and return immediately (non-blocking).
        # Expose command id for cancellation so the client can cancel or poll later.
        runtime_client = transport.get_runtime_client(base["service"]) or None
        logger.debug(
            f"{log_prefix} runtime_client={type(runtime_client).__name__ if runtime_client else 'None (queued)'}"
        )
        ts = None
        command_id = None
        if runtime_client is None:
            # Queued (out-of-process) path.  For WhatsApp, first verify the
            # runtime looks alive before enqueueing, otherwise the command
            # would sit in the queue indefinitely.
            if base["service"] == "whatsapp":
                runtime_state = transport.get_runtime_state("whatsapp")
                last_seen = int(runtime_state.get("runtime_seen_at") or 0)
                is_connected = bool(runtime_state.get("connected"))
                pair_status = (
                    str(runtime_state.get("pair_status") or "").strip().lower()
                )
                now_s = int(time.time())
                # Runtime may process sends even when `connected` lags false briefly;
                # heartbeat freshness is the reliable signal for queue availability.
                heartbeat_age = now_s - last_seen if last_seen > 0 else 10**9
                runtime_healthy = bool(is_connected) or pair_status == "connected"
                # Refuse only when BOTH signals are bad: not healthy AND the
                # heartbeat is missing or older than 20 seconds.
                if (not runtime_healthy) and (last_seen <= 0 or heartbeat_age > 20):
                    logger.warning(
                        f"{log_prefix} runtime heartbeat stale (connected={is_connected}, pair_status={pair_status}, last_seen={last_seen}, age={heartbeat_age}); refusing queued send"
                    )
                    return self._response(
                        request,
                        ok=False,
                        message=(
                            "WhatsApp runtime is not connected right now. "
                            "Please wait for reconnect, then retry send."
                        ),
                        level="warning",
                        panel_id=panel_id,
                    )
            command_id = transport.enqueue_runtime_command(
                base["service"],
                "send_message_raw",
                {"recipient": base["identifier"], "text": text, "attachments": []},
            )
            logger.debug(f"{log_prefix} command_id={command_id} enqueued")
            # attach command id to request so _response can include it in HX-Trigger
            request._compose_command_id = command_id
            # Do NOT wait here — return immediately so the UI doesn't block.
            # Record a pending message locally so the thread shows the outgoing message.
            ts = int(time.time() * 1000)
        else:
            # In-process runtime can perform the send synchronously and return a timestamp.
            ts = async_to_sync(transport.send_message_raw)(
                base["service"],
                base["identifier"],
                text=text,
                attachments=[],
            )
        # For queued sends we set `ts` to a local timestamp; for in-process sends ts may be False.
        if not ts:
            return self._response(
                request,
                ok=False,
                message="Send failed. Check service account state.",
                level="danger",
                panel_id=panel_id,
            )

        # Mirror the outgoing message locally so the thread shows it right away.
        if base["person_identifier"] is not None:
            session, _ = ChatSession.objects.get_or_create(
                user=request.user,
                identifier=base["person_identifier"],
            )
            # For in-process sends (Signal, etc), ts is a timestamp or True.
            # For queued sends (WhatsApp/UR), ts is a local timestamp.
            # Set delivered_ts only if we got a real timestamp OR if it's an in-process sync send.
            msg_ts = int(ts) if str(ts).isdigit() else int(time.time() * 1000)
            delivered_ts = msg_ts if runtime_client is not None else None
            Message.objects.create(
                user=request.user,
                session=session,
                sender_uuid="",
                text=text,
                ts=msg_ts,
                delivered_ts=delivered_ts,
                custom_author="USER",
            )
            # Notify XMPP clients from runtime so cross-platform sends appear there too.
            # Best-effort: a failed enqueue is logged but does not fail the send.
            if base["service"] in {"signal", "whatsapp"}:
                try:
                    transport.enqueue_runtime_command(
                        base["service"],
                        "notify_xmpp_sent",
                        {
                            "person_identifier_id": str(base["person_identifier"].id),
                            "text": text,
                        },
                    )
                except Exception as exc:
                    logger.warning(f"{log_prefix} failed to enqueue xmpp notify: {exc}")

        # If we enqueued, inform the client the message is queued and include command id.
        if runtime_client is None:
            return self._response(
                request,
                ok=True,
                message="Message queued for sending.",
                level="info",
                panel_id=panel_id,
            )

        return self._response(
            request,
            ok=True,
            message="",
            level="success",
            panel_id=panel_id,
        )
|