Lightweight containerized Prosody tooling, relocated auth scripts, and XMPP reconnect/auth stabilization

This commit is contained in:
2026-03-05 02:18:12 +00:00
parent 0718a06c19
commit 2140c5facf
69 changed files with 3767 additions and 144 deletions

View File

@@ -642,6 +642,12 @@ class HandleMessage(Command):
actor=(
effective_source_uuid or effective_source_number or ""
),
target_author=str(
(reaction_payload.get("raw") or {}).get("targetAuthorUuid")
or (reaction_payload.get("raw") or {}).get("targetAuthor")
or (reaction_payload.get("raw") or {}).get("targetAuthorNumber")
or ""
),
remove=bool(reaction_payload.get("remove")),
payload=reaction_payload.get("raw") or {},
)
@@ -1308,6 +1314,12 @@ class SignalClient(ClientBase):
emoji=str(reaction_payload.get("emoji") or ""),
source_service="signal",
actor=(source_uuid or source_number or ""),
target_author=str(
(reaction_payload.get("raw") or {}).get("targetAuthorUuid")
or (reaction_payload.get("raw") or {}).get("targetAuthor")
or (reaction_payload.get("raw") or {}).get("targetAuthorNumber")
or ""
),
remove=bool(reaction_payload.get("remove")),
payload=reaction_payload.get("raw") or {},
)
@@ -1453,6 +1465,12 @@ class SignalClient(ClientBase):
emoji=str(reaction_payload.get("emoji") or ""),
source_service="signal",
actor=(source_uuid or source_number or ""),
target_author=str(
(reaction_payload.get("raw") or {}).get("targetAuthorUuid")
or (reaction_payload.get("raw") or {}).get("targetAuthor")
or (reaction_payload.get("raw") or {}).get("targetAuthorNumber")
or ""
),
remove=bool(reaction_payload.get("remove")),
payload=reaction_payload.get("raw") or {},
)

View File

@@ -240,13 +240,17 @@ async def send_reaction(
):
base = getattr(settings, "SIGNAL_HTTP_URL", "http://signal:8080").rstrip("/")
sender_number = settings.SIGNAL_NUMBER
if not recipient_uuid or not target_timestamp:
normalized_recipient = normalize_signal_recipient(recipient_uuid)
normalized_target_author = normalize_signal_recipient(
str(target_author or normalized_recipient)
)
if not normalized_recipient or not target_timestamp:
return False
payload = {
"recipient": recipient_uuid,
"recipient": normalized_recipient,
"reaction": str(emoji or ""),
"target_author": str(target_author or recipient_uuid),
"target_author": normalized_target_author,
"timestamp": int(target_timestamp),
"remove": bool(remove),
}

View File

@@ -17,6 +17,7 @@ from django.core.cache import cache
from core.clients import signalapi
from core.messaging import media_bridge
from core.transports.capabilities import supports, unsupported_reason
from core.util import logs
log = logs.get_logger("transport")
@@ -32,6 +33,10 @@ def _service_key(service: str) -> str:
return str(service or "").strip().lower()
def _capability_checks_enabled() -> bool:
return bool(getattr(settings, "CAPABILITY_ENFORCEMENT_ENABLED", True))
def _runtime_key(service: str) -> str:
return f"gia:service:runtime:{_service_key(service)}"
@@ -898,6 +903,10 @@ async def send_reaction(
remove: bool = False,
):
service_key = _service_key(service)
if _capability_checks_enabled() and not supports(service_key, "reactions"):
reason = unsupported_reason(service_key, "reactions")
log.warning("capability-check failed service=%s feature=reactions: %s", service_key, reason)
return False
if not str(emoji or "").strip() and not remove:
return False
@@ -968,6 +977,13 @@ async def send_reaction(
async def start_typing(service: str, recipient: str):
service_key = _service_key(service)
if _capability_checks_enabled() and not supports(service_key, "typing"):
log.warning(
"capability-check failed service=%s feature=typing: %s",
service_key,
unsupported_reason(service_key, "typing"),
)
return False
if service_key == "signal":
await signalapi.start_typing(recipient)
return True
@@ -998,6 +1014,13 @@ async def start_typing(service: str, recipient: str):
async def stop_typing(service: str, recipient: str):
service_key = _service_key(service)
if _capability_checks_enabled() and not supports(service_key, "typing"):
log.warning(
"capability-check failed service=%s feature=typing: %s",
service_key,
unsupported_reason(service_key, "typing"),
)
return False
if service_key == "signal":
await signalapi.stop_typing(recipient)
return True

View File

@@ -135,6 +135,9 @@ class XMPPComponent(ComponentXMPP):
def __init__(self, ur, jid, secret, server, port):
self.ur = ur
self._upload_config_warned = False
self._reconnect_task = None
self._reconnect_delay_seconds = 1.0
self._reconnect_delay_max_seconds = 30.0
self.log = logs.get_logger("XMPP")
@@ -821,14 +824,49 @@ class XMPPComponent(ComponentXMPP):
async def session_start(self, *args):
self.log.info("XMPP session started")
self._reconnect_delay_seconds = 1.0
if self._reconnect_task and not self._reconnect_task.done():
self._reconnect_task.cancel()
self._reconnect_task = None
await self.enable_carbons()
async def _reconnect_loop(self):
try:
while True:
delay = float(self._reconnect_delay_seconds)
await asyncio.sleep(delay)
try:
self.log.info("XMPP reconnect attempt delay_s=%.1f", delay)
connected = self.connect()
if connected is False:
raise RuntimeError("connect returned false")
self.process(forever=False)
return
except Exception as exc:
self.log.warning("XMPP reconnect attempt failed: %s", exc)
self._reconnect_delay_seconds = min(
self._reconnect_delay_max_seconds,
max(1.0, float(self._reconnect_delay_seconds) * 2.0),
)
except asyncio.CancelledError:
return
finally:
self._reconnect_task = None
def _schedule_reconnect(self):
if self._reconnect_task and not self._reconnect_task.done():
return
self._reconnect_task = self.loop.create_task(self._reconnect_loop())
def on_disconnected(self, *args):
"""
Handles XMPP disconnection and triggers a reconnect loop.
"""
self.log.warning("XMPP disconnected, attempting to reconnect...")
self.connect()
self.log.warning(
"XMPP disconnected, scheduling reconnect attempt in %.1fs",
float(self._reconnect_delay_seconds),
)
self._schedule_reconnect()
async def request_upload_slot(self, recipient, filename, content_type, size):
"""
@@ -1716,7 +1754,7 @@ class XMPPClient(ClientBase):
self.client.loop = self.loop
self.client.connect()
# self.client.process()
self.client.process(forever=False)
async def start_typing_for_person(self, user, person_identifier):
await self.client.send_typing_for_person(user, person_identifier, True)

View File

@@ -15,6 +15,7 @@ from core.commands.registry import get as get_handler
from core.commands.registry import register
from core.messaging.reply_sync import is_mirrored_origin
from core.models import CommandAction, CommandChannelBinding, CommandProfile, Message
from core.tasks.chat_defaults import ensure_default_source_for_chat
from core.util import logs
log = logs.get_logger("command_engine")
@@ -187,6 +188,12 @@ def _auto_setup_profile_bindings_for_first_command(
service=service,
channel_identifier__in=alternate_variants,
).update(enabled=False)
ensure_default_source_for_chat(
user=trigger_message.user,
service=service,
channel_identifier=canonical,
message=trigger_message,
)
def ensure_handlers_registered():

View File

@@ -21,6 +21,7 @@ from core.models import (
TaskProviderConfig,
)
from core.tasks.codex_support import channel_variants, resolve_external_chat_id
from core.tasks.codex_approval import queue_codex_event_with_pre_approval
_CODEX_DEFAULT_RE = re.compile(
r"^\s*(?:\.codex\b|#codex#?)(?P<body>.*)$",
@@ -278,25 +279,48 @@ class CodexCommandHandler(CommandHandler):
"resolution_note",
]
)
if request.external_sync_event_id:
await sync_to_async(ExternalSyncEvent.objects.filter(id=request.external_sync_event_id).update)(
status="ok",
error="",
)
run = request.codex_run
run.status = "approved_waiting_resume"
run.error = ""
await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"])
source_service = str(run.source_service or "")
source_channel = str(run.source_channel or "")
provider_payload = dict(run.request_payload.get("provider_payload") or {})
provider_payload.update(
{
"mode": "approval_response",
"approval_key": approval_key,
"resume_payload": dict(request.resume_payload or {}),
"codex_run_id": str(run.id),
"source_service": source_service,
"source_channel": source_channel,
}
)
resume_payload = dict(request.resume_payload or {})
resume_action = str(resume_payload.get("action") or "").strip().lower()
resume_provider_payload = dict(resume_payload.get("provider_payload") or {})
if resume_action and resume_provider_payload:
provider_payload = dict(resume_provider_payload)
provider_payload["codex_run_id"] = str(run.id)
provider_payload["source_service"] = source_service
provider_payload["source_channel"] = source_channel
event_action = resume_action
resume_idempotency_key = str(resume_payload.get("idempotency_key") or "").strip()
resume_event_key = (
resume_idempotency_key
if resume_idempotency_key
else f"codex_approval:{approval_key}:approved"
)
else:
provider_payload = dict(run.request_payload.get("provider_payload") or {})
provider_payload.update(
{
"mode": "approval_response",
"approval_key": approval_key,
"resume_payload": dict(request.resume_payload or {}),
"codex_run_id": str(run.id),
"source_service": source_service,
"source_channel": source_channel,
}
)
event_action = "append_update"
resume_event_key = f"codex_approval:{approval_key}:approved"
await sync_to_async(ExternalSyncEvent.objects.update_or_create)(
idempotency_key=f"codex_approval:{approval_key}:approved",
idempotency_key=resume_event_key,
defaults={
"user": trigger.user,
"task_id": run.task_id,
@@ -304,7 +328,7 @@ class CodexCommandHandler(CommandHandler):
"provider": "codex_cli",
"status": "pending",
"payload": {
"action": "append_update",
"action": event_action,
"provider_payload": provider_payload,
},
"error": "",
@@ -319,6 +343,11 @@ class CodexCommandHandler(CommandHandler):
await sync_to_async(request.save)(
update_fields=["status", "resolved_at", "resolved_by_identifier", "resolution_note"]
)
if request.external_sync_event_id:
await sync_to_async(ExternalSyncEvent.objects.filter(id=request.external_sync_event_id).update)(
status="failed",
error="approval_denied",
)
run = request.codex_run
run.status = "denied"
run.error = "approval_denied"
@@ -411,7 +440,7 @@ class CodexCommandHandler(CommandHandler):
source_service=service,
source_channel=channel,
external_chat_id=external_chat_id,
status="queued",
status="waiting_approval",
request_payload={"action": "append_update", "provider_payload": dict(payload)},
result_payload={},
error="",
@@ -421,22 +450,20 @@ class CodexCommandHandler(CommandHandler):
await sync_to_async(run.save)(update_fields=["request_payload", "updated_at"])
idempotency_key = f"codex_cmd:{trigger.id}:{mode}:{task.id}:{hashlib.sha1(str(body_text or '').encode('utf-8')).hexdigest()[:12]}"
await sync_to_async(ExternalSyncEvent.objects.update_or_create)(
await sync_to_async(queue_codex_event_with_pre_approval)(
user=trigger.user,
run=run,
task=task,
task_event=None,
action="append_update",
provider_payload=dict(payload),
idempotency_key=idempotency_key,
defaults={
"user": trigger.user,
"task": task,
"task_event": None,
"provider": "codex_cli",
"status": "pending",
"payload": {
"action": "append_update",
"provider_payload": dict(payload),
},
"error": "",
},
)
return CommandResult(ok=True, status="ok", payload={"codex_run_id": str(run.id)})
return CommandResult(
ok=True,
status="ok",
payload={"codex_run_id": str(run.id), "approval_required": True},
)
async def execute(self, ctx: CommandContext) -> CommandResult:
trigger = await self._load_trigger(ctx.message_id)

View File

@@ -63,9 +63,6 @@ def ensure_variant_policies_for_profile(
result: dict[str, CommandVariantPolicy] = {}
if str(profile.slug or "").strip() == "bp":
# Keep source-chat status visible for BP to avoid "silent success" confusion.
if str(profile.visibility_mode or "").strip() == "status_in_source":
CommandVariantPolicy.objects.filter(profile=profile).update(send_status_to_source=True)
for key in BP_VARIANT_KEYS:
meta = BP_VARIANT_META.get(key, {})
defaults = _bp_defaults(profile, key, post_result_enabled)

16
core/events/__init__.py Normal file
View File

@@ -0,0 +1,16 @@
from core.events.ledger import (
append_event,
append_event_sync,
event_ledger_enabled,
event_ledger_status,
)
from core.events.projection import project_session_from_events, shadow_compare_session
__all__ = [
"append_event",
"append_event_sync",
"event_ledger_enabled",
"event_ledger_status",
"project_session_from_events",
"shadow_compare_session",
]

108
core/events/ledger.py Normal file
View File

@@ -0,0 +1,108 @@
from __future__ import annotations
import time
from asgiref.sync import sync_to_async
from django.conf import settings
from core.models import ConversationEvent
from core.observability.tracing import ensure_trace_id
def event_ledger_enabled() -> bool:
    """Return True when dual-writing conversation events to the ledger is enabled."""
    flag = getattr(settings, "EVENT_LEDGER_DUAL_WRITE", False)
    return bool(flag)
def event_ledger_status() -> dict:
    """Snapshot the ledger-related feature flags as a plain dict (for diagnostics)."""
    dual_write = bool(getattr(settings, "EVENT_LEDGER_DUAL_WRITE", False))
    primary_path = bool(getattr(settings, "EVENT_PRIMARY_WRITE_PATH", False))
    return {
        "event_ledger_dual_write": dual_write,
        "event_primary_write_path": primary_path,
    }
def _normalize_direction(value: str) -> str:
direction = str(value or "system").strip().lower()
if direction not in {"in", "out", "system"}:
return "system"
return direction
def _safe_ts(value: int | None) -> int:
if value is None:
return int(time.time() * 1000)
try:
parsed = int(value)
except Exception:
return int(time.time() * 1000)
if parsed <= 0:
return int(time.time() * 1000)
return parsed
def append_event_sync(
    *,
    user,
    session,
    event_type: str,
    direction: str,
    actor_identifier: str = "",
    origin_transport: str = "",
    origin_message_id: str = "",
    origin_chat_id: str = "",
    payload: dict | None = None,
    raw_payload: dict | None = None,
    trace_id: str = "",
    ts: int | None = None,
):
    """Append one ConversationEvent row for *session* (synchronous path).

    Returns None when the ledger feature flag is off. When both
    origin_transport and origin_message_id are present, an existing row with
    the same (user, session, event_type, transport, message id) key is
    returned instead of creating a duplicate.

    Raises:
        ValueError: if event_type is empty or not a declared choice.
    """
    if not event_ledger_enabled():
        return None
    etype = str(event_type or "").strip().lower()
    if not etype:
        raise ValueError("event_type is required")
    allowed_types = {
        str(choice[0]) for choice in ConversationEvent.EVENT_TYPE_CHOICES
    }
    if etype not in allowed_types:
        raise ValueError(f"unsupported event_type: {etype}")
    # ensure_trace_id may seed the trace into *payload*; call it before any
    # early return so the side effect matches both code paths.
    trace_value = ensure_trace_id(trace_id, payload or {})
    transport = str(origin_transport or "").strip().lower()
    message_id = str(origin_message_id or "").strip()
    # Idempotency: the (transport, message id) pair identifies the source
    # event, so reuse the newest matching row rather than writing a duplicate.
    if transport and message_id:
        existing = (
            ConversationEvent.objects.filter(
                user=user,
                session=session,
                event_type=etype,
                origin_transport=transport,
                origin_message_id=message_id,
            )
            .order_by("-created_at")
            .first()
        )
        if existing is not None:
            return existing
    return ConversationEvent.objects.create(
        user=user,
        session=session,
        ts=_safe_ts(ts),
        event_type=etype,
        direction=_normalize_direction(direction),
        actor_identifier=str(actor_identifier or "").strip(),
        origin_transport=transport,
        origin_message_id=message_id,
        origin_chat_id=str(origin_chat_id or "").strip(),
        payload=dict(payload or {}),
        raw_payload=dict(raw_payload or {}),
        trace_id=trace_value,
    )
async def append_event(**kwargs):
    """Async wrapper around append_event_sync; runs it in a worker thread via sync_to_async."""
    return await sync_to_async(append_event_sync, thread_sensitive=True)(**kwargs)

293
core/events/projection.py Normal file
View File

@@ -0,0 +1,293 @@
from __future__ import annotations
from dataclasses import dataclass
from core.models import ChatSession, ConversationEvent, Message
@dataclass
class _ProjectedMessage:
message_id: str
ts: int = 0
text: str = ""
delivered_ts: int | None = None
read_ts: int | None = None
reactions: dict[tuple[str, str, str], dict] | None = None
def __post_init__(self):
if self.reactions is None:
self.reactions = {}
def _safe_int(value, default=0) -> int:
try:
return int(value)
except Exception:
return int(default)
def _reaction_key(row: dict) -> tuple[str, str, str]:
item = dict(row or {})
return (
str(item.get("source_service") or "").strip().lower(),
str(item.get("actor") or "").strip(),
str(item.get("emoji") or "").strip(),
)
def _normalize_reactions(rows: list[dict] | None) -> list[dict]:
merged = {}
for row in list(rows or []):
item = dict(row or {})
key = _reaction_key(item)
if not any(key):
continue
merged[key] = {
"source_service": key[0],
"actor": key[1],
"emoji": key[2],
"removed": bool(item.get("removed")),
}
return sorted(
merged.values(),
key=lambda entry: (
str(entry.get("source_service") or ""),
str(entry.get("actor") or ""),
str(entry.get("emoji") or ""),
bool(entry.get("removed")),
),
)
def project_session_from_events(session: ChatSession) -> list[dict]:
    """Rebuild a session's message list purely from its ConversationEvent rows.

    Events are replayed in (ts, created_at) order. "message_created" events
    seed per-message state; later "read_receipt" and "reaction_added"/
    "reaction_removed" events fold into that state. Events referencing a
    message that was never seen as created are dropped. Returns one dict per
    message, in first-seen order.
    """
    rows = list(
        ConversationEvent.objects.filter(
            user=session.user,
            session=session,
        ).order_by("ts", "created_at")
    )
    projected: dict[str, _ProjectedMessage] = {}
    order: list[str] = []  # message ids in first-seen order
    for event in rows:
        payload = dict(event.payload or {})
        event_type = str(event.event_type or "").strip().lower()
        # Non-creation events address a message via message_id/target_message_id.
        message_id = str(
            payload.get("message_id") or payload.get("target_message_id") or ""
        ).strip()
        if event_type == "message_created":
            # Creation events may fall back to the transport-level message id.
            message_id = str(
                payload.get("message_id") or event.origin_message_id or ""
            ).strip()
            if not message_id:
                continue
            state = projected.get(message_id)
            if state is None:
                state = _ProjectedMessage(message_id=message_id)
                projected[message_id] = state
                order.append(message_id)
            state.ts = _safe_int(payload.get("message_ts"), _safe_int(event.ts))
            state.text = str(payload.get("text") or state.text or "")
            delivered_default = _safe_int(payload.get("delivered_ts"), _safe_int(event.ts))
            # Only the first observed delivery timestamp is kept.
            if state.delivered_ts is None:
                state.delivered_ts = delivered_default or None
            continue
        # Receipt/reaction events only apply to already-created messages.
        if not message_id or message_id not in projected:
            continue
        state = projected[message_id]
        if event_type == "read_receipt":
            read_ts = _safe_int(payload.get("read_ts"), _safe_int(event.ts))
            if read_ts > 0:
                if state.read_ts is None:
                    state.read_ts = read_ts
                else:
                    # Later receipts can only move the read time forward.
                    state.read_ts = max(int(state.read_ts or 0), read_ts)
            # A read implies delivery; backfill delivered_ts if still unset.
            if state.delivered_ts is None and read_ts > 0:
                state.delivered_ts = read_ts
            continue
        if event_type in {"reaction_added", "reaction_removed"}:
            source_service = str(payload.get("source_service") or event.origin_transport or "").strip().lower()
            actor = str(payload.get("actor") or event.actor_identifier or "").strip()
            emoji = str(payload.get("emoji") or "").strip()
            if not source_service and not actor and not emoji:
                continue
            # Last event per (service, actor, emoji) wins.
            key = (source_service, actor, emoji)
            state.reactions[key] = {
                "source_service": source_service,
                "actor": actor,
                "emoji": emoji,
                "removed": bool(event_type == "reaction_removed" or payload.get("remove")),
            }
    output = []
    for message_id in order:
        state = projected.get(message_id)
        if state is None:
            continue
        output.append(
            {
                "message_id": str(state.message_id),
                "ts": int(state.ts or 0),
                "text": str(state.text or ""),
                "delivered_ts": (
                    int(state.delivered_ts)
                    if state.delivered_ts is not None
                    else None
                ),
                "read_ts": int(state.read_ts) if state.read_ts is not None else None,
                "reactions": _normalize_reactions(list((state.reactions or {}).values())),
            }
        )
    return output
def shadow_compare_session(session: ChatSession, detail_limit: int = 50) -> dict:
    """Compare the event-ledger projection of *session* against its Message rows.

    Builds the projected view via project_session_from_events, loads the DB
    messages, and counts field-level mismatches (text, ts, delivered_ts,
    read_ts, reactions) plus rows missing on either side. Up to
    *detail_limit* per-message detail records are kept. Returns a summary
    dict with counters, cause tallies, and details; does not write anything.
    """
    projected_rows = project_session_from_events(session)
    projected_by_id = {str(row.get("message_id") or ""): row for row in projected_rows}
    db_rows = list(
        Message.objects.filter(user=session.user, session=session)
        .order_by("ts", "id")
        .values(
            "id",
            "ts",
            "text",
            "delivered_ts",
            "read_ts",
            "receipt_payload",
        )
    )
    db_by_id = {str(row.get("id")): dict(row) for row in db_rows}
    counters = {
        "missing_in_projection": 0,
        "missing_in_db": 0,
        "text_mismatch": 0,
        "ts_mismatch": 0,
        "delivered_ts_mismatch": 0,
        "read_ts_mismatch": 0,
        "reactions_mismatch": 0,
    }
    details = []
    cause_counts = {
        "missing_event_write": 0,
        "ambiguous_reaction_target": 0,
        "payload_normalization_gap": 0,
    }
    # Records one mismatch: tallies its cause and, while under detail_limit,
    # appends a detail row (extra fields merged in).
    def _record_detail(message_id: str, issue: str, cause: str, extra: dict | None = None):
        if cause in cause_counts:
            cause_counts[cause] += 1
        if len(details) < max(0, int(detail_limit)):
            row = {"message_id": message_id, "issue": issue, "cause": cause}
            if extra:
                row.update(dict(extra))
            details.append(row)
    for message_id, db_row in db_by_id.items():
        projected = projected_by_id.get(message_id)
        if projected is None:
            # DB message never showed up in the event replay.
            counters["missing_in_projection"] += 1
            _record_detail(message_id, "missing_in_projection", "missing_event_write")
            continue
        db_text = str(db_row.get("text") or "")
        projected_text = str(projected.get("text") or "")
        if db_text != projected_text:
            counters["text_mismatch"] += 1
            _record_detail(
                message_id,
                "text_mismatch",
                "payload_normalization_gap",
                {"db": db_text, "projected": projected_text},
            )
        db_ts = _safe_int(db_row.get("ts"), 0)
        projected_ts = _safe_int(projected.get("ts"), 0)
        if db_ts != projected_ts:
            counters["ts_mismatch"] += 1
            _record_detail(
                message_id,
                "ts_mismatch",
                "payload_normalization_gap",
                {"db": db_ts, "projected": projected_ts},
            )
        db_delivered_ts = db_row.get("delivered_ts")
        projected_delivered_ts = projected.get("delivered_ts")
        # Mismatch if exactly one side is None, or both set but unequal.
        if (
            (db_delivered_ts is None) != (projected_delivered_ts is None)
            or (
                db_delivered_ts is not None
                and projected_delivered_ts is not None
                and int(db_delivered_ts) != int(projected_delivered_ts)
            )
        ):
            counters["delivered_ts_mismatch"] += 1
            _record_detail(
                message_id,
                "delivered_ts_mismatch",
                "payload_normalization_gap",
                {
                    "db": db_delivered_ts,
                    "projected": projected_delivered_ts,
                },
            )
        db_read_ts = db_row.get("read_ts")
        projected_read_ts = projected.get("read_ts")
        # Same None-vs-value comparison as delivered_ts above.
        if (
            (db_read_ts is None) != (projected_read_ts is None)
            or (
                db_read_ts is not None
                and projected_read_ts is not None
                and int(db_read_ts) != int(projected_read_ts)
            )
        ):
            counters["read_ts_mismatch"] += 1
            _record_detail(
                message_id,
                "read_ts_mismatch",
                "payload_normalization_gap",
                {"db": db_read_ts, "projected": projected_read_ts},
            )
        # Both sides are normalized before comparing, so ordering and
        # duplicate reaction rows do not cause false mismatches.
        db_reactions = _normalize_reactions(
            list((db_row.get("receipt_payload") or {}).get("reactions") or [])
        )
        projected_reactions = _normalize_reactions(list(projected.get("reactions") or []))
        if db_reactions != projected_reactions:
            counters["reactions_mismatch"] += 1
            cause = "payload_normalization_gap"
            # If the DB side matched the reaction by a timestamp window, the
            # target was ambiguous — attribute the mismatch to that instead.
            strategy = str(
                ((db_row.get("receipt_payload") or {}).get("reaction_last_match_strategy") or "")
            ).strip()
            if strategy == "nearest_ts_window":
                cause = "ambiguous_reaction_target"
            _record_detail(
                message_id,
                "reactions_mismatch",
                cause,
                {"db": db_reactions, "projected": projected_reactions},
            )
    for message_id in projected_by_id.keys():
        if message_id not in db_by_id:
            counters["missing_in_db"] += 1
            _record_detail(message_id, "missing_in_db", "payload_normalization_gap")
    mismatch_total = int(sum(int(value or 0) for value in counters.values()))
    return {
        "session_id": str(session.id),
        "db_message_count": len(db_rows),
        "projected_message_count": len(projected_rows),
        "mismatch_total": mismatch_total,
        "counters": counters,
        "cause_counts": cause_counts,
        "details": details,
    }

View File

@@ -196,6 +196,26 @@ class Command(BaseCommand):
result=result_payload,
)
event.save(update_fields=["status", "error", "payload", "updated_at"])
mode = str(provider_payload.get("mode") or "").strip().lower()
approval_key = str(provider_payload.get("approval_key") or "").strip()
if mode == "approval_response" and approval_key:
req = (
CodexPermissionRequest.objects.select_related("external_sync_event", "codex_run")
.filter(user=event.user, approval_key=approval_key)
.first()
)
if req and req.external_sync_event_id:
if result.ok:
ExternalSyncEvent.objects.filter(id=req.external_sync_event_id).update(
status="ok",
error="",
)
elif str(event.error or "").strip() == "approval_denied":
ExternalSyncEvent.objects.filter(id=req.external_sync_event_id).update(
status="failed",
error="approval_denied",
)
if codex_run is not None:
codex_run.status = "ok" if result.ok else "failed"
codex_run.error = str(result.error or "")

View File

@@ -0,0 +1,68 @@
from __future__ import annotations
import json
import time
from django.core.management.base import BaseCommand
from core.models import ConversationEvent
class Command(BaseCommand):
    """Read-only smoke check: summarize recent ConversationEvent writes."""

    help = "Quick non-mutating sanity check for recent canonical event writes."

    def add_arguments(self, parser):
        """CLI flags: time window, transport filter, user filter, row cap, JSON output."""
        parser.add_argument("--minutes", type=int, default=120)
        parser.add_argument("--service", default="")
        parser.add_argument("--user-id", default="")
        parser.add_argument("--limit", type=int, default=200)
        parser.add_argument("--json", action="store_true", default=False)

    def handle(self, *args, **options):
        """Query events newer than the cutoff, tally per event_type, and print a summary."""
        # Clamp inputs to sane minimums; empty filters mean "no filter".
        minutes = max(1, int(options.get("minutes") or 120))
        service = str(options.get("service") or "").strip().lower()
        user_id = str(options.get("user_id") or "").strip()
        limit = max(1, int(options.get("limit") or 200))
        as_json = bool(options.get("json"))
        # ConversationEvent.ts is in milliseconds (see ledger writes).
        cutoff_ts = int(time.time() * 1000) - (minutes * 60 * 1000)
        queryset = ConversationEvent.objects.filter(ts__gte=cutoff_ts).order_by("-ts")
        if service:
            queryset = queryset.filter(origin_transport=service)
        if user_id:
            queryset = queryset.filter(user_id=user_id)
        rows = list(
            queryset.values(
                "id",
                "user_id",
                "session_id",
                "ts",
                "event_type",
                "direction",
                "origin_transport",
                "trace_id",
            )[:limit]
        )
        # Frequency count of event types among the fetched rows.
        event_type_counts = {}
        for row in rows:
            key = str(row.get("event_type") or "")
            event_type_counts[key] = int(event_type_counts.get(key) or 0) + 1
        # NOTE(review): "sample" contains raw DB values (ids, session ids);
        # presumably JSON-serializable — confirm if the PK type is UUID.
        payload = {
            "minutes": minutes,
            "service": service,
            "user_id": user_id,
            "count": len(rows),
            "event_type_counts": event_type_counts,
            "sample": rows[:25],
        }
        if as_json:
            self.stdout.write(json.dumps(payload, indent=2, sort_keys=True))
            return
        self.stdout.write(
            f"event-ledger-smoke minutes={minutes} service={service or '-'} user={user_id or '-'} count={len(rows)}"
        )
        self.stdout.write(f"event_type_counts={event_type_counts}")

View File

@@ -0,0 +1,129 @@
from __future__ import annotations
import json
import time
from django.core.management.base import BaseCommand, CommandError
from core.events.projection import shadow_compare_session
from core.models import ChatSession, Message
class Command(BaseCommand):
    """Run shadow_compare_session over matching chat sessions and report mismatch totals."""

    help = (
        "Run event->message shadow projection comparison and emit mismatch counters "
        "per chat session."
    )

    def add_arguments(self, parser):
        """CLI flags: session filters, recency window, scan caps, output/exit behavior."""
        parser.add_argument("--user-id", default="")
        parser.add_argument("--session-id", default="")
        parser.add_argument("--service", default="")
        parser.add_argument("--recent-minutes", type=int, default=0)
        parser.add_argument("--limit-sessions", type=int, default=50)
        parser.add_argument("--detail-limit", type=int, default=25)
        parser.add_argument("--fail-on-mismatch", action="store_true", default=False)
        parser.add_argument("--json", action="store_true", default=False)

    def handle(self, *args, **options):
        """Select sessions, compare each, aggregate counters, and print (or fail on) the result.

        Raises CommandError when no session matches, or — with
        --fail-on-mismatch — when any mismatch was found.
        """
        user_id = str(options.get("user_id") or "").strip()
        session_id = str(options.get("session_id") or "").strip()
        service = str(options.get("service") or "").strip().lower()
        recent_minutes = max(0, int(options.get("recent_minutes") or 0))
        limit_sessions = max(1, int(options.get("limit_sessions") or 50))
        detail_limit = max(0, int(options.get("detail_limit") or 25))
        as_json = bool(options.get("json"))
        fail_on_mismatch = bool(options.get("fail_on_mismatch"))
        # Most recently active sessions first; empty filters mean "no filter".
        sessions = ChatSession.objects.all().order_by("-last_interaction", "id")
        if user_id:
            sessions = sessions.filter(user_id=user_id)
        if session_id:
            sessions = sessions.filter(id=session_id)
        if service:
            sessions = sessions.filter(identifier__service=service)
        if recent_minutes > 0:
            # Restrict to sessions that have a Message inside the window
            # (Message.ts is in milliseconds).
            cutoff_ts = int(time.time() * 1000) - (recent_minutes * 60 * 1000)
            recent_session_ids = (
                Message.objects.filter(ts__gte=cutoff_ts)
                .values_list("session_id", flat=True)
                .distinct()
            )
            sessions = sessions.filter(id__in=recent_session_ids)
        sessions = list(sessions.select_related("user", "identifier")[:limit_sessions])
        if not sessions:
            raise CommandError("No chat sessions matched.")
        # Totals across all scanned sessions; keys mirror shadow_compare_session output.
        aggregate = {
            "sessions_scanned": 0,
            "db_message_count": 0,
            "projected_message_count": 0,
            "mismatch_total": 0,
            "counters": {
                "missing_in_projection": 0,
                "missing_in_db": 0,
                "text_mismatch": 0,
                "ts_mismatch": 0,
                "delivered_ts_mismatch": 0,
                "read_ts_mismatch": 0,
                "reactions_mismatch": 0,
            },
            "cause_counts": {
                "missing_event_write": 0,
                "ambiguous_reaction_target": 0,
                "payload_normalization_gap": 0,
            },
        }
        results = []
        for session in sessions:
            compared = shadow_compare_session(session, detail_limit=detail_limit)
            aggregate["sessions_scanned"] += 1
            aggregate["db_message_count"] += int(compared.get("db_message_count") or 0)
            aggregate["projected_message_count"] += int(compared.get("projected_message_count") or 0)
            aggregate["mismatch_total"] += int(compared.get("mismatch_total") or 0)
            for key in aggregate["counters"].keys():
                aggregate["counters"][key] += int(
                    (compared.get("counters") or {}).get(key) or 0
                )
            for key in aggregate["cause_counts"].keys():
                aggregate["cause_counts"][key] += int(
                    (compared.get("cause_counts") or {}).get(key) or 0
                )
            results.append(compared)
        payload = {
            "filters": {
                "user_id": user_id,
                "session_id": session_id,
                "service": service,
                "recent_minutes": recent_minutes,
                "limit_sessions": limit_sessions,
                "detail_limit": detail_limit,
            },
            "aggregate": aggregate,
            "sessions": results,
        }
        if as_json:
            self.stdout.write(json.dumps(payload, indent=2, sort_keys=True))
        else:
            self.stdout.write(
                "shadow compare: "
                f"sessions={aggregate['sessions_scanned']} "
                f"db={aggregate['db_message_count']} "
                f"projected={aggregate['projected_message_count']} "
                f"mismatches={aggregate['mismatch_total']}"
            )
            self.stdout.write(f"counters={aggregate['counters']}")
            self.stdout.write(f"cause_counts={aggregate['cause_counts']}")
            for row in results:
                self.stdout.write(
                    f"session={row.get('session_id')} mismatch_total={row.get('mismatch_total')} "
                    f"db={row.get('db_message_count')} projected={row.get('projected_message_count')}"
                )
        # Non-zero exit for CI/monitoring callers when requested.
        if fail_on_mismatch and int(aggregate["mismatch_total"] or 0) > 0:
            raise CommandError(
                f"Shadow projection mismatch detected: {aggregate['mismatch_total']}"
            )

View File

@@ -2,7 +2,9 @@ from asgiref.sync import sync_to_async
from django.conf import settings
import uuid
from core.events.ledger import append_event
from core.messaging.utils import messages_to_string
from core.observability.tracing import ensure_trace_id
from core.models import ChatSession, Message, QueuedMessage
from core.util import logs
@@ -158,6 +160,8 @@ async def store_message(
reply_source_service="",
reply_source_message_id="",
message_meta=None,
trace_id="",
raw_payload=None,
):
log.debug("Storing message for session=%s outgoing=%s", session.id, outgoing)
msg = await sync_to_async(Message.objects.create)(
@@ -176,6 +180,29 @@ async def store_message(
reply_source_message_id=str(reply_source_message_id or "").strip() or None,
message_meta=dict(message_meta or {}),
)
try:
await append_event(
user=session.user,
session=session,
ts=int(ts),
event_type="message_created",
direction="out" if bool(outgoing) else "in",
actor_identifier=str(sender or ""),
origin_transport=str(source_service or ""),
origin_message_id=str(source_message_id or ""),
origin_chat_id=str(source_chat_id or ""),
payload={
"message_id": str(msg.id),
"text": str(text or ""),
"reply_source_service": str(reply_source_service or ""),
"reply_source_message_id": str(reply_source_message_id or ""),
"outgoing": bool(outgoing),
},
raw_payload=dict(raw_payload or {}),
trace_id=ensure_trace_id(trace_id, message_meta or {}),
)
except Exception as exc:
log.warning("Event ledger append failed for message=%s: %s", msg.id, exc)
return msg
@@ -193,6 +220,8 @@ async def store_own_message(
reply_source_service="",
reply_source_message_id="",
message_meta=None,
trace_id="",
raw_payload=None,
):
log.debug("Storing own message for session=%s queue=%s", session.id, queue)
cast = {
@@ -219,6 +248,30 @@ async def store_own_message(
msg = await sync_to_async(msg_object.objects.create)(
**cast,
)
if msg_object is Message:
try:
await append_event(
user=session.user,
session=session,
ts=int(ts),
event_type="message_created",
direction="out",
actor_identifier="BOT",
origin_transport=str(source_service or ""),
origin_message_id=str(source_message_id or ""),
origin_chat_id=str(source_chat_id or ""),
payload={
"message_id": str(msg.id),
"text": str(text or ""),
"queued": bool(queue),
"reply_source_service": str(reply_source_service or ""),
"reply_source_message_id": str(reply_source_message_id or ""),
},
raw_payload=dict(raw_payload or {}),
trace_id=ensure_trace_id(trace_id, message_meta or {}),
)
except Exception as exc:
log.warning("Event ledger append failed for own message=%s: %s", msg.id, exc)
return msg
@@ -235,6 +288,7 @@ async def apply_read_receipts(
source_service="signal",
read_by_identifier="",
payload=None,
trace_id="",
):
"""
Persist delivery/read metadata for one identifier's messages.
@@ -283,6 +337,33 @@ async def apply_read_receipts(
if dirty:
await sync_to_async(message.save)(update_fields=dirty)
updated += 1
try:
await append_event(
user=user,
session=message.session,
ts=int(read_at or message.ts or 0),
event_type="read_receipt",
direction="system",
actor_identifier=str(read_by_identifier or ""),
origin_transport=str(source_service or ""),
origin_message_id=str(message.source_message_id or message.id),
origin_chat_id=str(message.source_chat_id or ""),
payload={
"message_id": str(message.id),
"message_ts": int(message.ts or 0),
"read_ts": int(read_at or 0),
"read_by_identifier": str(read_by_identifier or ""),
"timestamps": [int(v) for v in ts_values],
},
raw_payload=dict(payload or {}),
trace_id=ensure_trace_id(trace_id, payload or {}),
)
except Exception as exc:
log.warning(
"Event ledger append failed for read receipt message=%s: %s",
message.id,
exc,
)
return updated
@@ -297,6 +378,8 @@ async def apply_reaction(
actor="",
remove=False,
payload=None,
trace_id="",
target_author="",
):
log.debug(
"reaction-bridge history-apply start user=%s person_identifier=%s target_message_id=%s target_ts=%s source=%s actor=%s remove=%s emoji=%s",
@@ -315,6 +398,8 @@ async def apply_reaction(
).select_related("session")
target = None
match_strategy = "none"
target_author_value = str(target_author or "").strip()
target_uuid = str(target_message_id or "").strip()
if target_uuid:
is_uuid = True
@@ -326,12 +411,16 @@ async def apply_reaction(
target = await sync_to_async(
lambda: queryset.filter(id=target_uuid).order_by("-ts").first()
)()
if target is not None:
match_strategy = "local_message_id"
if target is None:
target = await sync_to_async(
lambda: queryset.filter(source_message_id=target_uuid)
.order_by("-ts")
.first()
)()
if target is not None:
match_strategy = "source_message_id"
if target is None:
try:
@@ -339,11 +428,64 @@ async def apply_reaction(
except Exception:
ts_value = 0
if ts_value > 0:
# Signal reactions target source timestamp; prefer deterministic exact matches.
exact_candidates = await sync_to_async(list)(
queryset.filter(source_message_id=str(ts_value)).order_by("-ts")[:20]
)
if target_author_value and exact_candidates:
filtered = [
row
for row in exact_candidates
if str(row.sender_uuid or "").strip() == target_author_value
]
if filtered:
exact_candidates = filtered
if exact_candidates:
target = exact_candidates[0]
match_strategy = "exact_source_message_id_ts"
log.debug(
"reaction-bridge history-apply exact-source-ts target_ts=%s picked_message_id=%s candidates=%s",
ts_value,
str(target.id),
len(exact_candidates),
)
if target is None and ts_value > 0:
strict_ts_rows = await sync_to_async(list)(
queryset.filter(ts=ts_value).order_by("-id")[:20]
)
if target_author_value and strict_ts_rows:
filtered = [
row
for row in strict_ts_rows
if str(row.sender_uuid or "").strip() == target_author_value
]
if filtered:
strict_ts_rows = filtered
if strict_ts_rows:
target = strict_ts_rows[0]
match_strategy = "strict_ts_match"
log.debug(
"reaction-bridge history-apply strict-ts target_ts=%s picked_message_id=%s candidates=%s",
ts_value,
str(target.id),
len(strict_ts_rows),
)
if target is None and ts_value > 0:
lower = ts_value - 10_000
upper = ts_value + 10_000
window_rows = await sync_to_async(list)(
queryset.filter(ts__gte=lower, ts__lte=upper).order_by("ts")[:200]
)
if target_author_value and window_rows:
author_rows = [
row
for row in window_rows
if str(row.sender_uuid or "").strip() == target_author_value
]
if author_rows:
window_rows = author_rows
if window_rows:
target = min(
window_rows,
@@ -359,6 +501,7 @@ async def apply_reaction(
int(target.ts or 0),
len(window_rows),
)
match_strategy = "nearest_ts_window"
if target is None:
log.warning(
@@ -371,10 +514,13 @@ async def apply_reaction(
return None
reactions = list((target.receipt_payload or {}).get("reactions") or [])
normalized_source = str(source_service or "").strip().lower()
normalized_actor = str(actor or "").strip()
normalized_emoji = str(emoji or "").strip()
reaction_key = (
str(source_service or "").strip().lower(),
str(actor or "").strip(),
str(emoji or "").strip(),
normalized_source,
normalized_actor,
normalized_emoji,
)
merged = []
@@ -386,31 +532,94 @@ async def apply_reaction(
str(row.get("actor") or "").strip(),
str(row.get("emoji") or "").strip(),
)
if not row_key[2] and bool(row.get("removed")):
# Keep malformed remove rows out of active reaction set.
continue
if row_key == reaction_key:
row["removed"] = bool(remove)
row["updated_at"] = int(target_ts or target.ts or 0)
row["payload"] = dict(payload or {})
row["match_strategy"] = match_strategy
merged.append(row)
replaced = True
continue
merged.append(row)
if not replaced:
if not replaced and (normalized_emoji or not bool(remove)):
merged.append(
{
"emoji": str(emoji or ""),
"source_service": str(source_service or ""),
"actor": str(actor or ""),
"emoji": normalized_emoji,
"source_service": normalized_source,
"actor": normalized_actor,
"removed": bool(remove),
"updated_at": int(target_ts or target.ts or 0),
"payload": dict(payload or {}),
"match_strategy": match_strategy,
}
)
elif not replaced and bool(remove):
receipt_payload = dict(target.receipt_payload or {})
reaction_events = list(receipt_payload.get("reaction_events") or [])
reaction_events.append(
{
"emoji": normalized_emoji,
"source_service": normalized_source,
"actor": normalized_actor,
"removed": True,
"updated_at": int(target_ts or target.ts or 0),
"payload": dict(payload or {}),
"match_strategy": match_strategy,
"skip_reason": "remove_without_emoji_or_match",
}
)
if len(reaction_events) > 200:
reaction_events = reaction_events[-200:]
receipt_payload["reaction_events"] = reaction_events
target.receipt_payload = receipt_payload
await sync_to_async(target.save)(update_fields=["receipt_payload"])
log.debug(
"reaction-bridge history-apply remove-without-match message_id=%s strategy=%s",
str(target.id),
match_strategy,
)
return target
receipt_payload = dict(target.receipt_payload or {})
receipt_payload["reactions"] = merged
if match_strategy:
receipt_payload["reaction_last_match_strategy"] = str(match_strategy)
target.receipt_payload = receipt_payload
await sync_to_async(target.save)(update_fields=["receipt_payload"])
try:
await append_event(
user=user,
session=target.session,
ts=int(target_ts or target.ts or 0),
event_type="reaction_removed" if bool(remove) else "reaction_added",
direction="system",
actor_identifier=str(actor or ""),
origin_transport=str(source_service or ""),
origin_message_id=str(target.source_message_id or target.id),
origin_chat_id=str(target.source_chat_id or ""),
payload={
"message_id": str(target.id),
"target_message_id": str(target_message_id or target.id),
"target_ts": int(target_ts or target.ts or 0),
"emoji": str(emoji or ""),
"remove": bool(remove),
"source_service": normalized_source,
"actor": normalized_actor,
"match_strategy": match_strategy,
},
raw_payload=dict(payload or {}),
trace_id=ensure_trace_id(trace_id, payload or {}),
)
except Exception as exc:
log.warning(
"Event ledger append failed for reaction on message=%s: %s",
target.id,
exc,
)
log.debug(
"reaction-bridge history-apply ok message_id=%s reactions=%s",
str(target.id),

View File

@@ -0,0 +1,165 @@
import uuid
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ConversationEvent and AdapterHealthEvent ledger tables.

    Mirrors the model definitions in ``core.models`` (same choices, fields
    and orderings) and adds the lookup indexes used for per-session event
    replay, origin/trace correlation, and adapter health history queries.
    """

    dependencies = [
        ("core", "0034_codexrun_codexpermissionrequest_and_more"),
    ]
    operations = [
        migrations.CreateModel(
            name="ConversationEvent",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("ts", models.BigIntegerField(db_index=True, help_text="Event timestamp (unix ms).")),
                (
                    "event_type",
                    models.CharField(
                        choices=[
                            ("message_created", "Message Created"),
                            ("message_edited", "Message Edited"),
                            ("message_deleted", "Message Deleted"),
                            ("reaction_added", "Reaction Added"),
                            ("reaction_removed", "Reaction Removed"),
                            ("read_receipt", "Read Receipt"),
                            ("typing_started", "Typing Started"),
                            ("typing_stopped", "Typing Stopped"),
                            ("participant_added", "Participant Added"),
                            ("participant_removed", "Participant Removed"),
                            ("delivery_receipt", "Delivery Receipt"),
                        ],
                        max_length=64,
                    ),
                ),
                (
                    "direction",
                    models.CharField(
                        choices=[("in", "Inbound"), ("out", "Outbound"), ("system", "System")],
                        max_length=16,
                    ),
                ),
                ("actor_identifier", models.CharField(blank=True, default="", max_length=255)),
                (
                    "origin_transport",
                    models.CharField(
                        blank=True,
                        choices=[
                            ("signal", "Signal"),
                            ("whatsapp", "WhatsApp"),
                            ("xmpp", "XMPP"),
                            ("instagram", "Instagram"),
                            ("web", "Web"),
                        ],
                        default="",
                        max_length=32,
                    ),
                ),
                ("origin_message_id", models.CharField(blank=True, default="", max_length=255)),
                ("origin_chat_id", models.CharField(blank=True, default="", max_length=255)),
                ("payload", models.JSONField(blank=True, default=dict)),
                ("raw_payload", models.JSONField(blank=True, default=dict)),
                ("trace_id", models.CharField(blank=True, default="", max_length=64)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "session",
                    models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="core.chatsession"),
                ),
                (
                    "user",
                    models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
                ),
            ],
            options={
                # Chronological ordering supports ledger replay oldest-first.
                "ordering": ["ts", "created_at"],
            },
        ),
        migrations.CreateModel(
            name="AdapterHealthEvent",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                (
                    "service",
                    models.CharField(
                        choices=[
                            ("signal", "Signal"),
                            ("whatsapp", "WhatsApp"),
                            ("xmpp", "XMPP"),
                            ("instagram", "Instagram"),
                        ],
                        max_length=32,
                    ),
                ),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("ok", "OK"),
                            ("degraded", "Degraded"),
                            ("down", "Down"),
                            ("recovering", "Recovering"),
                        ],
                        max_length=32,
                    ),
                ),
                ("reason", models.TextField(blank=True, default="")),
                ("retry_meta", models.JSONField(blank=True, default=dict)),
                (
                    "ts",
                    models.BigIntegerField(
                        db_index=True,
                        help_text="Health transition timestamp (unix ms).",
                    ),
                ),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "user",
                    # Nullable: health transitions may be global rather than per-user.
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                # Newest-first: health views show the most recent transitions.
                "ordering": ["-ts", "-created_at"],
            },
        ),
        migrations.AddIndex(
            model_name="conversationevent",
            index=models.Index(fields=["user", "session", "ts"], name="core_conver_user_id_96580b_idx"),
        ),
        migrations.AddIndex(
            model_name="conversationevent",
            index=models.Index(
                fields=["origin_transport", "origin_message_id"],
                name="core_conver_origin__283534_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="conversationevent",
            index=models.Index(fields=["trace_id"], name="core_conver_trace_i_4ed2ec_idx"),
        ),
        migrations.AddIndex(
            model_name="conversationevent",
            index=models.Index(
                fields=["event_type", "created_at"],
                name="core_conver_event_t_a16f3e_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="adapterhealthevent",
            index=models.Index(fields=["service", "ts"], name="core_adapte_service_f6e8d4_idx"),
        ),
        migrations.AddIndex(
            model_name="adapterhealthevent",
            index=models.Index(
                fields=["status", "created_at"],
                name="core_adapte_status_6529f5_idx",
            ),
        ),
    ]

View File

@@ -391,6 +391,85 @@ class Message(models.Model):
]
class ConversationEvent(models.Model):
    """Append-only ledger row describing one conversation event.

    Written alongside the regular Message rows (see the ``append_event``
    call sites) so message lifecycle, reactions, read receipts, typing and
    participant changes can be replayed per session and correlated across
    transports via ``trace_id``.
    """

    EVENT_TYPE_CHOICES = (
        ("message_created", "Message Created"),
        ("message_edited", "Message Edited"),
        ("message_deleted", "Message Deleted"),
        ("reaction_added", "Reaction Added"),
        ("reaction_removed", "Reaction Removed"),
        ("read_receipt", "Read Receipt"),
        ("typing_started", "Typing Started"),
        ("typing_stopped", "Typing Stopped"),
        ("participant_added", "Participant Added"),
        ("participant_removed", "Participant Removed"),
        ("delivery_receipt", "Delivery Receipt"),
    )
    DIRECTION_CHOICES = (
        ("in", "Inbound"),
        ("out", "Outbound"),
        ("system", "System"),
    )
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    session = models.ForeignKey(ChatSession, on_delete=models.CASCADE)
    # Unix-milliseconds event time (distinct from created_at, the DB insert time).
    ts = models.BigIntegerField(
        db_index=True,
        help_text="Event timestamp (unix ms).",
    )
    event_type = models.CharField(max_length=64, choices=EVENT_TYPE_CHOICES)
    direction = models.CharField(max_length=16, choices=DIRECTION_CHOICES)
    # Who performed the event (phone number/UUID/JID, or "BOT" for own messages).
    actor_identifier = models.CharField(max_length=255, blank=True, default="")
    origin_transport = models.CharField(
        max_length=32, blank=True, default="", choices=CHANNEL_SERVICE_CHOICES
    )
    # Transport-native message/chat ids, used to correlate with external systems.
    origin_message_id = models.CharField(max_length=255, blank=True, default="")
    origin_chat_id = models.CharField(max_length=255, blank=True, default="")
    # Normalized event payload vs. raw transport payload, kept separately.
    payload = models.JSONField(default=dict, blank=True)
    raw_payload = models.JSONField(default=dict, blank=True)
    trace_id = models.CharField(max_length=64, blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Chronological ordering supports oldest-first ledger replay.
        ordering = ["ts", "created_at"]
        indexes = [
            models.Index(fields=["user", "session", "ts"]),
            models.Index(fields=["origin_transport", "origin_message_id"]),
            models.Index(fields=["trace_id"]),
            models.Index(fields=["event_type", "created_at"]),
        ]
class AdapterHealthEvent(models.Model):
    """One health-state transition for a transport adapter (signal/whatsapp/...).

    Rows are appended on status changes; ``retry_meta`` carries whatever
    retry/backoff context the reporter attaches.
    """

    STATUS_CHOICES = (
        ("ok", "OK"),
        ("degraded", "Degraded"),
        ("down", "Down"),
        ("recovering", "Recovering"),
    )
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # Nullable: a transition may apply globally rather than to one user's adapter.
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    service = models.CharField(max_length=32, choices=SERVICE_CHOICES)
    status = models.CharField(max_length=32, choices=STATUS_CHOICES)
    reason = models.TextField(blank=True, default="")
    retry_meta = models.JSONField(default=dict, blank=True)
    # Unix-milliseconds transition time (distinct from created_at insert time).
    ts = models.BigIntegerField(
        db_index=True,
        help_text="Health transition timestamp (unix ms).",
    )
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # Newest-first: health dashboards read the latest transitions.
        ordering = ["-ts", "-created_at"]
        indexes = [
            models.Index(fields=["service", "ts"]),
            models.Index(fields=["status", "created_at"]),
        ]
class Group(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)

View File

@@ -5,6 +5,7 @@ from asgiref.sync import sync_to_async
from django.conf import settings
from core.clients import transport
from core.events import event_ledger_status
from core.clients.instagram import InstagramClient
from core.clients.signal import SignalClient
from core.clients.whatsapp import WhatsAppClient
@@ -18,6 +19,7 @@ from core.presence import AvailabilitySignal, record_native_signal
from core.realtime.typing_state import set_person_typing_state
from core.translation.engine import process_inbound_translation
from core.util import logs
from core.observability.tracing import ensure_trace_id
class UnifiedRouter(object):
@@ -34,6 +36,13 @@ class UnifiedRouter(object):
self.log = logs.get_logger("router")
self.log.info("Initialised Unified Router Interface.")
self.log.info(
"runtime-flags event_ledger_dual_write=%s event_primary_write_path=%s trace_propagation=%s capability_enforcement=%s",
bool(event_ledger_status().get("event_ledger_dual_write")),
bool(event_ledger_status().get("event_primary_write_path")),
bool(getattr(settings, "TRACE_PROPAGATION_ENABLED", True)),
bool(getattr(settings, "CAPABILITY_ENFORCEMENT_ENABLED", True)),
)
self.xmpp = XMPPClient(self, loop, "xmpp")
self.signal = SignalClient(self, loop, "signal")
@@ -99,6 +108,12 @@ class UnifiedRouter(object):
self.log.info(f"Message received ({protocol}) {args} {kwargs}")
identifier = kwargs.get("identifier")
local_message = kwargs.get("local_message")
payload = dict(kwargs.get("payload") or {})
trace_id = (
ensure_trace_id(payload=payload)
if bool(getattr(settings, "TRACE_PROPAGATION_ENABLED", True))
else ""
)
message_text = str(kwargs.get("text") or "").strip()
if local_message is None:
return
@@ -142,7 +157,10 @@ class UnifiedRouter(object):
message_id=str(local_message.id),
user_id=int(local_message.user_id),
message_text=message_text,
payload=dict(kwargs.get("payload") or {}),
payload=(
dict(kwargs.get("payload") or {})
| ({"trace_id": trace_id} if trace_id else {})
),
)
)
except Exception as exc:
@@ -219,6 +237,11 @@ class UnifiedRouter(object):
timestamps = kwargs.get("message_timestamps") or []
read_ts = kwargs.get("read_ts")
payload = kwargs.get("payload") or {}
trace_id = (
ensure_trace_id(payload=payload)
if bool(getattr(settings, "TRACE_PROPAGATION_ENABLED", True))
else ""
)
read_by = kwargs.get("read_by") or ""
identifiers = await self._resolve_identifier_objects(protocol, identifier)
@@ -231,6 +254,7 @@ class UnifiedRouter(object):
source_service=protocol,
read_by_identifier=read_by or row.identifier,
payload=payload,
trace_id=trace_id,
)
record_native_signal(
AvailabilitySignal(

View File

@@ -0,0 +1,3 @@
from core.observability.tracing import ensure_trace_id, new_trace_id
__all__ = ["new_trace_id", "ensure_trace_id"]

View File

@@ -0,0 +1,15 @@
import uuid
def new_trace_id() -> str:
    """Mint a fresh 32-character lowercase-hex trace identifier (random UUID4)."""
    generated = uuid.uuid4()
    return generated.hex
def ensure_trace_id(value: str = "", payload: dict | None = None) -> str:
explicit = str(value or "").strip()
if explicit:
return explicit
candidate = str((payload or {}).get("trace_id") or "").strip()
if candidate:
return candidate
return new_trace_id()

122
core/tasks/chat_defaults.py Normal file
View File

@@ -0,0 +1,122 @@
from __future__ import annotations
import re
from core.models import ChatTaskSource, TaskProject
from core.tasks.codex_support import channel_variants
# Default settings dict copied onto every auto-created TaskProject and
# ChatTaskSource (see ensure_default_source_for_chat). Deliberately strict:
# task derivation requires an explicit "task:"/"todo:" prefix, and the
# created task id is not announced back into the chat.
SAFE_TASK_FLAGS_DEFAULTS = {
    "derive_enabled": True,
    "match_mode": "strict",
    "require_prefix": True,
    "allowed_prefixes": ["task:", "todo:"],
    "completion_enabled": True,
    "ai_title_enabled": True,
    "announce_task_id": False,
    "min_chars": 3,
}
def normalize_channel_identifier(service: str, identifier: str) -> str:
    """Canonicalize a chat identifier for *service*.

    Only WhatsApp identifiers are rewritten: the local part is kept and the
    suffix normalized to ``@s.whatsapp.net`` (direct chats) or ``@g.us``
    (everything else, including bare numbers). Other services pass through
    stripped; empty input yields "".
    """
    svc = str(service or "").strip().lower()
    raw = str(identifier or "").strip()
    if not raw:
        return ""
    if svc != "whatsapp":
        return raw
    local_part = raw.partition("@")[0].strip()
    if not local_part:
        # Malformed JID with empty local part: leave untouched.
        return raw
    suffix = "@s.whatsapp.net" if raw.endswith("@s.whatsapp.net") else "@g.us"
    return f"{local_part}{suffix}"
def resolve_message_scope(message) -> tuple[str, str]:
    """Return (service, channel_identifier) describing where *message* lives.

    Normally taken from the message's own source fields; for web-originated
    messages, falls back to the session identifier's non-web service/identifier
    when both are present.
    """
    service = str(getattr(message, "source_service", "") or "").strip().lower()
    channel = str(getattr(message, "source_chat_id", "") or "").strip()
    if service != "web":
        return service, channel
    session_identifier = getattr(getattr(message, "session", None), "identifier", None)
    alt_service = str(getattr(session_identifier, "service", "") or "").strip().lower()
    alt_channel = str(getattr(session_identifier, "identifier", "") or "").strip()
    if alt_service and alt_channel and alt_service != "web":
        return alt_service, alt_channel
    return service, channel
def _project_name_candidate(service: str, channel_identifier: str, message=None) -> str:
person_name = ""
if message is not None:
identifier = getattr(getattr(message, "session", None), "identifier", None)
person = getattr(identifier, "person", None)
person_name = str(getattr(person, "name", "") or "").strip()
if person_name:
return person_name[:255]
raw = str(channel_identifier or "").strip()
if str(service or "").strip().lower() == "whatsapp":
raw = raw.split("@", 1)[0].strip()
cleaned = re.sub(r"\s+", " ", raw).strip()
if not cleaned:
cleaned = "Chat"
return f"Chat: {cleaned}"[:255]
def _ensure_unique_project_name(user, base_name: str) -> str:
    """Return *base_name* (or a "(n)"-suffixed variant) unique among the
    user's TaskProject names; falls back to a user-id suffix if 2..9999
    are all taken.
    """
    def _taken(candidate: str) -> bool:
        return TaskProject.objects.filter(user=user, name=candidate).exists()

    base = str(base_name or "").strip() or "Chat"
    if not _taken(base):
        return base
    for suffix in range(2, 10000):
        candidate = f"{base} ({suffix})"[:255]
        if not _taken(candidate):
            return candidate
    return f"{base} ({str(user.id)[:8]})"[:255]
def ensure_default_source_for_chat(
    *,
    user,
    service: str,
    channel_identifier: str,
    message=None,
):
    """Return a ChatTaskSource for (user, service, channel), creating one if absent.

    Looks up existing mappings across the identifier spellings produced by
    ``channel_variants`` and re-enables a disabled mapping instead of creating
    a duplicate. When no mapping exists, creates a new TaskProject (named from
    the person/chat via the helpers above) plus a ChatTaskSource, both seeded
    with SAFE_TASK_FLAGS_DEFAULTS. Returns None when the service or identifier
    resolves to nothing usable.
    """
    service_key = str(service or "").strip().lower()
    normalized_identifier = normalize_channel_identifier(service_key, channel_identifier)
    variants = channel_variants(service_key, normalized_identifier)
    if not service_key or not variants:
        return None
    # Ordering prefers enabled, then most recently touched, mappings.
    existing = (
        ChatTaskSource.objects.filter(
            user=user,
            service=service_key,
            channel_identifier__in=variants,
        )
        .select_related("project", "epic")
        .order_by("-enabled", "-updated_at", "-created_at")
        .first()
    )
    if existing is not None:
        # Re-enable a previously disabled mapping rather than duplicating it.
        if not existing.enabled:
            existing.enabled = True
            existing.save(update_fields=["enabled", "updated_at"])
        return existing
    project_name = _ensure_unique_project_name(
        user,
        _project_name_candidate(service_key, normalized_identifier, message=message),
    )
    project = TaskProject.objects.create(
        user=user,
        name=project_name,
        settings=dict(SAFE_TASK_FLAGS_DEFAULTS),
    )
    return ChatTaskSource.objects.create(
        user=user,
        service=service_key,
        channel_identifier=normalized_identifier,
        project=project,
        epic=None,
        enabled=True,
        settings=dict(SAFE_TASK_FLAGS_DEFAULTS),
    )

View File

@@ -0,0 +1,91 @@
from __future__ import annotations
import hashlib
from asgiref.sync import async_to_sync
from core.clients.transport import send_message_raw
from core.models import CodexPermissionRequest, ExternalSyncEvent, TaskProviderConfig
def _deterministic_approval_key(idempotency_key: str) -> str:
digest = hashlib.sha1(str(idempotency_key or "").encode("utf-8")).hexdigest()[:12]
return f"pre-{digest}"
def queue_codex_event_with_pre_approval(
    *,
    user,
    run,
    task,
    task_event,
    action: str,
    provider_payload: dict,
    idempotency_key: str,
) -> tuple[ExternalSyncEvent, CodexPermissionRequest]:
    """Park a codex_cli sync event behind a human pre-submit approval gate.

    Idempotently (keyed off *idempotency_key*) creates/refreshes:
      * an ExternalSyncEvent in status ``waiting_approval`` holding the action
        and provider payload,
      * a CodexPermissionRequest whose ``resume_payload`` carries everything
        needed to resubmit once approved,
    marks *run* as ``waiting_approval``, and — when the user's enabled
    codex_cli TaskProviderConfig names an approver chat — best-effort sends
    approve/deny instructions there. Returns ``(waiting_event, request)``.
    """
    approval_key = _deterministic_approval_key(idempotency_key)
    # update_or_create keyed on the idempotency key makes re-queues a no-op refresh.
    waiting_event, _ = ExternalSyncEvent.objects.update_or_create(
        idempotency_key=f"codex_waiting:{idempotency_key}",
        defaults={
            "user": user,
            "task": task,
            "task_event": task_event,
            "provider": "codex_cli",
            "status": "waiting_approval",
            "payload": {
                "action": str(action or "append_update"),
                "provider_payload": dict(provider_payload or {}),
            },
            "error": "",
        },
    )
    run.status = "waiting_approval"
    run.error = ""
    run.save(update_fields=["status", "error", "updated_at"])
    request, _ = CodexPermissionRequest.objects.update_or_create(
        approval_key=approval_key,
        defaults={
            "user": user,
            "codex_run": run,
            "external_sync_event": waiting_event,
            "summary": "Pre-submit approval required before sending to Codex",
            "requested_permissions": {
                "type": "pre_submit",
                "provider": "codex_cli",
                "action": str(action or "append_update"),
            },
            "resume_payload": {
                "gate_type": "pre_submit",
                "action": str(action or "append_update"),
                "provider_payload": dict(provider_payload or {}),
                "idempotency_key": str(idempotency_key or ""),
            },
            # Re-queueing resets any prior resolution so the gate is pending again.
            "status": "pending",
            "resolved_at": None,
            "resolved_by_identifier": "",
            "resolution_note": "",
        },
    )
    cfg = TaskProviderConfig.objects.filter(user=user, provider="codex_cli", enabled=True).first()
    settings_payload = dict(getattr(cfg, "settings", {}) or {})
    approver_service = str(settings_payload.get("approver_service") or "").strip().lower()
    approver_identifier = str(settings_payload.get("approver_identifier") or "").strip()
    if approver_service and approver_identifier:
        try:
            async_to_sync(send_message_raw)(
                approver_service,
                approver_identifier,
                text=(
                    f"[codex approval] key={approval_key}\n"
                    "summary=Pre-submit approval required before sending to Codex\n"
                    "requested=pre_submit\n"
                    f"use: .codex approve {approval_key} or .codex deny {approval_key}"
                ),
                attachments=[],
                metadata={"origin_tag": f"codex-pre-approval:{approval_key}"},
            )
        except Exception:
            # Notification is best-effort; the pending approval row still exists.
            pass
    return waiting_event, request

View File

@@ -20,6 +20,8 @@ from core.models import (
TaskEpic,
TaskProviderConfig,
)
from core.tasks.chat_defaults import ensure_default_source_for_chat, resolve_message_scope
from core.tasks.codex_approval import queue_codex_event_with_pre_approval
from core.tasks.providers import get_provider
from core.tasks.codex_support import resolve_external_chat_id
@@ -355,6 +357,17 @@ async def _emit_sync_event(task: DerivedTask, event: DerivedTaskEvent, action: s
# Worker-backed providers are queued and executed by `manage.py codex_worker`.
if bool(getattr(provider, "run_in_worker", False)):
if provider_name == "codex_cli":
await sync_to_async(queue_codex_event_with_pre_approval)(
user=task.user,
run=codex_run,
task=task,
task_event=event,
action=action,
provider_payload=dict(request_payload),
idempotency_key=idempotency_key,
)
return
await sync_to_async(ExternalSyncEvent.objects.update_or_create)(
idempotency_key=idempotency_key,
defaults={
@@ -526,6 +539,15 @@ async def _handle_epic_create_command(message: Message, sources: list[ChatTaskSo
return True
def _is_task_command_candidate(text: str) -> bool:
    """Cheap pre-filter: does *text* look like a task command or a prefixed
    task body? Used before auto-seeding a chat task source.
    """
    body = str(text or "").strip()
    if not body:
        return False
    for command_pattern in (_LIST_TASKS_RE, _UNDO_TASK_RE, _EPIC_CREATE_RE):
        if command_pattern.match(body):
            return True
    return _has_task_prefix(body.lower(), ["task:", "todo:"])
async def process_inbound_task_intelligence(message: Message) -> None:
if message is None:
return
@@ -537,7 +559,20 @@ async def process_inbound_task_intelligence(message: Message) -> None:
sources = await _resolve_source_mappings(message)
if not sources:
return
if not _is_task_command_candidate(text):
return
service, channel = resolve_message_scope(message)
if not service or not channel:
return
seeded = await sync_to_async(ensure_default_source_for_chat)(
user=message.user,
service=service,
channel_identifier=channel,
message=message,
)
if seeded is None:
return
sources = [seeded]
if await _handle_scope_task_commands(message, sources, text):
return
if await _handle_epic_create_command(message, sources, text):

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
import json
import subprocess
from hashlib import sha1
from .base import ProviderResult, TaskProvider
@@ -25,27 +26,106 @@ class CodexCLITaskProvider(TaskProvider):
def _profile(self, config: dict) -> str:
return str(config.get("default_profile") or "").strip()
def _is_task_sync_contract_mismatch(self, stderr: str) -> bool:
text = str(stderr or "").lower()
if "unexpected argument '--op'" in text:
return True
if "unexpected argument 'create'" in text and "usage: codex" in text:
return True
if "unexpected argument 'append_update'" in text and "usage: codex" in text:
return True
if "unexpected argument 'mark_complete'" in text and "usage: codex" in text:
return True
if "unexpected argument 'link_task'" in text and "usage: codex" in text:
return True
if "unrecognized subcommand 'create'" in text and "usage: codex" in text:
return True
if "unrecognized subcommand 'append_update'" in text and "usage: codex" in text:
return True
if "unrecognized subcommand 'mark_complete'" in text and "usage: codex" in text:
return True
return False
def _builtin_stub_result(self, op: str, payload: dict, stderr: str) -> ProviderResult:
    """Synthesize a ProviderResult when the local codex CLI lacks task-sync.

    Approval responses are acknowledged as successful; any other op is turned
    into a deterministic requires-approval result so the normal approval flow
    still runs. *stderr* (truncated) is preserved as the fallback reason.
    """
    reason = str(stderr or "")[:4000]
    mode = str(payload.get("mode") or "default").strip().lower()
    external_key = (
        str(payload.get("external_key") or "").strip()
        or str(payload.get("task_id") or "").strip()
    )
    if mode == "approval_response":
        return ProviderResult(
            ok=True,
            external_key=external_key,
            payload={
                "op": op,
                "status": "ok",
                "summary": "approval acknowledged; resumed by builtin codex stub",
                "requires_approval": False,
                "output": "",
                "fallback_mode": "builtin_task_sync_stub",
                "fallback_reason": reason,
            },
        )
    task_id = str(payload.get("task_id") or "").strip()
    trigger_ref = payload.get("trigger_message_id") or payload.get("origin_message_id") or ""
    # Deterministic key: the same op/task/trigger always yields the same gate.
    approval_key = sha1(f"{op}:{task_id}:{trigger_ref}".encode("utf-8")).hexdigest()[:12]
    summary = "Codex approval required (builtin stub fallback)"
    return ProviderResult(
        ok=True,
        external_key=external_key,
        payload={
            "op": op,
            "status": "requires_approval",
            "requires_approval": True,
            "summary": summary,
            "approval_key": approval_key,
            "permission_request": {
                "summary": summary,
                "requested_permissions": ["workspace_write"],
            },
            "resume_payload": {
                "task_id": task_id,
                "op": op,
            },
            "fallback_mode": "builtin_task_sync_stub",
            "fallback_reason": reason,
        },
    )
def _run(self, config: dict, op: str, payload: dict) -> ProviderResult:
cmd = [self._command(config), "task-sync", "--op", str(op)]
base_cmd = [self._command(config), "task-sync"]
workspace = self._workspace(config)
if workspace:
cmd.extend(["--workspace", workspace])
profile = self._profile(config)
if profile:
cmd.extend(["--profile", profile])
command_timeout = self._timeout(config)
data = json.dumps(dict(payload or {}), separators=(",", ":"))
cmd.extend(["--payload-json", data])
common_args: list[str] = []
if workspace:
common_args.extend(["--workspace", workspace])
if profile:
common_args.extend(["--profile", profile])
primary_cmd = [*base_cmd, "--op", str(op), *common_args, "--payload-json", data]
fallback_cmd = [*base_cmd, str(op), *common_args, "--payload-json", data]
try:
completed = subprocess.run(
cmd,
primary_cmd,
capture_output=True,
text=True,
timeout=command_timeout,
check=False,
cwd=workspace if workspace else None,
)
stderr_probe = str(completed.stderr or "").lower()
if completed.returncode != 0 and "unexpected argument '--op'" in stderr_probe:
completed = subprocess.run(
fallback_cmd,
capture_output=True,
text=True,
timeout=command_timeout,
check=False,
cwd=workspace if workspace else None,
)
except subprocess.TimeoutExpired:
return ProviderResult(
ok=False,
@@ -90,6 +170,8 @@ class CodexCLITaskProvider(TaskProvider):
"requires_approval": requires_approval,
}
out_payload.update(parsed)
if (not ok) and self._is_task_sync_contract_mismatch(stderr):
return self._builtin_stub_result(op, dict(payload or {}), stderr)
return ProviderResult(ok=ok, external_key=ext, error=("" if ok else stderr[:4000]), payload=out_payload)
def healthcheck(self, config: dict) -> ProviderResult:

View File

@@ -5,6 +5,21 @@
<p class="subtitle is-6">{{ service_label }}</p>
<article class="box">
<h2 class="title is-6">Create Or Map Project</h2>
{% if primary_project %}
<form method="post" style="margin-bottom: 0.7rem;">
{% csrf_token %}
<input type="hidden" name="action" value="group_project_rename">
<div class="columns is-multiline">
<div class="column is-7">
<label class="label is-size-7">Rename Current Chat Project</label>
<input class="input is-small" name="project_name" value="{{ primary_project.name }}">
</div>
<div class="column is-5" style="display:flex; align-items:flex-end;">
<button class="button is-small is-light" type="submit">Rename</button>
</div>
</div>
</form>
{% endif %}
<form method="post" style="margin-bottom: 0.7rem;">
{% csrf_token %}
<input type="hidden" name="action" value="group_project_create">

View File

@@ -14,7 +14,14 @@
<h2 class="title is-6" style="margin: 0;">Projects</h2>
<span class="tag task-ui-badge">{{ projects|length }}</span>
</div>
<p class="help" style="margin-bottom: 0.45rem;">Create the project first, then map linked identifiers below in one click.</p>
<p class="help" style="margin-bottom: 0.45rem;">Projects are created automatically from chat usage. Use this panel for manual cleanup and mapping.</p>
<div class="buttons" style="margin-bottom:0.55rem;">
{% if show_empty_projects %}
<a class="button is-small is-light" href="{% url 'tasks_hub' %}">Hide empty projects</a>
{% else %}
<a class="button is-small is-light" href="{% url 'tasks_hub' %}?show_empty=1">Show empty projects</a>
{% endif %}
</div>
<form method="post" style="margin-bottom: 0.75rem;">
{% csrf_token %}
<input type="hidden" name="action" value="project_create">
@@ -51,7 +58,7 @@
<div class="select is-small is-fullwidth">
<select name="project">
<option value="">Select project</option>
{% for project in projects %}
{% for project in project_choices %}
<option value="{{ project.id }}" {% if selected_project and selected_project.id == project.id %}selected{% endif %}>{{ project.name }}</option>
{% endfor %}
</select>
@@ -115,10 +122,11 @@
<span class="tag task-ui-badge">{{ project.epic_count }} epic{{ project.epic_count|pluralize }}</span>
</td>
<td class="has-text-right">
<form method="post">
<form method="post" onsubmit="const v=prompt('Type {{ project.name|escapejs }} to confirm delete'); if(v===null){return false;} this.confirm_name.value=v; return true;">
{% csrf_token %}
<input type="hidden" name="action" value="project_delete">
<input type="hidden" name="project_id" value="{{ project.id }}">
<input type="hidden" name="confirm_name" value="">
<button class="button is-small is-danger is-light" type="submit">Delete</button>
</form>
</td>

View File

@@ -5,9 +5,10 @@
<h1 class="title is-4">Project: {{ project.name }}</h1>
<div class="buttons" style="margin-bottom: 0.75rem;">
<a class="button is-small is-light" href="{% url 'tasks_hub' %}">Back</a>
<form method="post">
<form method="post" onsubmit="const v=prompt('Type {{ project.name|escapejs }} to confirm delete'); if(v===null){return false;} this.confirm_name.value=v; return true;">
{% csrf_token %}
<input type="hidden" name="action" value="project_delete">
<input type="hidden" name="confirm_name" value="">
<button class="button is-small is-danger is-light" type="submit">Delete Project</button>
</form>
</div>

View File

@@ -130,7 +130,7 @@
<span class="icon is-small"><i class="fa-solid fa-chart-line"></i></span>
<span>Quick Insights</span>
</button>
<a class="button is-light is-rounded" href="{{ tasks_hub_url }}">
<a class="button is-light is-rounded" href="{{ tasks_group_url }}">
<span class="icon is-small"><i class="fa-solid fa-list-check"></i></span>
<span>Tasks</span>
</a>

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
from pathlib import Path
from django.test import SimpleTestCase
class AdapterBoundaryRulesTests(SimpleTestCase):
    """Architecture guardrail: client adapter modules must stay translator-only.

    Scans every module in the ``clients`` package and fails if any of them
    imports a business-policy engine (commands/tasks/assist/translation).
    """

    def test_client_adapters_do_not_import_business_engines(self):
        adapters_root = Path(__file__).resolve().parents[1] / "clients"
        banned_prefixes = (
            "from core.commands",
            "import core.commands",
            "from core.tasks",
            "import core.tasks",
            "from core.assist",
            "import core.assist",
            "from core.translation",
            "import core.translation",
        )

        def offending_lines(module_path):
            # Yield "<file>:<line>: <text>" for each banned import statement.
            text = module_path.read_text(encoding="utf-8")
            for number, raw in enumerate(text.splitlines(), start=1):
                candidate = raw.strip()
                # str.startswith accepts a tuple, so one call covers all prefixes.
                if candidate.startswith(banned_prefixes):
                    yield f"{module_path.name}:{number}: {candidate}"

        violations = [
            hit
            for module_path in sorted(adapters_root.glob("*.py"))
            if module_path.name != "__init__.py"
            for hit in offending_lines(module_path)
        ]
        self.assertEqual(
            [],
            violations,
            msg=(
                "Adapter modules must stay translator-only and must not import "
                "business policy/task/assist/translation engines directly."
            ),
        )

View File

@@ -71,3 +71,86 @@ class CodexCLITaskProviderTests(SimpleTestCase):
self.assertTrue(result.ok)
self.assertTrue(bool((result.payload or {}).get("requires_approval")))
self.assertEqual("requires_approval", (result.payload or {}).get("parsed_status"))
@patch("core.tasks.providers.codex_cli.subprocess.run")
def test_retries_with_positional_op_when_flag_unsupported(self, run_mock):
    """When the CLI rejects the `--op` flag, the provider retries once using the
    positional `codex task-sync create` form and surfaces that run's result."""
    run_mock.side_effect = [
        # First invocation: simulated CLI that does not understand `--op`.
        CompletedProcess(
            args=[],
            returncode=2,
            stdout="",
            stderr="error: unexpected argument '--op' found",
        ),
        # Retry: positional form succeeds and reports the external key as JSON.
        CompletedProcess(
            args=[],
            returncode=0,
            stdout='{"status":"ok","external_key":"cx-42"}',
            stderr="",
        ),
    ]
    result = self.provider.create_task({"command": "codex"}, {"task_id": "t1"})
    self.assertTrue(result.ok)
    self.assertEqual("cx-42", result.external_key)
    # Exactly two subprocess calls: flag form first, then the positional retry.
    self.assertEqual(2, run_mock.call_count)
    first = run_mock.call_args_list[0].args[0]
    second = run_mock.call_args_list[1].args[0]
    self.assertIn("--op", first)
    self.assertNotIn("--op", second)
    self.assertEqual(["codex", "task-sync", "create"], second[:3])
@patch("core.tasks.providers.codex_cli.subprocess.run")
def test_falls_back_to_builtin_approval_stub_when_no_task_sync_contract(self, run_mock):
    """If both the `--op` flag form and the positional `create` subcommand are
    rejected, the provider falls back to its built-in approval stub and reports
    a requires-approval result instead of failing."""
    run_mock.side_effect = [
        # Flag form rejected.
        CompletedProcess(
            args=[],
            returncode=2,
            stdout="",
            stderr="error: unexpected argument '--op' found",
        ),
        # Positional form also rejected: the CLI has no task-sync contract at all.
        CompletedProcess(
            args=[],
            returncode=2,
            stdout="",
            stderr="error: unrecognized subcommand 'create'\nUsage: codex [OPTIONS] [PROMPT]",
        ),
    ]
    result = self.provider.create_task(
        {"command": "codex"},
        {
            "task_id": "t1",
            "trigger_message_id": "m1",
            "mode": "default",
        },
    )
    # Fallback still counts as ok, but is gated behind an approval.
    self.assertTrue(result.ok)
    self.assertTrue(bool((result.payload or {}).get("requires_approval")))
    self.assertEqual("requires_approval", str((result.payload or {}).get("status") or ""))
    self.assertEqual("builtin_task_sync_stub", str((result.payload or {}).get("fallback_mode") or ""))
@patch("core.tasks.providers.codex_cli.subprocess.run")
def test_builtin_stub_approval_response_returns_ok(self, run_mock):
    """An `approval_response` routed through the built-in stub (because the CLI
    rejects both invocation forms) resolves to a plain ok result with no
    further approval required."""
    run_mock.side_effect = [
        # Flag form rejected.
        CompletedProcess(
            args=[],
            returncode=2,
            stdout="",
            stderr="error: unexpected argument '--op' found",
        ),
        # Positional form rejected as well.
        CompletedProcess(
            args=[],
            returncode=2,
            stdout="",
            stderr="error: unexpected argument 'append_update' found\nUsage: codex [OPTIONS] [PROMPT]",
        ),
    ]
    result = self.provider.append_update(
        {"command": "codex"},
        {
            "task_id": "t1",
            "mode": "approval_response",
            "approval_key": "abc123",
        },
    )
    self.assertTrue(result.ok)
    # The approval has been answered, so the stub must not ask again.
    self.assertFalse(bool((result.payload or {}).get("requires_approval")))
    self.assertEqual("ok", str((result.payload or {}).get("status") or ""))

View File

@@ -123,10 +123,17 @@ class CodexCommandExecutionTests(TestCase):
self.assertTrue(results[0].ok)
run = CodexRun.objects.order_by("-created_at").first()
self.assertIsNotNone(run)
self.assertEqual("queued", run.status)
self.assertEqual("waiting_approval", run.status)
event = ExternalSyncEvent.objects.order_by("-created_at").first()
self.assertEqual("pending", event.status)
self.assertEqual("waiting_approval", event.status)
self.assertEqual("default", str((event.payload or {}).get("provider_payload", {}).get("mode") or ""))
self.assertTrue(
CodexPermissionRequest.objects.filter(
user=self.user,
codex_run=run,
status="pending",
).exists()
)
def test_plan_requires_reply_anchor(self):
trigger = self._msg("#codex plan# #1")
@@ -145,6 +152,14 @@ class CodexCommandExecutionTests(TestCase):
self.assertEqual("reply_required_for_codex_plan", results[0].error)
def test_approve_command_queues_resume_event(self):
waiting_event = ExternalSyncEvent.objects.create(
user=self.user,
task=self.task,
provider="codex_cli",
status="waiting_approval",
payload={},
error="",
)
run = CodexRun.objects.create(
user=self.user,
task=self.task,
@@ -158,6 +173,7 @@ class CodexCommandExecutionTests(TestCase):
req = CodexPermissionRequest.objects.create(
user=self.user,
codex_run=run,
external_sync_event=waiting_event,
approval_key="ak-123",
summary="Need approval",
requested_permissions={"items": ["write"]},
@@ -186,8 +202,69 @@ class CodexCommandExecutionTests(TestCase):
self.assertTrue(results[0].ok)
req.refresh_from_db()
run.refresh_from_db()
waiting_event.refresh_from_db()
self.assertEqual("approved", req.status)
self.assertEqual("approved_waiting_resume", run.status)
self.assertEqual("ok", waiting_event.status)
self.assertTrue(
ExternalSyncEvent.objects.filter(idempotency_key="codex_approval:ak-123:approved", status="pending").exists()
)
def test_approve_pre_submit_request_queues_original_action(self):
    """Approving a pre-submit permission request re-queues the original action
    (from the request's resume_payload) as a fresh pending sync event."""
    # Original event parked while waiting for the user's approval.
    waiting_event = ExternalSyncEvent.objects.create(
        user=self.user,
        task=self.task,
        provider="codex_cli",
        status="waiting_approval",
        payload={},
        error="",
    )
    run = CodexRun.objects.create(
        user=self.user,
        task=self.task,
        project=self.project,
        source_service="web",
        source_channel="web-chan-1",
        status="waiting_approval",
        request_payload={"action": "append_update", "provider_payload": {"task_id": str(self.task.id)}},
        result_payload={},
    )
    # Pending pre-submit gate carrying the deferred action in resume_payload.
    CodexPermissionRequest.objects.create(
        user=self.user,
        codex_run=run,
        external_sync_event=waiting_event,
        approval_key="pre-ak-1",
        summary="pre submit",
        requested_permissions={"type": "pre_submit"},
        resume_payload={
            "gate_type": "pre_submit",
            "action": "append_update",
            "provider_payload": {"task_id": str(self.task.id), "mode": "default"},
            "idempotency_key": "codex_cmd:resume:1",
        },
        status="pending",
    )
    # Bind the approving channel so the inbound command is accepted there.
    CommandChannelBinding.objects.get_or_create(
        profile=self.profile,
        direction="ingress",
        service="web",
        channel_identifier="approver-chan",
        defaults={"enabled": True},
    )
    trigger = self._msg(".codex approve pre-ak-1", source_chat_id="approver-chan")
    results = async_to_sync(process_inbound_message)(
        CommandContext(
            service="web",
            channel_identifier="approver-chan",
            message_id=str(trigger.id),
            user_id=self.user.id,
            message_text=str(trigger.text),
            payload={},
        )
    )
    self.assertEqual(1, len(results))
    self.assertTrue(results[0].ok)
    # The deferred action reappears under its original idempotency key, pending.
    resume = ExternalSyncEvent.objects.filter(idempotency_key="codex_cmd:resume:1").first()
    self.assertIsNotNone(resume)
    self.assertEqual("pending", resume.status)
    self.assertEqual("append_update", str((resume.payload or {}).get("action") or ""))

View File

@@ -121,3 +121,62 @@ class CodexWorkerPhase1Tests(TestCase):
request = CodexPermissionRequest.objects.get(approval_key="ak-worker-1")
self.assertEqual("pending", request.status)
self.assertEqual(str(run.id), str(request.codex_run_id))
@patch("core.management.commands.codex_worker.get_provider")
def test_approval_response_marks_original_waiting_event_ok(self, get_provider_mock):
    """When the worker processes an approval_response resume event successfully,
    both the resume event and the original waiting_approval event end up ok."""
    # The original event that was parked waiting for user approval.
    waiting_event = ExternalSyncEvent.objects.create(
        user=self.user,
        provider="codex_cli",
        status="waiting_approval",
        payload={"action": "append_update", "provider_payload": {"mode": "default"}},
        error="",
    )
    run = CodexRun.objects.create(
        user=self.user,
        project=self.project,
        source_service="web",
        source_channel="web-chan-1",
        status="approved_waiting_resume",
        request_payload={},
        result_payload={},
    )
    # Already-approved permission request linking run and waiting event.
    CodexPermissionRequest.objects.create(
        user=self.user,
        codex_run=run,
        external_sync_event=waiting_event,
        approval_key="ak-worker-ok",
        summary="needs permissions",
        requested_permissions={"items": ["write"]},
        resume_payload={"resume": True},
        status="approved",
    )
    # The follow-up event the worker will actually execute.
    resume_event = ExternalSyncEvent.objects.create(
        user=self.user,
        provider="codex_cli",
        status="pending",
        payload={
            "action": "append_update",
            "provider_payload": {
                "mode": "approval_response",
                "approval_key": "ak-worker-ok",
                "codex_run_id": str(run.id),
            },
        },
        error="",
    )

    class _Provider:
        # Stub provider: every action succeeds immediately inside the worker.
        run_in_worker = True

        def append_update(self, config, payload):
            return ProviderResult(ok=True, payload={"status": "ok", "summary": "resumed"})

        create_task = mark_complete = link_task = append_update

    get_provider_mock.return_value = _Provider()
    CodexWorkerCommand()._run_event(resume_event)
    waiting_event.refresh_from_db()
    resume_event.refresh_from_db()
    self.assertEqual("ok", resume_event.status)
    # Success must propagate back to the originally-parked event.
    self.assertEqual("ok", waiting_event.status)

View File

@@ -53,3 +53,26 @@ class CommandRoutingVariantUITests(TestCase):
row = self.profile.variant_policies.get(variant_key="bp_set")
self.assertEqual("ai", row.generation_mode)
self.assertTrue(row.send_status_to_egress)
def test_variant_policy_update_preserves_source_status_toggle(self):
    """Submitting the variant-policy form without the send_status_to_source
    checkbox turns the toggle off, and merely rendering the page afterwards
    must not reset it."""
    response = self.client.post(
        reverse("command_routing"),
        {
            "action": "variant_policy_update",
            "profile_id": str(self.profile.id),
            "variant_key": "bp_set",
            "enabled": "1",
            "generation_mode": "verbatim",
            # Intentionally omit send_status_to_source to set it False.
        },
        follow=True,
    )
    self.assertEqual(200, response.status_code)
    row = self.profile.variant_policies.get(variant_key="bp_set")
    self.assertFalse(row.send_status_to_source)
    # Rendering the page should not overwrite user policy decisions.
    response = self.client.get(reverse("command_routing"))
    self.assertEqual(200, response.status_code)
    row.refresh_from_db()
    self.assertFalse(row.send_status_to_source)

View File

@@ -187,6 +187,36 @@ class ComposeReactTests(TestCase):
response.json(),
)
@patch("core.views.compose.transport.send_reaction", new_callable=AsyncMock)
def test_whatsapp_web_local_message_without_bridge_is_unresolvable(
    self, mocked_send_reaction
):
    """A web-originated message with no WhatsApp bridge reference cannot be
    reacted to: the view reports whatsapp_target_unresolvable and never
    touches the transport."""
    person, _, message = self._build_message(
        service="whatsapp",
        identifier="12345@s.whatsapp.net",
        source_message_id="1771234567000",
    )
    # Re-tag the message as locally created (web), so the numeric
    # source_message_id no longer counts as a WhatsApp upstream id.
    message.source_service = "web"
    message.save(update_fields=["source_service"])
    response = self.client.post(
        reverse("compose_react"),
        {
            "service": "whatsapp",
            "identifier": "12345@s.whatsapp.net",
            "person": str(person.id),
            "message_id": str(message.id),
            "emoji": "😮",
        },
    )
    self.assertEqual(200, response.status_code)
    self.assertEqual(
        {"ok": False, "error": "whatsapp_target_unresolvable"},
        response.json(),
    )
    # No outbound reaction may be attempted for an unresolvable target.
    mocked_send_reaction.assert_not_awaited()
def test_compose_page_renders_reaction_actions_for_signal(self):
person, _, _ = self._build_message(
service="signal",

View File

@@ -0,0 +1,62 @@
from django.test import TestCase, override_settings
from core.events.ledger import append_event_sync
from core.models import ChatSession, ConversationEvent, Person, PersonIdentifier, User
@override_settings(EVENT_LEDGER_DUAL_WRITE=True)
class EventLedgerTests(TestCase):
    """append_event_sync writes ConversationEvent rows and deduplicates on
    (origin transport, origin message id, event type)."""

    def setUp(self):
        # Minimal user -> person -> identifier -> session chain that ledger
        # rows attach to.
        self.user = User.objects.create_user(
            username="ledger-user",
            email="ledger@example.com",
            password="x",
        )
        self.person = Person.objects.create(user=self.user, name="Ledger Person")
        self.identifier = PersonIdentifier.objects.create(
            user=self.user,
            person=self.person,
            service="signal",
            identifier="+15555550123",
        )
        self.session = ChatSession.objects.create(
            user=self.user,
            identifier=self.identifier,
        )

    def test_append_event_creates_row(self):
        """A single append produces exactly one ConversationEvent row."""
        row = append_event_sync(
            user=self.user,
            session=self.session,
            ts=1234,
            event_type="message_created",
            direction="in",
            origin_transport="signal",
            origin_message_id="abc",
            payload={"text": "hello"},
        )
        self.assertIsNotNone(row)
        self.assertEqual(1, ConversationEvent.objects.count())

    def test_append_event_is_idempotent_for_same_origin_and_type(self):
        """Re-appending the same origin_message_id/event_type is a no-op even
        when ts and payload differ."""
        append_event_sync(
            user=self.user,
            session=self.session,
            ts=1234,
            event_type="message_created",
            direction="in",
            origin_transport="signal",
            origin_message_id="dup-1",
            payload={"text": "hello"},
        )
        append_event_sync(
            user=self.user,
            session=self.session,
            ts=1235,
            event_type="message_created",
            direction="in",
            origin_transport="signal",
            origin_message_id="dup-1",
            payload={"text": "hello again"},
        )
        self.assertEqual(1, ConversationEvent.objects.count())

View File

@@ -0,0 +1,47 @@
from io import StringIO
from django.core.management import call_command
from django.test import TestCase, override_settings
from core.events.ledger import append_event_sync
from core.models import ChatSession, Person, PersonIdentifier, User
@override_settings(EVENT_LEDGER_DUAL_WRITE=True)
class EventLedgerSmokeCommandTests(TestCase):
    """The event_ledger_smoke management command summarizes recent ledger rows."""

    def setUp(self):
        self.user = User.objects.create_user(
            username="ledger-smoke-user",
            email="ledger-smoke@example.com",
            password="pw",
        )
        person = Person.objects.create(user=self.user, name="Smoke Person")
        identifier = PersonIdentifier.objects.create(
            user=self.user,
            person=person,
            service="signal",
            identifier="+15550001111",
        )
        self.session = ChatSession.objects.create(user=self.user, identifier=identifier)

    def test_smoke_command_reports_recent_rows(self):
        """The command output names itself and includes per-type counts."""
        append_event_sync(
            user=self.user,
            session=self.session,
            ts=1770000000000,
            event_type="message_created",
            direction="in",
            origin_transport="signal",
            origin_message_id="abc",
            payload={"message_id": "m1"},
        )
        out = StringIO()
        # Huge minutes window so the fixed ts above always counts as "recent".
        call_command(
            "event_ledger_smoke",
            user_id=str(self.user.id),
            minutes=999999,
            stdout=out,
        )
        rendered = out.getvalue()
        self.assertIn("event-ledger-smoke", rendered)
        self.assertIn("event_type_counts=", rendered)

View File

@@ -0,0 +1,132 @@
from io import StringIO
import time
from django.core.management import call_command
from django.test import TestCase, override_settings
from core.events.ledger import append_event_sync
from core.events.projection import shadow_compare_session
from core.models import ChatSession, Message, Person, PersonIdentifier, User
@override_settings(EVENT_LEDGER_DUAL_WRITE=True)
class EventProjectionShadowTests(TestCase):
    """shadow_compare_session diffs the Message table against the event ledger
    projection; the event_projection_shadow command reports the same per user."""

    def setUp(self):
        self.user = User.objects.create_user(
            username="projection-user",
            email="projection@example.com",
            password="x",
        )
        self.person = Person.objects.create(user=self.user, name="Projection Person")
        self.identifier = PersonIdentifier.objects.create(
            user=self.user,
            person=self.person,
            service="signal",
            identifier="+15555550333",
        )
        self.session = ChatSession.objects.create(user=self.user, identifier=self.identifier)

    def test_shadow_compare_has_zero_mismatch_when_projection_matches(self):
        """A message whose create/read/reaction facts all have matching ledger
        events compares with zero mismatches."""
        message = Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000000000,
            sender_uuid="+15555550333",
            text="hello",
            delivered_ts=1700000000000,
            read_ts=1700000000500,
            receipt_payload={
                "reactions": [
                    {
                        "source_service": "signal",
                        "actor": "user:1:signal",
                        "emoji": "👍",
                        "removed": False,
                    }
                ]
            },
        )
        # Ledger mirror of the message row itself.
        append_event_sync(
            user=self.user,
            session=self.session,
            ts=1700000000000,
            event_type="message_created",
            direction="in",
            origin_transport="signal",
            origin_message_id=str(message.id),
            payload={"message_id": str(message.id), "text": "hello"},
        )
        # Ledger mirror of the read receipt.
        append_event_sync(
            user=self.user,
            session=self.session,
            ts=1700000000500,
            event_type="read_receipt",
            direction="system",
            origin_transport="signal",
            origin_message_id=str(message.id),
            payload={"message_id": str(message.id), "read_ts": 1700000000500},
        )
        # Ledger mirror of the stored reaction (same actor/emoji/source as above).
        append_event_sync(
            user=self.user,
            session=self.session,
            ts=1700000000600,
            event_type="reaction_added",
            direction="system",
            actor_identifier="user:1:signal",
            origin_transport="signal",
            origin_message_id=str(message.id),
            payload={
                "message_id": str(message.id),
                "emoji": "👍",
                "source_service": "signal",
                "actor": "user:1:signal",
            },
        )
        compared = shadow_compare_session(self.session, detail_limit=10)
        self.assertEqual(0, compared["mismatch_total"])

    def test_shadow_compare_detects_missing_projection_row(self):
        """A message with no ledger event at all must show up under
        missing_in_projection."""
        Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000000000,
            sender_uuid="+15555550333",
            text="no-event",
        )
        compared = shadow_compare_session(self.session, detail_limit=10)
        self.assertGreater(compared["counters"]["missing_in_projection"], 0)

    def test_management_command_emits_summary(self):
        out = StringIO()
        call_command(
            "event_projection_shadow",
            user_id=str(self.user.id),
            limit_sessions=5,
            stdout=out,
        )
        rendered = out.getvalue()
        self.assertIn("shadow compare:", rendered)
        self.assertIn("cause_counts=", rendered)

    def test_management_command_supports_service_and_recent_filters(self):
        """Filtering by service plus a recent-minutes window still renders the
        summary line (the just-created message falls inside the window)."""
        Message.objects.create(
            user=self.user,
            session=self.session,
            ts=int(time.time() * 1000),
            sender_uuid="+15550000000",
            text="recent",
            source_service="signal",
            source_message_id="recent-1",
        )
        out = StringIO()
        call_command(
            "event_projection_shadow",
            user_id=str(self.user.id),
            service="signal",
            recent_minutes=120,
            limit_sessions=5,
            stdout=out,
        )
        rendered = out.getvalue()
        self.assertIn("shadow compare:", rendered)

View File

@@ -8,6 +8,7 @@ from core.commands.engine import _matches_trigger, process_inbound_message
from core.messaging.reply_sync import extract_reply_ref, resolve_reply_target
from core.views.compose import _command_options_for_channel
from core.models import (
ChatTaskSource,
ChatSession,
CommandAction,
CommandChannelBinding,
@@ -362,6 +363,14 @@ class Phase1CommandEngineTests(TestCase):
).exists()
self.assertTrue(ingress_exists)
self.assertTrue(egress_exists)
self.assertTrue(
ChatTaskSource.objects.filter(
user=self.user,
service="signal",
channel_identifier="+15550000002",
enabled=True,
).exists()
)
def test_first_user_bp_command_auto_setup_is_idempotent(self):
CommandProfile.objects.filter(user=self.user, slug="bp").delete()

View File

@@ -0,0 +1,126 @@
from __future__ import annotations
from asgiref.sync import async_to_sync
from django.test import TestCase
from core.messaging import history
from core.models import ChatSession, Message, Person, PersonIdentifier, User
from core.views.compose import _serialize_message
class ReactionNormalizationTests(TestCase):
    """Reaction targeting and normalization rules in core.messaging.history."""

    def setUp(self):
        self.user = User.objects.create_user(
            username="react-normalize",
            email="react-normalize@example.com",
            password="pw",
        )
        self.person = Person.objects.create(user=self.user, name="Reaction Person")
        self.identifier = PersonIdentifier.objects.create(
            user=self.user,
            person=self.person,
            service="signal",
            identifier="+15551239999",
        )
        self.session = ChatSession.objects.create(
            user=self.user,
            identifier=self.identifier,
        )

    def test_apply_reaction_prefers_exact_source_timestamp_match(self):
        """With both a near-timestamp and an exact-timestamp candidate present,
        the reaction must land on the exact source_message_id match only."""
        # Decoy message 100ms away from the reaction's target timestamp.
        near_message = Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000000100,
            sender_uuid="author-near",
            text="near",
            source_service="signal",
            source_message_id="1700000000100",
        )
        exact_message = Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000000000,
            sender_uuid="author-exact",
            text="exact",
            source_service="signal",
            source_message_id="1700000000000",
        )
        updated = async_to_sync(history.apply_reaction)(
            self.user,
            self.identifier,
            target_ts=1700000000000,
            emoji="❤️",
            source_service="signal",
            actor="reactor-1",
            target_author="author-exact",
            remove=False,
            payload={"origin": "test"},
        )
        self.assertEqual(str(exact_message.id), str(updated.id))
        exact_message.refresh_from_db()
        near_message.refresh_from_db()
        self.assertEqual(1, len((exact_message.receipt_payload or {}).get("reactions") or []))
        # The match strategy is recorded for observability.
        self.assertEqual(
            "exact_source_message_id_ts",
            str((exact_message.receipt_payload or {}).get("reaction_last_match_strategy") or ""),
        )
        self.assertEqual(0, len((near_message.receipt_payload or {}).get("reactions") or []))

    def test_remove_without_emoji_is_audited_not_active(self):
        """A removal with an empty emoji leaves no active reaction but is still
        kept in the reaction_events audit trail."""
        message = Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000001000,
            sender_uuid="author-1",
            text="msg",
            source_service="signal",
            source_message_id="1700000001000",
        )
        async_to_sync(history.apply_reaction)(
            self.user,
            self.identifier,
            target_ts=1700000001000,
            emoji="",
            source_service="whatsapp",
            actor="actor-1",
            remove=True,
            payload={"origin": "test"},
        )
        message.refresh_from_db()
        payload = dict(message.receipt_payload or {})
        self.assertEqual([], list(payload.get("reactions") or []))
        self.assertEqual(1, len(list(payload.get("reaction_events") or [])))

    def test_emoji_only_reply_text_is_not_reaction(self):
        """A reply whose whole text is an emoji stays a normal reply message;
        serialization must not reinterpret it as a reaction on the anchor."""
        anchor = Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000002000,
            sender_uuid="author-1",
            text="anchor",
            source_service="signal",
            source_message_id="1700000002000",
        )
        heart_reply = Message.objects.create(
            user=self.user,
            session=self.session,
            ts=1700000003000,
            sender_uuid="author-2",
            text="❤️",
            source_service="signal",
            source_message_id="1700000003000",
            reply_to=anchor,
            reply_source_service="signal",
            reply_source_message_id="1700000002000",
            receipt_payload={},
        )
        serialized = _serialize_message(heart_reply)
        self.assertEqual("❤️", serialized["text"])
        self.assertEqual([], list(serialized.get("reactions") or []))
        self.assertEqual(str(anchor.id), serialized["reply_to_id"])

View File

@@ -211,6 +211,119 @@ class TaskSettingsViewActionsTests(TestCase):
)
@override_settings(TASK_DERIVATION_USE_AI=False)
class TaskAutoBootstrapTests(TestCase):
    """An inbound task-style message auto-bootstraps the chat's project and
    ChatTaskSource mapping (AI derivation disabled; rule-based path only)."""

    def setUp(self):
        self.user = User.objects.create_user("task-auto-user", "task-auto@example.com", "x")
        self.person = Person.objects.create(user=self.user, name="Bootstrap Chat")
        self.identifier = PersonIdentifier.objects.create(
            user=self.user,
            person=self.person,
            service="whatsapp",
            identifier="120363402761690215@g.us",
        )
        self.session = ChatSession.objects.create(user=self.user, identifier=self.identifier)

    def test_task_message_auto_creates_project_and_source(self):
        msg = Message.objects.create(
            user=self.user,
            session=self.session,
            sender_uuid="peer",
            text="task: ship alpha",
            ts=1000,
            source_service="whatsapp",
            source_chat_id="120363402761690215@g.us",
        )
        # Silence the outbound confirmation send; only persistence is asserted.
        with patch("core.tasks.engine.send_message_raw", new=AsyncMock()):
            async_to_sync(process_inbound_task_intelligence)(msg)
        source = ChatTaskSource.objects.filter(
            user=self.user,
            service="whatsapp",
            channel_identifier="120363402761690215@g.us",
            enabled=True,
        ).first()
        self.assertIsNotNone(source)
        # The source must point at a freshly created project, with one derived task.
        self.assertTrue(TaskProject.objects.filter(user=self.user, id=source.project_id).exists())
        self.assertEqual(1, DerivedTask.objects.filter(user=self.user).count())
class TaskProjectDeleteGuardTests(TestCase):
    """Project deletion requires typing the exact project name, and deleting a
    mapped project re-seeds a default ChatTaskSource mapping for the chat."""

    def setUp(self):
        self.user = User.objects.create_user("task-delete-user", "task-delete@example.com", "x")
        self.client.force_login(self.user)
        self.project = TaskProject.objects.create(user=self.user, name="Delete Me")
        self.source = ChatTaskSource.objects.create(
            user=self.user,
            service="signal",
            channel_identifier="+15550000001",
            project=self.project,
            enabled=True,
        )

    def test_project_delete_requires_exact_confirmation(self):
        """A mismatched confirm_name must leave the project untouched."""
        response = self.client.post(
            reverse("tasks_hub"),
            {
                "action": "project_delete",
                "project_id": str(self.project.id),
                "confirm_name": "wrong",
            },
            follow=True,
        )
        self.assertEqual(200, response.status_code)
        self.assertTrue(TaskProject.objects.filter(id=self.project.id, user=self.user).exists())

    def test_project_delete_reseeds_default_mapping(self):
        """A confirmed delete removes the project but keeps the chat mapped via
        a re-seeded enabled ChatTaskSource."""
        response = self.client.post(
            reverse("tasks_hub"),
            {
                "action": "project_delete",
                "project_id": str(self.project.id),
                "confirm_name": "Delete Me",
            },
            follow=True,
        )
        self.assertEqual(200, response.status_code)
        self.assertFalse(TaskProject.objects.filter(id=self.project.id, user=self.user).exists())
        self.assertTrue(
            ChatTaskSource.objects.filter(
                user=self.user,
                service="signal",
                channel_identifier="+15550000001",
                enabled=True,
            ).exists()
        )
class TaskHubEmptyProjectVisibilityTests(TestCase):
    """The tasks hub hides projects with no tasks unless ?show_empty=1 is set."""

    def setUp(self):
        self.user = User.objects.create_user("task-hub-user", "task-hub@example.com", "x")
        self.client.force_login(self.user)
        # One project with no tasks, one with a task attached.
        self.empty = TaskProject.objects.create(user=self.user, name="Empty")
        self.used = TaskProject.objects.create(user=self.user, name="Used")
        DerivedTask.objects.create(
            user=self.user,
            project=self.used,
            title="Visible Task",
            source_service="web",
            source_channel="web-1",
            reference_code="1",
            status_snapshot="open",
        )

    def test_tasks_hub_hides_empty_projects_by_default(self):
        response = self.client.get(reverse("tasks_hub"))
        self.assertEqual(200, response.status_code)
        projects = list(response.context["projects"])
        self.assertEqual(["Used"], [str(row.name) for row in projects])

    def test_tasks_hub_can_show_empty_projects(self):
        response = self.client.get(reverse("tasks_hub"), {"show_empty": "1"})
        self.assertEqual(200, response.status_code)
        names = sorted(str(row.name) for row in response.context["projects"])
        self.assertEqual(["Empty", "Used"], names)
class TaskSettingsExternalChatLinkScopeTests(TestCase):
def setUp(self):
self.user = User.objects.create_user("task-link-user", "task-link@example.com", "x")
@@ -331,10 +444,29 @@ class CodexSettingsAndSubmitTests(TestCase):
follow=True,
)
self.assertEqual(200, response.status_code)
self.assertTrue(CodexRun.objects.filter(user=self.user, task=self.task).exists())
self.assertTrue(ExternalSyncEvent.objects.filter(user=self.user, task=self.task, provider="codex_cli").exists())
run = CodexRun.objects.filter(user=self.user, task=self.task).order_by("-created_at").first()
self.assertIsNotNone(run)
self.assertEqual("waiting_approval", str(getattr(run, "status", "")))
event = ExternalSyncEvent.objects.filter(user=self.user, task=self.task, provider="codex_cli").order_by("-created_at").first()
self.assertIsNotNone(event)
self.assertEqual("waiting_approval", str(getattr(event, "status", "")))
self.assertTrue(
CodexPermissionRequest.objects.filter(
user=self.user,
codex_run=run,
status="pending",
).exists()
)
def test_codex_settings_page_and_approval_action(self):
waiting_event = ExternalSyncEvent.objects.create(
user=self.user,
task=self.task,
provider="codex_cli",
status="waiting_approval",
payload={},
error="",
)
run = CodexRun.objects.create(
user=self.user,
task=self.task,
@@ -348,6 +480,7 @@ class CodexSettingsAndSubmitTests(TestCase):
req = CodexPermissionRequest.objects.create(
user=self.user,
codex_run=run,
external_sync_event=waiting_event,
approval_key="approve-me",
summary="need approval",
requested_permissions={"items": ["write"]},
@@ -365,5 +498,7 @@ class CodexSettingsAndSubmitTests(TestCase):
self.assertEqual(200, response.status_code)
req.refresh_from_db()
run.refresh_from_db()
waiting_event.refresh_from_db()
self.assertEqual("approved", req.status)
self.assertEqual("approved_waiting_resume", run.status)
self.assertEqual("ok", waiting_event.status)

View File

@@ -0,0 +1,21 @@
from django.test import SimpleTestCase
from core.observability.tracing import ensure_trace_id
class TracingHelpersTests(SimpleTestCase):
    """ensure_trace_id precedence: explicit value, then payload, then generated."""

    def test_ensure_trace_id_prefers_explicit_value(self):
        resolved = ensure_trace_id("abc123", {"trace_id": "payload"})
        self.assertEqual("abc123", resolved)

    def test_ensure_trace_id_uses_payload_value(self):
        resolved = ensure_trace_id("", {"trace_id": "payload-value"})
        self.assertEqual("payload-value", resolved)

    def test_ensure_trace_id_generates_when_missing(self):
        # Generated ids are 32 hex chars (uuid4().hex length).
        # NOTE(review): the format assumption is inferred from length only — confirm.
        generated = ensure_trace_id("", {})
        self.assertEqual(32, len(generated))

View File

@@ -0,0 +1,17 @@
from django.test import SimpleTestCase
from core.transports.capabilities import capability_snapshot, supports, unsupported_reason
class TransportCapabilitiesTests(SimpleTestCase):
    """Smoke checks for the static transport capability matrix."""

    def test_signal_reactions_supported(self):
        self.assertTrue(supports("signal", "reactions"))

    def test_instagram_reactions_not_supported(self):
        self.assertFalse(supports("instagram", "reactions"))
        reason = unsupported_reason("instagram", "reactions")
        self.assertIn("instagram does not support reactions", reason)

    def test_snapshot_has_schema_version(self):
        snapshot = capability_snapshot()
        for expected_key in ("schema_version", "services"):
            self.assertIn(expected_key, snapshot)

View File

@@ -0,0 +1,7 @@
from core.transports.capabilities import (
capability_snapshot,
supports,
unsupported_reason,
)
__all__ = ["supports", "unsupported_reason", "capability_snapshot"]

View File

@@ -0,0 +1,100 @@
from __future__ import annotations
# Bumped whenever the snapshot shape (not the per-service values) changes.
CAPABILITY_SCHEMA_VERSION = 1

# Static per-transport feature matrix. Keys are normalized service names;
# values map feature name -> supported flag. Unknown services get an empty map.
_TRANSPORT_CAPABILITIES: dict[str, dict[str, bool]] = {
    "signal": {
        "send": True,
        "reactions": True,
        "edits": False,
        "deletes": False,
        "threaded_replies": True,
        "typing": True,
        "read_receipts": True,
        "media_images": True,
        "media_video": True,
        "media_audio": True,
        "media_documents": True,
    },
    "whatsapp": {
        "send": True,
        "reactions": True,
        "edits": False,
        "deletes": False,
        "threaded_replies": True,
        "typing": True,
        "read_receipts": True,
        "media_images": True,
        "media_video": True,
        "media_audio": True,
        "media_documents": True,
    },
    "instagram": {
        "send": True,
        "reactions": False,
        "edits": False,
        "deletes": False,
        "threaded_replies": False,
        "typing": True,
        "read_receipts": False,
        "media_images": True,
        "media_video": True,
        "media_audio": False,
        "media_documents": False,
    },
    "xmpp": {
        "send": False,
        "reactions": False,
        "edits": False,
        "deletes": False,
        "threaded_replies": False,
        "typing": False,
        "read_receipts": False,
        "media_images": False,
        "media_video": False,
        "media_audio": False,
        "media_documents": False,
    },
}


def _service_key(service: str) -> str:
    """Normalize a service name (trim + lowercase) for matrix lookup."""
    return str(service or "").strip().lower()


def _capabilities_for(service: str) -> dict[str, bool]:
    """Return a defensive copy of the service's capability map ({} if unknown)."""
    entry = _TRANSPORT_CAPABILITIES.get(_service_key(service))
    return {} if entry is None else dict(entry)


def supports(service: str, feature: str) -> bool:
    """True when *service* is known and advertises *feature* as supported."""
    feature_key = str(feature or "").strip().lower()
    # An empty feature name never matches anything.
    return bool(feature_key) and bool(_capabilities_for(service).get(feature_key, False))


def unsupported_reason(service: str, feature: str) -> str:
    """Human-readable explanation for an unsupported pair; "" when supported."""
    if supports(service, feature):
        return ""
    who = _service_key(service) or "unknown"
    what = str(feature or "").strip().lower() or "requested_action"
    return f"{who} does not support {what}."


def capability_snapshot(service: str = "") -> dict:
    """Describe capabilities for one service, or all services when omitted.

    Both shapes carry ``schema_version`` so consumers can detect format changes.
    """
    if service:
        key = _service_key(service)
        return {
            "schema_version": CAPABILITY_SCHEMA_VERSION,
            "service": key,
            "capabilities": _capabilities_for(key),
        }
    all_services = {
        name: dict(flags) for name, flags in sorted(_TRANSPORT_CAPABILITIES.items())
    }
    return {
        "schema_version": CAPABILITY_SCHEMA_VERSION,
        "services": all_services,
    }

View File

@@ -30,6 +30,7 @@ from core.assist.engine import process_inbound_assist
from core.commands.base import CommandContext
from core.commands.engine import process_inbound_message
from core.commands.policies import ensure_variant_policies_for_profile
from core.events.ledger import append_event_sync
from core.messaging import ai as ai_runner
from core.messaging import history
from core.messaging import media_bridge
@@ -53,6 +54,7 @@ from core.models import (
from core.presence import get_settings as get_availability_settings
from core.presence import spans_for_range
from core.realtime.typing_state import get_person_typing_state
from core.transports.capabilities import supports, unsupported_reason
from core.translation.engine import process_inbound_translation
from core.views.workspace import (
INSIGHT_METRICS,
@@ -516,7 +518,8 @@ def _serialize_message(msg: Message) -> dict:
emoji = str(item.get("emoji") or "").strip()
if not emoji:
continue
actor = str(item.get("actor") or "").strip()
# Keep actor/source normalization stable to avoid duplicate/hiding issues.
actor = str(item.get("actor") or "").strip().lower()
source = str(item.get("source_service") or "").strip().lower()
key = (emoji, actor, source)
if key in seen_reactions:
@@ -1811,6 +1814,7 @@ def _reaction_actor_key(user_id, service: str) -> str:
def _resolve_reaction_target(message: Message, service: str, channel_identifier: str) -> dict:
service_key = _default_service(service)
message_source_service = str(getattr(message, "source_service", "") or "").strip().lower()
source_message_id = str(getattr(message, "source_message_id", "") or "").strip()
sender_uuid = str(getattr(message, "sender_uuid", "") or "").strip()
source_chat_id = str(getattr(message, "source_chat_id", "") or "").strip()
@@ -1819,23 +1823,23 @@ def _resolve_reaction_target(message: Message, service: str, channel_identifier:
if service_key == "signal":
target_ts = 0
if source_message_id.isdigit():
if message_source_service == "signal" and source_message_id.isdigit():
target_ts = int(source_message_id)
bridge_ref = _latest_signal_bridge_ref(message)
if not target_ts:
bridge_ref = _latest_signal_bridge_ref(message)
upstream_id = str(bridge_ref.get("upstream_message_id") or "").strip()
if upstream_id.isdigit():
target_ts = int(upstream_id)
if not target_ts:
target_ts = int(bridge_ref.get("upstream_ts") or 0)
if not target_ts:
# Local web messages are only reactable once bridge refs exist.
if not target_ts and message_source_service == "signal":
target_ts = delivered_ts or local_ts
if target_ts <= 0:
return {"error": "signal_target_unresolvable"}
target_author = sender_uuid
if not target_author:
bridge_ref = _latest_signal_bridge_ref(message)
target_author = str(bridge_ref.get("upstream_author") or "").strip()
if (
str(getattr(message, "custom_author", "") or "").strip().upper()
@@ -1856,10 +1860,10 @@ def _resolve_reaction_target(message: Message, service: str, channel_identifier:
}
if service_key == "whatsapp":
target_message_id = source_message_id
target_message_id = source_message_id if message_source_service == "whatsapp" else ""
target_ts = delivered_ts or local_ts
bridge_ref = _latest_whatsapp_bridge_ref(message)
if not target_message_id:
bridge_ref = _latest_whatsapp_bridge_ref(message)
target_message_id = str(bridge_ref.get("upstream_message_id") or "").strip()
if not target_ts:
target_ts = int(bridge_ref.get("upstream_ts") or 0)
@@ -4357,7 +4361,7 @@ class ComposeEngageSend(LoginRequiredMixin, View):
identifier=base["person_identifier"],
)
ts_value = int(ts) if str(ts).isdigit() else int(time.time() * 1000)
Message.objects.create(
created = Message.objects.create(
user=request.user,
session=session,
sender_uuid="",
@@ -4366,6 +4370,23 @@ class ComposeEngageSend(LoginRequiredMixin, View):
delivered_ts=ts_value if str(ts).isdigit() else None,
custom_author="USER",
)
try:
append_event_sync(
user=request.user,
session=session,
ts=ts_value,
event_type="message_created",
direction="out",
actor_identifier="USER",
origin_transport="web",
origin_message_id=str(created.id),
origin_chat_id=str(base["identifier"] or ""),
payload={"message_id": str(created.id), "text": outbound},
raw_payload={},
trace_id="",
)
except Exception:
pass
return JsonResponse({"ok": True, "message": "Shared engage sent."})
@@ -4519,6 +4540,23 @@ class ComposeSend(LoginRequiredMixin, View):
),
message_meta={},
)
try:
append_event_sync(
user=request.user,
session=session,
ts=int(ts),
event_type="message_created",
direction="out",
actor_identifier="USER",
origin_transport="web",
origin_message_id=str(created_message.id),
origin_chat_id=str(base["identifier"] or ""),
payload={"message_id": str(created_message.id), "text": text},
raw_payload={},
trace_id="",
)
except Exception:
pass
command_id = transport.enqueue_runtime_command(
base["service"],
"send_message_raw",
@@ -4591,6 +4629,23 @@ class ComposeSend(LoginRequiredMixin, View):
reply_source_message_id=str(reply_to.id) if reply_to is not None else None,
message_meta={},
)
try:
append_event_sync(
user=request.user,
session=session,
ts=msg_ts,
event_type="message_created",
direction="out",
actor_identifier="USER",
origin_transport="web",
origin_message_id=str(created_message.id),
origin_chat_id=str(base["identifier"] or ""),
payload={"message_id": str(created_message.id), "text": text},
raw_payload={},
trace_id="",
)
except Exception:
pass
if created_message is not None:
async_to_sync(process_inbound_message)(
CommandContext(
@@ -4643,6 +4698,14 @@ class ComposeReact(LoginRequiredMixin, View):
service_key = _default_service(service)
if service_key not in {"signal", "whatsapp"}:
return JsonResponse({"ok": False, "error": "service_not_supported"})
if bool(getattr(settings, "CAPABILITY_ENFORCEMENT_ENABLED", True)) and not supports(service_key, "reactions"):
return JsonResponse(
{
"ok": False,
"error": "unsupported_action",
"reason": unsupported_reason(service_key, "reactions"),
}
)
if not identifier and person is None:
return JsonResponse({"ok": False, "error": "missing_scope"})

View File

@@ -1,12 +1,15 @@
from django.http import JsonResponse
from django.shortcuts import render
from django.views import View
from core.models import (
AdapterHealthEvent,
AIRequest,
AIResult,
AIResultSignal,
Chat,
ChatSession,
ConversationEvent,
Group,
MemoryItem,
Message,
@@ -25,6 +28,8 @@ from core.models import (
WorkspaceConversation,
WorkspaceMetricSnapshot,
)
from core.events.projection import shadow_compare_session
from core.transports.capabilities import capability_snapshot
from core.views.manage.permissions import SuperUserRequiredMixin
@@ -37,6 +42,8 @@ class SystemSettings(SuperUserRequiredMixin, View):
"messages": Message.objects.filter(user=user).count(),
"queued_messages": QueuedMessage.objects.filter(user=user).count(),
"message_events": MessageEvent.objects.filter(user=user).count(),
"conversation_events": ConversationEvent.objects.filter(user=user).count(),
"adapter_health_events": AdapterHealthEvent.objects.filter(user=user).count(),
"workspace_conversations": WorkspaceConversation.objects.filter(
user=user
).count(),
@@ -85,6 +92,8 @@ class SystemSettings(SuperUserRequiredMixin, View):
conversation__user=user
).delete()[0]
deleted += MessageEvent.objects.filter(user=user).delete()[0]
deleted += ConversationEvent.objects.filter(user=user).delete()[0]
deleted += AdapterHealthEvent.objects.filter(user=user).delete()[0]
deleted += Message.objects.filter(user=user).delete()[0]
deleted += QueuedMessage.objects.filter(user=user).delete()[0]
deleted += WorkspaceConversation.objects.filter(user=user).delete()[0]
@@ -156,3 +165,97 @@ class SystemSettings(SuperUserRequiredMixin, View):
"notice_message": notice_message,
},
)
class ServiceCapabilitySnapshotAPI(SuperUserRequiredMixin, View):
    """Superuser-only JSON endpoint exposing the transport capability matrix."""

    def get(self, request):
        # Optional ?service= filter; blank returns the snapshot for all services.
        service = str(request.GET.get("service") or "").strip().lower()
        return JsonResponse(
            {
                "ok": True,
                "data": capability_snapshot(service),
            }
        )
class AdapterHealthSummaryAPI(SuperUserRequiredMixin, View):
    """Superuser-only JSON endpoint: latest health event per adapter service."""

    def get(self, request):
        latest_by_service = {}
        # Ordering by (service, -ts) means the first row seen for each service
        # key is its most recent event; later rows for that key are skipped.
        # NOTE(review): query is not scoped to request.user — presumably fine
        # for a superuser-only view, but confirm that is intentional.
        rows = AdapterHealthEvent.objects.order_by("service", "-ts")[:200]
        for row in rows:
            key = str(row.service or "").strip().lower()
            if key in latest_by_service:
                continue
            latest_by_service[key] = {
                "status": str(row.status or ""),
                "reason": str(row.reason or ""),
                "ts": int(row.ts or 0),
                "created_at": row.created_at.isoformat(),
            }
        return JsonResponse({"ok": True, "services": latest_by_service})
class TraceDiagnosticsAPI(SuperUserRequiredMixin, View):
    """Superuser-only JSON endpoint listing conversation events for one trace_id."""

    def get(self, request):
        trace_id = str(request.GET.get("trace_id") or "").strip()
        if not trace_id:
            return JsonResponse(
                {"ok": False, "error": "trace_id_required"},
                status=400,
            )
        # Scoped to the requesting user and capped at 500 events per trace.
        rows = list(
            ConversationEvent.objects.filter(
                user=request.user,
                trace_id=trace_id,
            )
            .select_related("session")
            .order_by("ts", "created_at")[:500]
        )
        return JsonResponse(
            {
                "ok": True,
                "trace_id": trace_id,
                "count": len(rows),
                "events": [
                    {
                        "id": str(row.id),
                        "ts": int(row.ts or 0),
                        "event_type": str(row.event_type or ""),
                        "direction": str(row.direction or ""),
                        "session_id": str(row.session_id or ""),
                        "origin_transport": str(row.origin_transport or ""),
                        "origin_message_id": str(row.origin_message_id or ""),
                        "payload": dict(row.payload or {}),
                    }
                    for row in rows
                ],
            }
        )
class EventProjectionShadowAPI(SuperUserRequiredMixin, View):
    """Superuser-only JSON endpoint: shadow-compare a session's event-ledger
    projection against its stored state via shadow_compare_session()."""

    def get(self, request):
        session_id = str(request.GET.get("session_id") or "").strip()
        if not session_id:
            return JsonResponse(
                {"ok": False, "error": "session_id_required"},
                status=400,
            )
        # Guard the int() parse: a non-numeric ?detail_limit= would otherwise
        # raise an unhandled ValueError (HTTP 500); fall back to the default.
        try:
            detail_limit = int(request.GET.get("detail_limit") or 25)
        except (TypeError, ValueError):
            detail_limit = 25
        session = ChatSession.objects.filter(
            id=session_id,
            user=request.user,
        ).first()
        if session is None:
            return JsonResponse(
                {"ok": False, "error": "session_not_found"},
                status=404,
            )
        # Negative limits are clamped to 0 (no per-mismatch detail rows).
        compared = shadow_compare_session(session, detail_limit=max(0, detail_limit))
        return JsonResponse(
            {
                "ok": True,
                "result": compared,
                "cause_summary": dict(compared.get("cause_counts") or {}),
            }
        )

View File

@@ -35,20 +35,14 @@ from core.models import (
ExternalChatLink,
)
from core.tasks.codex_support import resolve_external_chat_id
from core.tasks.chat_defaults import (
SAFE_TASK_FLAGS_DEFAULTS,
ensure_default_source_for_chat,
normalize_channel_identifier,
)
from core.tasks.codex_approval import queue_codex_event_with_pre_approval
from core.tasks.providers import get_provider
SAFE_TASK_FLAGS_DEFAULTS = {
"derive_enabled": True,
"match_mode": "strict",
"require_prefix": True,
"allowed_prefixes": ["task:", "todo:"],
"completion_enabled": True,
"ai_title_enabled": True,
"announce_task_id": False,
"min_chars": 3,
}
def _to_bool(raw, default=False) -> bool:
if raw is None:
return bool(default)
@@ -385,7 +379,7 @@ def _enqueue_codex_task_submission(
source_service=str(source_service or ""),
source_channel=str(source_channel or ""),
external_chat_id=external_chat_id,
status="queued",
status="waiting_approval",
request_payload={"action": "append_update", "provider_payload": dict(provider_payload)},
result_payload={},
error="",
@@ -396,51 +390,21 @@ def _enqueue_codex_task_submission(
idempotency_key = (
f"codex_submit:{task.id}:{mode}:{hashlib.sha1(str(command_text or '').encode('utf-8')).hexdigest()[:10]}:{run.id}"
)
ExternalSyncEvent.objects.update_or_create(
queue_codex_event_with_pre_approval(
user=user,
run=run,
task=task,
task_event=None,
action="append_update",
provider_payload=dict(provider_payload),
idempotency_key=idempotency_key,
defaults={
"user": user,
"task": task,
"task_event": None,
"provider": "codex_cli",
"status": "pending",
"payload": {
"action": "append_update",
"provider_payload": dict(provider_payload),
},
"error": "",
},
)
return run
def _normalize_channel_identifier(service: str, identifier: str) -> str:
service_key = str(service or "").strip().lower()
value = str(identifier or "").strip()
if not value:
return ""
if service_key == "whatsapp":
bare = value.split("@", 1)[0].strip()
if bare:
if value.endswith("@g.us"):
return f"{bare}@g.us"
if value.endswith("@s.whatsapp.net"):
return f"{bare}@s.whatsapp.net"
return f"{bare}@g.us"
if service_key == "signal":
return value
if service_key == "xmpp":
return value
if service_key == "instagram":
return value
if service_key == "web":
return value
return value
def _upsert_group_source(*, user, service: str, channel_identifier: str, project, epic=None):
normalized_service = str(service or "").strip().lower()
normalized_identifier = _normalize_channel_identifier(service, channel_identifier)
normalized_identifier = normalize_channel_identifier(service, channel_identifier)
if not normalized_service or not normalized_identifier:
return None
source, created = ChatTaskSource.objects.get_or_create(
@@ -503,6 +467,28 @@ def _notify_epic_created_in_project_chats(*, project: TaskProject, epic: TaskEpi
continue
def _reseed_chat_sources_for_deleted_project(user, service_channel_rows: list[tuple[str, str]]) -> int:
    """Re-create default chat→project mappings for the given (service, channel)
    pairs after their project was deleted; returns how many were restored."""
    restored = 0
    handled: set[tuple[str, str]] = set()
    for raw_service, raw_channel in service_channel_rows:
        service_key = str(raw_service or "").strip().lower()
        channel = str(raw_channel or "").strip()
        # Skip blanks and duplicate pairs — each chat is reseeded at most once.
        if not service_key or not channel:
            continue
        pair = (service_key, channel)
        if pair in handled:
            continue
        handled.add(pair)
        source = ensure_default_source_for_chat(
            user=user,
            service=service_key,
            channel_identifier=channel,
        )
        if source is not None:
            restored += 1
    return restored
def _person_identifier_scope_variants(service: str, identifier: str) -> list[str]:
service_key = str(service or "").strip().lower()
raw_identifier = str(identifier or "").strip()
@@ -668,14 +654,17 @@ class TasksHub(LoginRequiredMixin, View):
def _context(self, request):
scope = self._scope(request)
projects = (
show_empty = bool(str(request.GET.get("show_empty") or "").strip() in {"1", "true", "yes", "on"})
all_projects = (
TaskProject.objects.filter(user=request.user)
.annotate(
task_count=Count("derived_tasks"),
epic_count=Count("epics", distinct=True),
source_count=Count("chat_sources", distinct=True),
)
.order_by("name")
)
projects = all_projects if show_empty else all_projects.filter(task_count__gt=0)
tasks = (
DerivedTask.objects.filter(user=request.user)
.select_related("project", "epic", "origin_message")
@@ -684,10 +673,7 @@ class TasksHub(LoginRequiredMixin, View):
tasks = _apply_task_creator_labels(request.user, tasks)
selected_project = None
if scope["selected_project_id"]:
selected_project = TaskProject.objects.filter(
user=request.user,
id=scope["selected_project_id"],
).first()
selected_project = all_projects.filter(id=scope["selected_project_id"]).first()
person_identifiers = []
person_identifier_rows = []
if scope["person"] is not None:
@@ -719,10 +705,12 @@ class TasksHub(LoginRequiredMixin, View):
)
return {
"projects": projects,
"project_choices": all_projects,
"tasks": tasks,
"scope": scope,
"person_identifier_rows": person_identifier_rows,
"selected_project": selected_project,
"show_empty_projects": show_empty,
}
def get(self, request):
@@ -802,9 +790,25 @@ class TasksHub(LoginRequiredMixin, View):
id=request.POST.get("project_id"),
user=request.user,
)
confirm_name = str(request.POST.get("confirm_name") or "").strip()
expected = str(project.name or "").strip()
if confirm_name != expected:
messages.error(
request,
f"Delete cancelled. Type the project name exactly to confirm deletion: {expected}",
)
return redirect("tasks_hub")
mapped_channels = list(project.chat_sources.values_list("service", "channel_identifier"))
deleted_name = str(project.name or "").strip() or "Project"
project.delete()
messages.success(request, f"Deleted project '{deleted_name}'.")
restored = _reseed_chat_sources_for_deleted_project(request.user, mapped_channels)
if restored > 0:
messages.success(
request,
f"Deleted project '{deleted_name}'. Restored {restored} chat mapping(s) with default projects.",
)
else:
messages.success(request, f"Deleted project '{deleted_name}'.")
return redirect("tasks_hub")
return redirect("tasks_hub")
@@ -891,9 +895,25 @@ class TaskProjectDetail(LoginRequiredMixin, View):
return redirect("tasks_project", project_id=str(project.id))
if action == "project_delete":
confirm_name = str(request.POST.get("confirm_name") or "").strip()
expected = str(project.name or "").strip()
if confirm_name != expected:
messages.error(
request,
f"Delete cancelled. Type the project name exactly to confirm deletion: {expected}",
)
return redirect("tasks_project", project_id=str(project.id))
mapped_channels = list(project.chat_sources.values_list("service", "channel_identifier"))
deleted_name = str(project.name or "").strip() or "Project"
project.delete()
messages.success(request, f"Deleted project '{deleted_name}'.")
restored = _reseed_chat_sources_for_deleted_project(request.user, mapped_channels)
if restored > 0:
messages.success(
request,
f"Deleted project '{deleted_name}'. Restored {restored} chat mapping(s) with default projects.",
)
else:
messages.success(request, f"Deleted project '{deleted_name}'.")
return redirect("tasks_hub")
return redirect("tasks_project", project_id=str(project.id))
@@ -928,6 +948,17 @@ class TaskGroupDetail(LoginRequiredMixin, View):
channel_identifier__in=variants,
).select_related("project", "epic")
mappings = list(mappings)
if not mappings:
seeded = ensure_default_source_for_chat(
user=request.user,
service=channel["service_key"],
channel_identifier=channel["display_identifier"],
)
if seeded is not None:
mappings = list(
ChatTaskSource.objects.filter(id=seeded.id)
.select_related("project", "epic")
)
for row in mappings:
row_channel = _resolve_channel_display(
request.user,
@@ -962,6 +993,7 @@ class TaskGroupDetail(LoginRequiredMixin, View):
"channel_display_name": channel["display_name"],
"projects": TaskProject.objects.filter(user=request.user).order_by("name"),
"mappings": mappings,
"primary_project": mappings[0].project if mappings else None,
"tasks": tasks,
},
)
@@ -1015,6 +1047,35 @@ class TaskGroupDetail(LoginRequiredMixin, View):
epic=epic,
)
messages.success(request, f"Mapped '{project.name}' to this group.")
elif action == "group_project_rename":
current = (
ChatTaskSource.objects.filter(
user=request.user,
service=channel["service_key"],
channel_identifier=channel["display_identifier"],
enabled=True,
)
.select_related("project")
.order_by("-updated_at")
.first()
)
if current is None:
current = ensure_default_source_for_chat(
user=request.user,
service=channel["service_key"],
channel_identifier=channel["display_identifier"],
)
new_name = str(request.POST.get("project_name") or "").strip()
if current is None or current.project is None:
messages.error(request, "No mapped project found for this chat.")
elif not new_name:
messages.error(request, "Project name is required.")
elif TaskProject.objects.filter(user=request.user, name=new_name).exclude(id=current.project_id).exists():
messages.error(request, f"Project '{new_name}' already exists.")
else:
current.project.name = new_name
current.project.save(update_fields=["name", "updated_at"])
messages.success(request, f"Renamed project to '{new_name}'.")
return redirect(
"tasks_group",
service=channel["service_key"],
@@ -1427,7 +1488,10 @@ class TaskCodexSubmit(LoginRequiredMixin, View):
mode="default",
source_message=getattr(task, "origin_message", None),
)
messages.success(request, f"Sent task #{task.reference_code} to Codex (run {run.id}).")
messages.success(
request,
f"Queued approval for task #{task.reference_code} before Codex run {run.id}.",
)
return redirect(next_url)
@@ -1524,28 +1588,49 @@ class CodexApprovalAction(LoginRequiredMixin, View):
"resolution_note",
]
)
if row.external_sync_event_id:
ExternalSyncEvent.objects.filter(id=row.external_sync_event_id).update(
status="ok",
error="",
)
run = row.codex_run
run.status = "approved_waiting_resume"
run.error = ""
run.save(update_fields=["status", "error", "updated_at"])
provider_payload = dict(run.request_payload.get("provider_payload") or {})
provider_payload.update(
{
"mode": "approval_response",
"approval_key": row.approval_key,
"resume_payload": dict(row.resume_payload or {}),
"codex_run_id": str(run.id),
}
)
resume_payload = dict(row.resume_payload or {})
resume_action = str(resume_payload.get("action") or "").strip().lower()
resume_provider_payload = dict(resume_payload.get("provider_payload") or {})
if resume_action and resume_provider_payload:
provider_payload = dict(resume_provider_payload)
provider_payload["codex_run_id"] = str(run.id)
event_action = resume_action
resume_idempotency_key = str(resume_payload.get("idempotency_key") or "").strip()
resume_event_key = (
resume_idempotency_key
if resume_idempotency_key
else f"codex_approval:{row.approval_key}:approved"
)
else:
provider_payload = dict(run.request_payload.get("provider_payload") or {})
provider_payload.update(
{
"mode": "approval_response",
"approval_key": row.approval_key,
"resume_payload": dict(row.resume_payload or {}),
"codex_run_id": str(run.id),
}
)
event_action = "append_update"
resume_event_key = f"codex_approval:{row.approval_key}:approved"
ExternalSyncEvent.objects.update_or_create(
idempotency_key=f"codex_approval:{row.approval_key}:approved",
idempotency_key=resume_event_key,
defaults={
"user": request.user,
"task": run.task,
"task_event": run.derived_task_event,
"provider": "codex_cli",
"status": "pending",
"payload": {"action": "append_update", "provider_payload": provider_payload},
"payload": {"action": event_action, "provider_payload": provider_payload},
"error": "",
},
)