Increase platform abstraction cohesion

This commit is contained in:
2026-03-06 17:47:58 +00:00
parent 438e561da0
commit 8c091b1e6d
55 changed files with 6555 additions and 440 deletions

View File

@@ -193,6 +193,85 @@ def _extract_signal_reaction(envelope):
}
def _extract_signal_edit(envelope):
    """Normalise a Signal edit event found anywhere inside *envelope*.

    Looks for an ``editMessage`` node in the known envelope locations and
    returns ``{"target_ts", "new_text", "raw"}``, or ``None`` when the
    envelope holds no usable edit (missing node, bad timestamp, empty text).
    """
    node = None
    for lookup in (
        ("dataMessage", "editMessage"),
        ("syncMessage", "sentMessage", "editMessage"),
        ("syncMessage", "editMessage"),
    ):
        candidate = _get_nested(envelope, lookup)
        if isinstance(candidate, dict):
            node = candidate
            break
    if not isinstance(node, dict):
        return None
    # The target-timestamp key name varies between payload variants.
    raw_ts = next(
        (
            node.get(key)
            for key in ("targetSentTimestamp", "targetTimestamp", "targetTs")
            if node.get(key) is not None
        ),
        None,
    )
    try:
        target_ts = int(raw_ts)
    except Exception:
        target_ts = 0
    if target_ts <= 0:
        return None
    # Replacement text may live on a nested dataMessage or directly on the node.
    inner = node.get("dataMessage") or node.get("message") or {}
    new_text = ""
    if isinstance(inner, dict):
        for field in ("message", "text", "body", "caption"):
            value = str(inner.get(field) or "").strip()
            if value:
                new_text = value
                break
    if not new_text:
        new_text = str(node.get("message") or "").strip()
    if not new_text:
        return None
    return {
        "target_ts": target_ts,
        "new_text": new_text,
        "raw": dict(node),
    }
def _extract_signal_delete(envelope):
    """Normalise a Signal remote-delete event found inside *envelope*.

    Returns ``{"target_ts", "raw"}`` identifying the deleted message, or
    ``None`` when no delete node exists or its timestamp is unusable.
    """
    node = None
    for lookup in (
        ("dataMessage", "delete"),
        ("dataMessage", "remoteDelete"),
        ("syncMessage", "sentMessage", "delete"),
        ("syncMessage", "delete"),
    ):
        candidate = _get_nested(envelope, lookup)
        if isinstance(candidate, dict):
            node = candidate
            break
    if not isinstance(node, dict):
        return None
    # The target-timestamp key name varies between payload variants.
    raw_ts = next(
        (
            node.get(key)
            for key in ("targetSentTimestamp", "targetTimestamp", "targetTs")
            if node.get(key) is not None
        ),
        None,
    )
    try:
        target_ts = int(raw_ts)
    except Exception:
        target_ts = 0
    if target_ts <= 0:
        return None
    return {
        "target_ts": target_ts,
        "raw": dict(node),
    }
def _extract_signal_text(raw_payload, default_text=""):
text = str(default_text or "").strip()
if text:
@@ -1299,6 +1378,8 @@ class SignalClient(ClientBase):
destination_number,
)
reaction_payload = _extract_signal_reaction(envelope)
edit_payload = _extract_signal_edit(envelope)
delete_payload = _extract_signal_delete(envelope)
if identifiers and isinstance(reaction_payload, dict):
source_uuid = str(
envelope.get("sourceUuid") or envelope.get("source") or ""
@@ -1343,6 +1424,61 @@ class SignalClient(ClientBase):
self.log.warning(
"signal raw sync reaction relay to XMPP failed: %s", exc
)
if identifiers and isinstance(edit_payload, dict):
source_uuid = str(
envelope.get("sourceUuid") or envelope.get("source") or ""
).strip()
source_number = str(envelope.get("sourceNumber") or "").strip()
for identifier in identifiers:
try:
await history.apply_message_edit(
identifier.user,
identifier,
target_message_id="",
target_ts=int(edit_payload.get("target_ts") or 0),
new_text=str(edit_payload.get("new_text") or ""),
source_service="signal",
actor=(source_uuid or source_number or ""),
payload=edit_payload.get("raw") or {},
)
except Exception as exc:
self.log.warning(
"signal raw sync edit history apply failed: %s", exc
)
transport.update_runtime_state(
self.service,
last_inbound_ok_ts=int(time.time() * 1000),
last_inbound_exception_type="",
last_inbound_exception_message="",
)
return
if identifiers and isinstance(delete_payload, dict):
source_uuid = str(
envelope.get("sourceUuid") or envelope.get("source") or ""
).strip()
source_number = str(envelope.get("sourceNumber") or "").strip()
for identifier in identifiers:
try:
await history.apply_message_delete(
identifier.user,
identifier,
target_message_id="",
target_ts=int(delete_payload.get("target_ts") or 0),
source_service="signal",
actor=(source_uuid or source_number or ""),
payload=delete_payload.get("raw") or {},
)
except Exception as exc:
self.log.warning(
"signal raw sync delete history apply failed: %s", exc
)
transport.update_runtime_state(
self.service,
last_inbound_ok_ts=int(time.time() * 1000),
last_inbound_exception_type="",
last_inbound_exception_message="",
)
return
if identifiers and text:
ts_raw = (
sync_sent_message.get("timestamp")
@@ -1427,8 +1563,14 @@ class SignalClient(ClientBase):
identifiers = await self._resolve_signal_identifiers(source_uuid, source_number)
reaction_payload = _extract_signal_reaction(envelope)
if (not identifiers) and isinstance(reaction_payload, dict):
# Sync reactions from our own linked device can arrive with source=our
edit_payload = _extract_signal_edit(envelope)
delete_payload = _extract_signal_delete(envelope)
if (not identifiers) and (
isinstance(reaction_payload, dict)
or isinstance(edit_payload, dict)
or isinstance(delete_payload, dict)
):
# Sync events from our own linked device can arrive with source=our
# account and destination=<contact>. Resolve by destination as fallback.
destination_uuid = str(
envelope.get("destinationServiceId")
@@ -1497,6 +1639,49 @@ class SignalClient(ClientBase):
last_inbound_exception_message="",
)
return
if isinstance(edit_payload, dict):
for identifier in identifiers:
try:
await history.apply_message_edit(
identifier.user,
identifier,
target_message_id="",
target_ts=int(edit_payload.get("target_ts") or 0),
new_text=str(edit_payload.get("new_text") or ""),
source_service="signal",
actor=(source_uuid or source_number or ""),
payload=edit_payload.get("raw") or {},
)
except Exception as exc:
self.log.warning("signal raw edit history apply failed: %s", exc)
transport.update_runtime_state(
self.service,
last_inbound_ok_ts=int(time.time() * 1000),
last_inbound_exception_type="",
last_inbound_exception_message="",
)
return
if isinstance(delete_payload, dict):
for identifier in identifiers:
try:
await history.apply_message_delete(
identifier.user,
identifier,
target_message_id="",
target_ts=int(delete_payload.get("target_ts") or 0),
source_service="signal",
actor=(source_uuid or source_number or ""),
payload=delete_payload.get("raw") or {},
)
except Exception as exc:
self.log.warning("signal raw delete history apply failed: %s", exc)
transport.update_runtime_state(
self.service,
last_inbound_ok_ts=int(time.time() * 1000),
last_inbound_exception_type="",
last_inbound_exception_message="",
)
return
text = _extract_signal_text(payload, str(data_message.get("message") or "").strip())
if not text:

View File

@@ -776,6 +776,14 @@ async def send_message_raw(
Unified outbound send path used by models/views/UR.
"""
service_key = _service_key(service)
if _capability_checks_enabled() and not supports(service_key, "send"):
reason = unsupported_reason(service_key, "send")
log.warning(
"capability-check failed service=%s feature=send: %s",
service_key,
reason,
)
return False
if service_key == "signal":
prepared_attachments = await prepare_outbound_attachments(
service_key, attachments or []

View File

@@ -141,10 +141,14 @@ class XMPPComponent(ComponentXMPP):
self._reconnect_task = None
self._reconnect_delay_seconds = 1.0
self._reconnect_delay_max_seconds = 30.0
self._connect_inflight = False
self._session_live = False
self.log = logs.get_logger("XMPP")
super().__init__(jid, secret, server, port)
# Use one reconnect strategy (our backoff loop) to avoid reconnect churn.
self.auto_reconnect = False
# Register chat state plugins
register_stanza_plugin(Message, Active)
register_stanza_plugin(Message, Composing)
@@ -178,6 +182,21 @@ class XMPPComponent(ComponentXMPP):
self.add_event_handler("chatstate_inactive", self.on_chatstate_inactive)
self.add_event_handler("chatstate_gone", self.on_chatstate_gone)
def _user_xmpp_domain(self):
    """Pick the XMPP domain used for user (non-component) JIDs.

    Preference order: explicit ``XMPP_USER_DOMAIN``, the component JID's
    parent domain (text after the first dot), ``DOMAIN``, then
    ``XMPP_ADDRESS`` as a last resort.
    """
    explicit = str(getattr(settings, "XMPP_USER_DOMAIN", "") or "").strip()
    if explicit:
        return explicit
    component_jid = str(getattr(settings, "XMPP_JID", "") or "").strip()
    if "." in component_jid:
        # Component JIDs look like "gateway.example.org"; users live on
        # the parent domain "example.org".
        return component_jid.split(".", 1)[1]
    configured = str(getattr(settings, "DOMAIN", "") or "").strip()
    return configured or str(getattr(settings, "XMPP_ADDRESS", "") or "").strip()
def _user_jid(self, username):
    """Build the bare JID ``<username>@<user domain>`` for a local user."""
    domain = self._user_xmpp_domain()
    return f"{username}@{domain}"
async def enable_carbons(self):
"""Enable XMPP Message Carbons (XEP-0280)"""
try:
@@ -827,25 +846,33 @@ class XMPPComponent(ComponentXMPP):
async def session_start(self, *args):
    """Handle a freshly established component session.

    Marks the session live, clears the in-flight connect flag, resets the
    reconnect backoff to its minimum, and cancels any pending reconnect
    task so only one reconnect strategy runs at a time.
    """
    self.log.info("XMPP session started")
    self._session_live = True
    self._connect_inflight = False
    # Reset backoff so the next outage starts from the shortest delay.
    self._reconnect_delay_seconds = 1.0
    if self._reconnect_task and not self._reconnect_task.done():
        self._reconnect_task.cancel()
    self._reconnect_task = None
    # NOTE(review): this call contradicts the comment and log line below,
    # which say carbons are skipped for component sessions — confirm
    # whether enable_carbons() should still be awaited here.
    await self.enable_carbons()
    # This client connects as an external component, not a user client;
    # XEP-0280 (carbons) is client-scoped and not valid here.
    self.log.debug("Skipping carbons enable for component session")
async def _reconnect_loop(self):
try:
while True:
delay = float(self._reconnect_delay_seconds)
await asyncio.sleep(delay)
if self._session_live or self._connect_inflight:
return
try:
self.log.info("XMPP reconnect attempt delay_s=%.1f", delay)
self._connect_inflight = True
connected = self.connect()
if connected is False:
raise RuntimeError("connect returned false")
return
except Exception as exc:
self.log.warning("XMPP reconnect attempt failed: %s", exc)
self._connect_inflight = False
self._reconnect_delay_seconds = min(
self._reconnect_delay_max_seconds,
max(1.0, float(self._reconnect_delay_seconds) * 2.0),
@@ -853,6 +880,8 @@ class XMPPComponent(ComponentXMPP):
except asyncio.CancelledError:
return
finally:
if not self._session_live:
self._connect_inflight = False
self._reconnect_task = None
def _schedule_reconnect(self):
@@ -864,6 +893,8 @@ class XMPPComponent(ComponentXMPP):
"""
Handles XMPP disconnection and triggers a reconnect loop.
"""
self._session_live = False
self._connect_inflight = False
self.log.warning(
"XMPP disconnected, scheduling reconnect attempt in %.1fs",
float(self._reconnect_delay_seconds),
@@ -1576,7 +1607,7 @@ class XMPPComponent(ComponentXMPP):
f"{person_identifier.person.name.lower()}|"
f"{person_identifier.service}@{settings.XMPP_JID}"
)
recipient_jid = f"{user.username}@{settings.XMPP_ADDRESS}"
recipient_jid = self._user_jid(user.username)
await self.send_xmpp_reaction(
recipient_jid,
sender_jid,
@@ -1625,7 +1656,7 @@ class XMPPComponent(ComponentXMPP):
f"{person_identifier.person.name.lower()}|"
f"{person_identifier.service}@{settings.XMPP_JID}"
)
recipient_jid = f"{user.username}@{settings.XMPP_ADDRESS}"
recipient_jid = self._user_jid(user.username)
await self.send_chat_state(recipient_jid, sender_jid, started)
async def send_from_external(
@@ -1640,7 +1671,7 @@ class XMPPComponent(ComponentXMPP):
"""Handles sending XMPP messages with text and attachments."""
sender_jid = f"{person_identifier.person.name.lower()}|{person_identifier.service}@{settings.XMPP_JID}"
recipient_jid = f"{person_identifier.user.username}@{settings.XMPP_ADDRESS}"
recipient_jid = self._user_jid(person_identifier.user.username)
if is_outgoing_message:
xmpp_id = await self.send_xmpp_message(
recipient_jid,
@@ -1767,22 +1798,45 @@ class XMPPComponent(ComponentXMPP):
class XMPPClient(ClientBase):
def __init__(self, ur, *args, **kwargs):
super().__init__(ur, *args, **kwargs)
self.client = XMPPComponent(
ur,
jid=settings.XMPP_JID,
secret=settings.XMPP_SECRET,
server=settings.XMPP_ADDRESS,
port=settings.XMPP_PORT,
)
self._enabled = True
self.client = None
jid = str(getattr(settings, "XMPP_JID", "") or "").strip()
secret = str(getattr(settings, "XMPP_SECRET", "") or "").strip()
server = str(getattr(settings, "XMPP_ADDRESS", "") or "").strip()
port = int(getattr(settings, "XMPP_PORT", 8888) or 8888)
missing = []
if not jid:
missing.append("XMPP_JID")
if not secret:
missing.append("XMPP_SECRET")
if not server:
missing.append("XMPP_ADDRESS")
if missing:
self._enabled = False
self.log.warning(
"XMPP client disabled due to missing configuration: %s",
", ".join(missing),
)
self.client.register_plugin("xep_0030") # Service Discovery
self.client.register_plugin("xep_0004") # Data Forms
self.client.register_plugin("xep_0060") # PubSub
self.client.register_plugin("xep_0199") # XMPP Ping
self.client.register_plugin("xep_0085") # Chat State Notifications
self.client.register_plugin("xep_0363") # HTTP File Upload
if self._enabled:
self.client = XMPPComponent(
ur,
jid=jid,
secret=secret,
server=server,
port=port,
)
self.client.register_plugin("xep_0030") # Service Discovery
self.client.register_plugin("xep_0004") # Data Forms
self.client.register_plugin("xep_0060") # PubSub
self.client.register_plugin("xep_0199") # XMPP Ping
self.client.register_plugin("xep_0085") # Chat State Notifications
self.client.register_plugin("xep_0363") # HTTP File Upload
def start(self):
if not self._enabled or self.client is None:
return
self.log.info("XMPP client starting...")
# ensure slixmpp uses the same asyncio loop as the router
@@ -1791,7 +1845,11 @@ class XMPPClient(ClientBase):
self.client.connect()
async def start_typing_for_person(self, user, person_identifier):
    """Relay a 'typing started' notification; no-op when XMPP is disabled."""
    component = self.client
    if component is not None:
        await component.send_typing_for_person(user, person_identifier, True)
async def stop_typing_for_person(self, user, person_identifier):
    """Relay a 'typing stopped' notification; no-op when XMPP is disabled."""
    component = self.client
    if component is not None:
        await component.send_typing_for_person(user, person_identifier, False)

View File

@@ -0,0 +1,11 @@
from django.core.management.base import BaseCommand
from core.mcp.server import run_stdio_server
class Command(BaseCommand):
    """Management command that serves the MCP tool set over stdio."""

    help = "Run GIA MCP stdio server with manticore/task/documentation tools."

    def handle(self, *args, **options):
        # Positional/keyword arguments are part of the BaseCommand
        # contract but unused here.
        del args, options
        run_stdio_server()

View File

@@ -0,0 +1,40 @@
from __future__ import annotations
import json
from django.core.management.base import BaseCommand
from core.memory.pipeline import run_memory_hygiene
class Command(BaseCommand):
    """Management command wrapping the memory hygiene pipeline run."""

    help = "Run memory hygiene checks (stale decay + contradiction queueing)."

    def add_arguments(self, parser):
        parser.add_argument("--user-id", default="")
        parser.add_argument("--dry-run", action="store_true", default=False)
        parser.add_argument("--json", action="store_true", default=False)

    def handle(self, *args, **options):
        raw_user = str(options.get("user_id") or "").strip()
        # An empty --user-id means "all users".
        user_id = int(raw_user) if raw_user else None
        dry_run = bool(options.get("dry_run"))
        emit_json = bool(options.get("json"))
        result = run_memory_hygiene(user_id=user_id, dry_run=dry_run)
        payload = {
            "user_id": user_id,
            "dry_run": dry_run,
            "result": result,
        }
        if emit_json:
            self.stdout.write(json.dumps(payload, indent=2, sort_keys=True))
            return
        summary = (
            "memory-hygiene "
            f"user={user_id if user_id is not None else '-'} "
            f"dry_run={'yes' if dry_run else 'no'} "
            f"expired={int(result.get('expired') or 0)} "
            f"contradictions={int(result.get('contradictions') or 0)} "
            f"queued={int(result.get('queued_requests') or 0)}"
        )
        self.stdout.write(summary)

View File

@@ -0,0 +1,46 @@
from __future__ import annotations
import json
from django.core.management.base import BaseCommand, CommandError
from core.memory.pipeline import suggest_memories_from_recent_messages
class Command(BaseCommand):
    """Management command that queues proposed memories from message history."""

    help = "Suggest proposed MemoryItem rows from recent inbound message text."

    def add_arguments(self, parser):
        parser.add_argument("--user-id", required=True)
        parser.add_argument("--limit-messages", type=int, default=300)
        parser.add_argument("--max-items", type=int, default=30)
        parser.add_argument("--json", action="store_true", default=False)

    def handle(self, *args, **options):
        raw_user = str(options.get("user_id") or "").strip()
        if not raw_user:
            raise CommandError("--user-id is required")
        user_id = int(raw_user)
        # Clamp to sane lower bounds; defaults mirror add_arguments.
        limit_messages = max(1, int(options.get("limit_messages") or 300))
        max_items = max(1, int(options.get("max_items") or 30))
        emit_json = bool(options.get("json"))
        result = suggest_memories_from_recent_messages(
            user_id=user_id,
            limit_messages=limit_messages,
            max_items=max_items,
        )
        payload = {
            "user_id": user_id,
            "limit_messages": limit_messages,
            "max_items": max_items,
            "result": result,
        }
        if emit_json:
            self.stdout.write(json.dumps(payload, indent=2, sort_keys=True))
            return
        self.stdout.write(
            "memory-suggest-from-messages "
            f"user={payload['user_id']} "
            f"scanned={int(result.get('scanned') or 0)} "
            f"queued={int(result.get('queued') or 0)}"
        )

3
core/mcp/__init__.py Normal file
View File

@@ -0,0 +1,3 @@
from .server import run_stdio_server
__all__ = ["run_stdio_server"]

149
core/mcp/server.py Normal file
View File

@@ -0,0 +1,149 @@
from __future__ import annotations
import json
import os
import sys
from typing import Any
import django
from core.mcp.tools import execute_tool, format_tool_content, tool_specs
from core.util import logs
log = logs.get_logger("mcp-server")
_compat_newline_mode = False
def _setup_django() -> None:
    """Initialise Django so ORM-backed tools work outside manage.py.

    Uses ``setdefault`` so an explicitly exported DJANGO_SETTINGS_MODULE
    wins over the "app.settings" default.
    """
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings")
    django.setup()
def _response(msg_id: Any, result: dict[str, Any]) -> dict[str, Any]:
return {"jsonrpc": "2.0", "id": msg_id, "result": result}
def _error(msg_id: Any, code: int, message: str) -> dict[str, Any]:
return {"jsonrpc": "2.0", "id": msg_id, "error": {"code": code, "message": message}}
def _read_message() -> dict[str, Any] | None:
    """Read one JSON-RPC message from stdin; return None on EOF.

    Supports two framings and remembers which one the client used (via the
    module-global ``_compat_newline_mode``) so replies use the same framing:

    * newline-delimited JSON — detected when the first non-header line
      starts with ``{`` or ``[``;
    * LSP-style ``Content-Length`` headers terminated by a blank line.
    """
    global _compat_newline_mode
    headers: dict[str, str] = {}
    pending_body = b""
    while True:
        line = sys.stdin.buffer.readline()
        if not line:
            # EOF before a complete message.
            return None
        if not headers and line.lstrip().startswith((b"{", b"[")):
            # Bare JSON on the first line: switch to newline framing.
            _compat_newline_mode = True
            return json.loads(line.decode("utf-8").strip())
        # NOTE(review): readline() stops at the first b"\n", so a single
        # `line` can never contain b"\r\n\r\n" or b"\n\n"; both branches
        # below look unreachable — confirm whether this sep handling is
        # needed for some other input source.
        sep = None
        if b"\r\n\r\n" in line:
            sep = b"\r\n\r\n"
        elif b"\n\n" in line:
            sep = b"\n\n"
        if sep is not None:
            # Header block and start of the body arrived in one chunk.
            header_line, tail = line.split(sep, 1)
            pending_body = tail
        else:
            header_line = line
        if header_line in (b"\r\n", b"\n"):
            # Blank line ends the header block.
            break
        decoded = header_line.decode("utf-8").strip()
        if ":" in decoded:
            key, value = decoded.split(":", 1)
            headers[key.strip().lower()] = value.strip()
        if sep is not None:
            break
    length_raw = headers.get("content-length")
    if not length_raw:
        # Headers without Content-Length: fall back to newline framing and
        # treat the next line (or the already-buffered tail) as the body.
        if not pending_body:
            pending_body = sys.stdin.buffer.readline()
        if not pending_body:
            return None
        _compat_newline_mode = True
        return json.loads(pending_body.decode("utf-8").strip())
    length = int(length_raw)
    body = pending_body
    if len(body) < length:
        # Read exactly the remaining declared bytes.
        body += sys.stdin.buffer.read(length - len(body))
    body = body[:length]
    if not body:
        return None
    return json.loads(body.decode("utf-8"))
def _write_message(payload: dict[str, Any]) -> None:
    """Serialise *payload* to stdout using the framing the client spoke."""
    body = json.dumps(payload, separators=(",", ":"), ensure_ascii=False)
    out = sys.stdout.buffer
    if _compat_newline_mode:
        # Newline-delimited JSON for clients that skip header framing.
        out.write((body + "\n").encode("utf-8"))
    else:
        encoded = body.encode("utf-8")
        out.write(f"Content-Length: {len(encoded)}\r\n\r\n".encode("utf-8"))
        out.write(encoded)
    out.flush()
def _handle_message(message: dict[str, Any]) -> dict[str, Any] | None:
    """Dispatch one JSON-RPC request.

    Returns a response dict, or None for notifications (which get no
    reply). Unknown methods produce a -32601 error; tool failures are
    reported as successful responses carrying an ``isError`` payload.
    """
    msg_id = message.get("id")
    method = str(message.get("method") or "")
    params = message.get("params") or {}
    if method == "notifications/initialized":
        return None
    if method == "initialize":
        handshake = {
            "protocolVersion": "2025-06-18",
            "serverInfo": {"name": "gia-manticore-mcp", "version": "0.1.0"},
            "capabilities": {"tools": {}},
        }
        return _response(msg_id, handshake)
    if method == "ping":
        return _response(msg_id, {})
    if method == "tools/list":
        return _response(msg_id, {"tools": tool_specs()})
    if method != "tools/call":
        return _error(msg_id, -32601, f"Method not found: {method}")
    tool_name = str(params.get("name") or "").strip()
    tool_args = params.get("arguments") or {}
    try:
        payload = execute_tool(tool_name, tool_args)
    except Exception as exc:
        log.warning("tool call failed name=%s err=%s", tool_name, exc)
        failure = {
            "isError": True,
            "content": [{"type": "text", "text": json.dumps({"error": str(exc)})}],
        }
        return _response(msg_id, failure)
    return _response(msg_id, format_tool_content(payload))
def run_stdio_server() -> None:
    """Serve MCP requests from stdin until the stream closes (EOF)."""
    _setup_django()
    while True:
        incoming = _read_message()
        if incoming is None:
            # EOF: the client disconnected.
            return
        try:
            reply = _handle_message(incoming)
            if reply is not None:
                _write_message(reply)
        except Exception as exc:
            # Surface handler failures as JSON-RPC errors instead of dying.
            _write_message(_error(incoming.get("id"), -32000, str(exc)))
if __name__ == "__main__":
run_stdio_server()

1220
core/mcp/tools.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +1,4 @@
from .search_backend import get_memory_search_backend
from .retrieval import retrieve_memories_for_prompt
__all__ = ["get_memory_search_backend"]
__all__ = ["get_memory_search_backend", "retrieve_memories_for_prompt"]

419
core/memory/pipeline.py Normal file
View File

@@ -0,0 +1,419 @@
from __future__ import annotations
import re
from datetime import timezone as dt_timezone
from typing import Any
from django.db import transaction
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from core.models import (
MemoryChangeRequest,
MemoryItem,
MemorySourceReference,
MessageEvent,
WorkspaceConversation,
)
from core.util import logs
log = logs.get_logger("memory-pipeline")
_LIKE_RE = re.compile(
r"\b(?:i (?:like|love|prefer)|my favorite)\s+(?P<value>[^.!?]{2,120})",
re.IGNORECASE,
)
_DISLIKE_RE = re.compile(
r"\b(?:i (?:dislike|hate|avoid)|i don't like)\s+(?P<value>[^.!?]{2,120})",
re.IGNORECASE,
)
_STYLE_RE = re.compile(
r"\b(?:please|pls)\s+(?P<value>[^.!?]{3,120})",
re.IGNORECASE,
)
def _clean_value(value: str) -> str:
return " ".join(str(value or "").strip().split())
def extract_memory_candidates(text: str) -> list[dict[str, Any]]:
    """Scan *text* for memory-worthy statements (likes, dislikes, style asks).

    Returns a list of candidate dicts with ``memory_kind``, ``field``,
    ``text`` and ``confidence_score`` keys; [] for blank input or no hits.
    """
    source = str(text or "").strip()
    if not source:
        return []
    # (pattern, content field, memory kind, confidence) scan rules.
    rules = (
        (
            re.compile(
                r"\b(?:i (?:like|love|prefer)|my favorite)\s+(?P<value>[^.!?]{2,120})",
                re.IGNORECASE,
            ),
            "likes",
            "fact",
            0.68,
        ),
        (
            re.compile(
                r"\b(?:i (?:dislike|hate|avoid)|i don't like)\s+(?P<value>[^.!?]{2,120})",
                re.IGNORECASE,
            ),
            "dislikes",
            "fact",
            0.68,
        ),
        (
            re.compile(
                r"\b(?:please|pls)\s+(?P<value>[^.!?]{3,120})",
                re.IGNORECASE,
            ),
            "communication_style",
            "state",
            0.52,
        ),
    )
    found: list[dict[str, Any]] = []
    for pattern, field, kind, confidence in rules:
        for hit in pattern.finditer(source):
            cleaned = " ".join(str(hit.group("value") or "").split())
            if len(cleaned) < 3:
                # Too short to be a meaningful memory.
                continue
            found.append(
                {
                    "memory_kind": kind,
                    "field": field,
                    "text": cleaned,
                    "confidence_score": confidence,
                }
            )
    return found
def _existing_fingerprints(user_id: int) -> set[tuple[str, str, str, str]]:
    """Return dedupe keys for every MemoryItem the user already has.

    A key is (kind, conversation id, person id, "field:text"), with kind,
    field and text lowercased so comparisons are case-insensitive.
    """
    rows = MemoryItem.objects.filter(user_id=int(user_id)).only(
        "memory_kind",
        "conversation_id",
        "person_id",
        "content",
    )
    seen: set[tuple[str, str, str, str]] = set()
    for row in rows:
        payload = row.content or {}
        field_key = str(payload.get("field") or "").strip().lower()
        text_key = _clean_value(str(payload.get("text") or "")).lower()
        seen.add(
            (
                str(row.memory_kind or "").strip().lower(),
                str(row.conversation_id or "").strip(),
                str(row.person_id or "").strip(),
                f"{field_key}:{text_key}",
            )
        )
    return seen
def _infer_single_person_id(conversation: WorkspaceConversation) -> str:
    """Return the sole participant's id, or "" when not exactly one.

    Fetches at most two ids — enough to tell "one" from "many" cheaply.
    """
    ids = list(conversation.participants.values_list("id", flat=True)[:2])
    if len(ids) == 1:
        return str(ids[0] or "")
    return ""
@transaction.atomic
def suggest_memories_from_recent_messages(
    *,
    user_id: int,
    limit_messages: int = 300,
    max_items: int = 30,
) -> dict[str, int]:
    """Scan recent inbound messages and queue proposed memories for review.

    For each candidate found by :func:`extract_memory_candidates`, creates a
    ``proposed`` MemoryItem, a MemorySourceReference back to the message, and
    a pending MemoryChangeRequest — skipping candidates whose fingerprint
    (kind, conversation, person, field:text) already exists for the user.
    Runs inside one transaction; returns ``{"scanned", "queued"}`` counts.
    """
    # Clamp caller-supplied limits to sane bounds.
    safe_limit_messages = max(1, min(2000, int(limit_messages or 300)))
    safe_max_items = max(1, min(500, int(max_items or 30)))
    existing = _existing_fingerprints(int(user_id))
    scanned = 0
    queued = 0
    rows = (
        MessageEvent.objects.filter(user_id=int(user_id), direction="in")
        .select_related("conversation")
        .order_by("-ts")[:safe_limit_messages]
    )
    for event in rows:
        scanned += 1
        # Only attribute the memory to a person when the conversation has
        # exactly one participant; otherwise leave person unset.
        person_id = _infer_single_person_id(event.conversation)
        for candidate in extract_memory_candidates(event.text or ""):
            field = str(candidate.get("field") or "").strip().lower()
            text = _clean_value(str(candidate.get("text") or ""))
            if not text:
                continue
            fingerprint = (
                str(candidate.get("memory_kind") or "fact").strip().lower(),
                str(event.conversation_id or "").strip(),
                person_id,
                f"{field}:{text.lower()}",
            )
            if fingerprint in existing:
                continue
            item = MemoryItem.objects.create(
                user_id=int(user_id),
                conversation=event.conversation,
                person_id=person_id or None,
                memory_kind=str(candidate.get("memory_kind") or "fact"),
                status="proposed",
                content={"field": field, "text": text},
                provenance={
                    "pipeline": "message_regex",
                    "message_event_id": str(event.id),
                },
                confidence_score=float(candidate.get("confidence_score") or 0.5),
            )
            MemorySourceReference.objects.create(
                memory=item,
                message_event=event,
                source_label="message_event",
            )
            MemoryChangeRequest.objects.create(
                user_id=int(user_id),
                memory=item,
                conversation=event.conversation,
                person_id=person_id or None,
                action="create",
                status="pending",
                proposed_memory_kind=item.memory_kind,
                proposed_content=item.content,
                proposed_confidence_score=item.confidence_score,
                reason="Auto-suggested from recent inbound messages.",
                requested_by_identifier="memory-pipeline",
            )
            # Record the new fingerprint so this run also dedupes itself.
            existing.add(fingerprint)
            queued += 1
            if queued >= safe_max_items:
                return {"scanned": scanned, "queued": queued}
    return {"scanned": scanned, "queued": queued}
def _coerce_expires_at(value: Any):
    """Parse an ISO datetime string into an aware datetime.

    Blank/falsy input yields None; naive datetimes are assumed UTC.
    Raises ValueError for text that does not parse as an ISO datetime.
    """
    text = str(value or "").strip()
    if not text:
        return None
    parsed = parse_datetime(text)
    if parsed is None:
        raise ValueError("expires_at must be an ISO datetime")
    if parsed.tzinfo is not None:
        return parsed
    return timezone.make_aware(parsed, dt_timezone.utc)
@transaction.atomic
def create_memory_change_request(
    *,
    user_id: int,
    action: str,
    conversation_id: str = "",
    person_id: str = "",
    memory_id: str = "",
    memory_kind: str = "",
    content: dict[str, Any] | None = None,
    confidence_score: float | None = None,
    expires_at: str = "",
    reason: str = "",
    requested_by_identifier: str = "",
) -> MemoryChangeRequest:
    """Validate and queue a pending MemoryChangeRequest for *user_id*.

    ``action`` must be create/update/delete: ``create`` requires a
    resolvable ``conversation_id``; ``update``/``delete`` require a
    resolvable ``memory_id``. Proposed fields left unset fall back to the
    targeted memory's current values. Raises ValueError on any validation
    failure; returns the new request with status "pending".
    """
    normalized_action = str(action or "").strip().lower()
    if normalized_action not in {"create", "update", "delete"}:
        raise ValueError("action must be create/update/delete")
    # Resolve referenced rows up front so bad ids fail before any write.
    memory = None
    if memory_id:
        memory = MemoryItem.objects.filter(user_id=int(user_id), id=memory_id).first()
        if memory is None:
            raise ValueError("memory_id not found")
    conversation = None
    if conversation_id:
        conversation = WorkspaceConversation.objects.filter(
            user_id=int(user_id),
            id=conversation_id,
        ).first()
        if conversation is None:
            raise ValueError("conversation_id not found")
    if normalized_action == "create" and conversation is None:
        raise ValueError("conversation_id is required for create")
    if normalized_action in {"update", "delete"} and memory is None:
        raise ValueError("memory_id is required for update/delete")
    return MemoryChangeRequest.objects.create(
        user_id=int(user_id),
        memory=memory,
        # Inherit scope from the targeted memory when not given explicitly.
        conversation=conversation or (memory.conversation if memory else None),
        person_id=person_id or (str(memory.person_id or "") if memory else "") or None,
        action=normalized_action,
        status="pending",
        proposed_memory_kind=str(memory_kind or (memory.memory_kind if memory else "")).strip(),
        proposed_content=dict(content or {}),
        proposed_confidence_score=(
            float(confidence_score)
            if confidence_score is not None
            else (float(memory.confidence_score) if memory else None)
        ),
        proposed_expires_at=_coerce_expires_at(expires_at),
        reason=str(reason or "").strip(),
        requested_by_identifier=str(requested_by_identifier or "").strip(),
    )
@transaction.atomic
def review_memory_change_request(
    *,
    user_id: int,
    request_id: str,
    decision: str,
    reviewer_identifier: str = "",
    note: str = "",
) -> MemoryChangeRequest:
    """Approve or reject a pending MemoryChangeRequest and apply it.

    ``decision`` must be "approve" or "reject". Rejection only stamps the
    request; approval additionally applies the change to the MemoryItem
    (create/activate, update fields, or deprecate on delete) and moves the
    request through "approved" to "applied". Raises
    MemoryChangeRequest.DoesNotExist for unknown ids and ValueError for a
    non-pending request or an invalid decision.
    """
    req = MemoryChangeRequest.objects.select_related("memory", "conversation").get(
        id=request_id,
        user_id=int(user_id),
    )
    if req.status != "pending":
        raise ValueError("request is not pending")
    now = timezone.now()
    normalized_decision = str(decision or "").strip().lower()
    if normalized_decision not in {"approve", "reject"}:
        raise ValueError("decision must be approve/reject")
    req.reviewed_by_identifier = str(reviewer_identifier or "").strip()
    req.reviewed_at = now
    if note:
        # Append the review note to the existing reason text.
        req.reason = f"{req.reason}\n\nReview note: {str(note).strip()}".strip()
    if normalized_decision == "reject":
        req.status = "rejected"
        req.save(
            update_fields=[
                "status",
                "reviewed_by_identifier",
                "reviewed_at",
                "reason",
                "updated_at",
            ]
        )
        return req
    # Persist the approval before applying so a failure while applying
    # leaves an auditable "approved" state.
    req.status = "approved"
    req.save(
        update_fields=[
            "status",
            "reviewed_by_identifier",
            "reviewed_at",
            "reason",
            "updated_at",
        ]
    )
    memory = req.memory
    if req.action == "create":
        if memory is None:
            if req.conversation is None:
                raise ValueError("approved create request missing conversation")
            memory = MemoryItem.objects.create(
                user_id=int(user_id),
                conversation=req.conversation,
                person_id=req.person_id,
                memory_kind=req.proposed_memory_kind or "fact",
                status="active",
                content=req.proposed_content or {},
                confidence_score=float(req.proposed_confidence_score or 0.5),
                expires_at=req.proposed_expires_at,
                last_verified_at=now,
                provenance={"approved_request_id": str(req.id)},
            )
            req.memory = memory
        else:
            # The proposed item already exists (e.g. pipeline-created):
            # just activate it.
            memory.status = "active"
            memory.last_verified_at = now
            memory.save(update_fields=["status", "last_verified_at", "updated_at"])
    elif req.action == "update":
        if memory is None:
            raise ValueError("approved update request missing memory")
        if req.proposed_memory_kind:
            memory.memory_kind = req.proposed_memory_kind
        if req.proposed_content:
            memory.content = req.proposed_content
        if req.proposed_confidence_score is not None:
            memory.confidence_score = float(req.proposed_confidence_score)
        # expires_at is always taken from the proposal (may clear it).
        memory.expires_at = req.proposed_expires_at
        memory.last_verified_at = now
        memory.status = "active"
        memory.save()
    else:
        # Delete is a soft delete: the item is deprecated, not removed.
        if memory is None:
            raise ValueError("approved delete request missing memory")
        memory.status = "deprecated"
        memory.last_verified_at = now
        memory.save(update_fields=["status", "last_verified_at", "updated_at"])
    req.status = "applied"
    req.save(update_fields=["status", "memory", "updated_at"])
    return req
@transaction.atomic
def run_memory_hygiene(*, user_id: int | None = None, dry_run: bool = False) -> dict[str, int]:
    """Deprecate expired memories and queue review requests for contradictions.

    Two passes over active MemoryItem rows (optionally scoped to *user_id*):

    1. Items whose ``expires_at`` has passed are marked ``deprecated``.
    2. Items sharing the same (user, person, conversation, kind, field)
       scope but holding different normalized text values are treated as
       contradictory; one pending ``update`` MemoryChangeRequest is queued
       per affected item, skipping items that already have a pending
       contradiction request.

    With ``dry_run=True`` nothing is written; counts are still computed.
    Returns ``{"expired", "contradictions", "queued_requests"}``.
    """
    now = timezone.now()
    queryset = MemoryItem.objects.filter(status="active")
    if user_id is not None:
        queryset = queryset.filter(user_id=int(user_id))
    expired_ids = list(
        queryset.filter(expires_at__isnull=False, expires_at__lte=now).values_list(
            "id",
            flat=True,
        )
    )
    expired = len(expired_ids)
    if expired and not dry_run:
        MemoryItem.objects.filter(id__in=expired_ids).update(status="deprecated")
    contradictions = 0
    queued = 0
    # scope key -> normalized text -> items carrying that text.
    grouped: dict[tuple[int, str, str, str, str], dict[str, list[MemoryItem]]] = {}
    # The queryset is re-evaluated here, so items deprecated above are
    # excluded from contradiction detection (when not a dry run).
    for item in queryset.select_related("conversation", "person"):
        content = item.content or {}
        field = str(content.get("field") or content.get("key") or "").strip().lower()
        text = _clean_value(str(content.get("text") or content.get("value") or "")).lower()
        if not field or not text:
            continue
        scope = (
            int(item.user_id),
            str(item.person_id or ""),
            str(item.conversation_id or ""),
            str(item.memory_kind or ""),
            field,
        )
        # Idiomatic chained setdefault (was two separate setdefault calls).
        grouped.setdefault(scope, {}).setdefault(text, []).append(item)
    for values in grouped.values():
        # A scope with a single distinct value has no contradiction
        # (len(values), not len(values.keys())).
        if len(values) <= 1:
            continue
        flat = [item for subset in values.values() for item in subset]
        contradictions += len(flat)
        if dry_run:
            continue
        for item in flat:
            already_pending = MemoryChangeRequest.objects.filter(
                user_id=item.user_id,
                memory=item,
                action="update",
                status="pending",
                reason__icontains="contradiction",
            ).exists()
            if already_pending:
                continue
            MemoryChangeRequest.objects.create(
                user_id=item.user_id,
                memory=item,
                conversation=item.conversation,
                person=item.person,
                action="update",
                status="pending",
                proposed_memory_kind=item.memory_kind,
                proposed_content=item.content,
                proposed_confidence_score=item.confidence_score,
                proposed_expires_at=item.expires_at,
                reason="Contradiction detected by hygiene job.",
                requested_by_identifier="memory-hygiene",
            )
            queued += 1
    log.info(
        "memory hygiene user=%s dry_run=%s expired=%s contradictions=%s queued=%s",
        user_id if user_id is not None else "-",
        dry_run,
        expired,
        contradictions,
        queued,
    )
    return {
        "expired": expired,
        "contradictions": contradictions,
        "queued_requests": queued,
    }

123
core/memory/retrieval.py Normal file
View File

@@ -0,0 +1,123 @@
from __future__ import annotations
from typing import Any
from django.db.models import Q
from django.utils import timezone
from core.memory.search_backend import get_memory_search_backend
from core.models import MemoryItem
def _coerce_statuses(value: Any, default: tuple[str, ...]) -> tuple[str, ...]:
if isinstance(value, (list, tuple, set)):
items = [str(item or "").strip().lower() for item in value]
else:
items = [item.strip().lower() for item in str(value or "").split(",")]
cleaned = tuple(item for item in items if item)
return cleaned or default
def _base_queryset(
    *,
    user_id: int,
    person_id: str = "",
    conversation_id: str = "",
    statuses: tuple[str, ...] = ("active",),
):
    """Build the scoped MemoryItem queryset shared by all retrieval paths.

    Always excludes rows whose ``expires_at`` lies in the past; optional
    person/conversation scoping is applied only when an id is supplied.
    """
    reference_time = timezone.now()
    qs = MemoryItem.objects.filter(user_id=int(user_id))
    if statuses:
        qs = qs.filter(status__in=list(statuses))
    # Keep never-expiring rows alongside rows that have not yet expired.
    qs = qs.filter(Q(expires_at__isnull=True) | Q(expires_at__gt=reference_time))
    if person_id:
        qs = qs.filter(person_id=person_id)
    if conversation_id:
        qs = qs.filter(conversation_id=conversation_id)
    return qs
def _serialize_item(item, *, search_score=0.0, search_summary="") -> dict[str, Any]:
    """Flatten one MemoryItem into the prompt-facing row dict.

    Shared by both retrieval paths so the row shape cannot drift between
    the search-ranked and recency-ranked branches.
    """
    return {
        "id": str(item.id),
        "memory_kind": str(item.memory_kind or ""),
        "status": str(item.status or ""),
        "person_id": str(item.person_id or ""),
        "conversation_id": str(item.conversation_id or ""),
        "content": item.content or {},
        "provenance": item.provenance or {},
        "confidence_score": float(item.confidence_score or 0.0),
        "expires_at": item.expires_at.isoformat() if item.expires_at else "",
        "last_verified_at": (
            item.last_verified_at.isoformat() if item.last_verified_at else ""
        ),
        "updated_at": item.updated_at.isoformat() if item.updated_at else "",
        "search_score": float(search_score or 0.0),
        "search_summary": str(search_summary or ""),
    }


def retrieve_memories_for_prompt(
    *,
    user_id: int,
    query: str = "",
    person_id: str = "",
    conversation_id: str = "",
    statuses: tuple[str, ...] = ("active",),
    limit: int = 20,
) -> list[dict[str, Any]]:
    """Return memory rows for prompt assembly.

    When *query* is non-empty the configured search backend ranks the rows
    (backend hit order is preserved; hits outside the user/person/conversation
    scope are dropped). Otherwise the most recently verified/updated items win.
    *limit* is clamped to the 1..200 range.
    """
    statuses = _coerce_statuses(statuses, ("active",))
    safe_limit = max(1, min(200, int(limit or 20)))
    search_text = str(query or "").strip()
    if search_text:
        backend = get_memory_search_backend()
        hits = backend.search(
            user_id=int(user_id),
            query=search_text,
            conversation_id=conversation_id,
            limit=safe_limit,
            include_statuses=statuses,
        )
        ids = [str(hit.memory_id or "").strip() for hit in hits if str(hit.memory_id or "").strip()]
        # Re-scope the hits through the ORM so expiry/person/conversation
        # filters still apply even if the index is stale.
        scoped = _base_queryset(
            user_id=int(user_id),
            person_id=person_id,
            conversation_id=conversation_id,
            statuses=statuses,
        ).filter(id__in=ids)
        by_id = {str(item.id): item for item in scoped}
        return [
            _serialize_item(
                by_id[str(hit.memory_id)],
                search_score=hit.score or 0.0,
                search_summary=hit.summary or "",
            )
            for hit in hits
            if str(hit.memory_id) in by_id
        ]
    queryset = _base_queryset(
        user_id=int(user_id),
        person_id=person_id,
        conversation_id=conversation_id,
        statuses=statuses,
    ).order_by("-last_verified_at", "-updated_at")
    return [_serialize_item(item) for item in queryset[:safe_limit]]

View File

@@ -137,6 +137,8 @@ class DjangoMemorySearchBackend(BaseMemorySearchBackend):
class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
name = "manticore"
_table_ready_cache: dict[str, float] = {}
_table_ready_ttl_seconds = 30.0
def __init__(self):
self.base_url = str(
@@ -146,6 +148,7 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
getattr(settings, "MANTICORE_MEMORY_TABLE", "gia_memory_items")
).strip() or "gia_memory_items"
self.timeout_seconds = int(getattr(settings, "MANTICORE_HTTP_TIMEOUT", 5) or 5)
self._table_cache_key = f"{self.base_url}|{self.table}"
def _sql(self, query: str) -> dict[str, Any]:
response = requests.post(
@@ -160,6 +163,9 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
return dict(payload or {})
def ensure_table(self) -> None:
last_ready = float(self._table_ready_cache.get(self._table_cache_key, 0.0) or 0.0)
if (time.time() - last_ready) <= float(self._table_ready_ttl_seconds):
return
self._sql(
(
f"CREATE TABLE IF NOT EXISTS {self.table} ("
@@ -175,6 +181,7 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
")"
)
)
self._table_ready_cache[self._table_cache_key] = time.time()
def _doc_id(self, memory_id: str) -> int:
digest = hashlib.blake2b(
@@ -206,11 +213,66 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
)
self._sql(query)
def _build_upsert_values_clause(self, item: MemoryItem) -> str:
    """Render one parenthesised VALUES tuple for a REPLACE INTO statement.

    Column order must match the explicit column list used by callers:
    (id, memory_uuid, user_id, conversation_id, memory_kind, status,
    updated_ts, summary, body).
    """
    memory_id = str(item.id)
    # Manticore document ids are integers; derive one from the UUID string.
    doc_id = self._doc_id(memory_id)
    # Summary is a truncated preview; body carries the full flattened text.
    summary = _flatten_to_text(item.content)[:280]
    body = _flatten_to_text(item.content)
    # Millisecond epoch timestamp for freshness ordering in the index.
    updated_ts = int(item.updated_at.timestamp() * 1000)
    # NOTE(review): values are inlined via self._escape rather than bound
    # parameters — correctness depends on _escape handling all content text.
    return (
        f"({doc_id},'{self._escape(memory_id)}',{int(item.user_id)},"
        f"'{self._escape(item.conversation_id)}','{self._escape(item.memory_kind)}',"
        f"'{self._escape(item.status)}',{updated_ts},"
        f"'{self._escape(summary)}','{self._escape(body)}')"
    )
def delete(self, memory_id: str) -> None:
    """Remove the indexed document for *memory_id* from the Manticore table."""
    self.ensure_table()
    self._sql(f"DELETE FROM {self.table} WHERE id={self._doc_id(memory_id)}")
def reindex(
    self,
    *,
    user_id: int | None = None,
    include_statuses: tuple[str, ...] = ("active",),
    limit: int = 2000,
) -> dict[str, int]:
    """Rebuild the search index from MemoryItem rows, newest first.

    Rows are written in batches with REPLACE INTO (insert-or-overwrite);
    rows that fail to serialise are logged and skipped.
    Returns counters: ``scanned`` rows examined, ``indexed`` rows written.
    """
    self.ensure_table()
    queryset = MemoryItem.objects.all().order_by("-updated_at")
    if user_id is not None:
        queryset = queryset.filter(user_id=int(user_id))
    if include_statuses:
        queryset = queryset.filter(status__in=list(include_statuses))
    scanned = 0
    indexed = 0
    batch_size = 100
    values: list[str] = []

    def _flush(batch: list[str]) -> int:
        # One REPLACE statement per batch keeps HTTP round-trips low.
        if not batch:
            return 0
        self._sql(
            f"REPLACE INTO {self.table} "
            "(id,memory_uuid,user_id,conversation_id,memory_kind,status,updated_ts,summary,body) "
            f"VALUES {','.join(batch)}"
        )
        return len(batch)

    for item in queryset[: max(1, int(limit))]:
        scanned += 1
        try:
            values.append(self._build_upsert_values_clause(item))
        except Exception as exc:
            # Best-effort: skip unserialisable rows and keep indexing.
            log.warning("memory-search upsert build failed id=%s err=%s", item.id, exc)
            continue
        if len(values) >= batch_size:
            indexed += _flush(values)
            values = []
    indexed += _flush(values)
    return {"scanned": scanned, "indexed": indexed}
def search(
self,
*,

View File

@@ -1,5 +1,6 @@
from asgiref.sync import sync_to_async
from django.conf import settings
import time
import uuid
from core.events.ledger import append_event
@@ -628,6 +629,277 @@ async def apply_reaction(
return target
async def _resolve_message_target(
    user,
    identifier,
    *,
    target_message_id="",
    target_ts=0,
    target_author="",
):
    """Locate the local Message a remote edit/delete refers to.

    Tries progressively weaker strategies:
      1. local primary key (when the id parses as a UUID)
      2. exact source_message_id match on the id string
      3. exact source_message_id match on the timestamp string
      4. exact ``ts`` column match
      5. nearest message within a +/-10s window around the timestamp
    Returns ``(message_or_None, strategy_name)``.
    """
    queryset = Message.objects.filter(
        user=user,
        session__identifier=identifier,
    ).select_related("session")
    target_author_value = str(target_author or "").strip()

    def _prefer_author(rows):
        # When the sender is known, narrow to their messages — but never
        # discard every candidate just because none match.
        if not target_author_value or not rows:
            return rows
        matching = [
            row
            for row in rows
            if str(row.sender_uuid or "").strip() == target_author_value
        ]
        return matching or rows

    target = None
    match_strategy = "none"
    target_uuid = str(target_message_id or "").strip()
    if target_uuid:
        is_uuid = True
        try:
            uuid.UUID(str(target_uuid))
        except Exception:
            is_uuid = False
        if is_uuid:
            target = await sync_to_async(
                lambda: queryset.filter(id=target_uuid).order_by("-ts").first()
            )()
            if target is not None:
                match_strategy = "local_message_id"
        if target is None:
            target = await sync_to_async(
                lambda: queryset.filter(source_message_id=target_uuid)
                .order_by("-ts")
                .first()
            )()
            if target is not None:
                match_strategy = "source_message_id"
    if target is None:
        try:
            ts_value = int(target_ts or 0)
        except Exception:
            ts_value = 0
        if ts_value > 0:
            # Some transports store the origin timestamp as the source id.
            exact_candidates = _prefer_author(
                await sync_to_async(list)(
                    queryset.filter(source_message_id=str(ts_value)).order_by("-ts")[:20]
                )
            )
            if exact_candidates:
                target = exact_candidates[0]
                match_strategy = "exact_source_message_id_ts"
        if target is None and ts_value > 0:
            strict_ts_rows = _prefer_author(
                await sync_to_async(list)(
                    queryset.filter(ts=ts_value).order_by("-id")[:20]
                )
            )
            if strict_ts_rows:
                target = strict_ts_rows[0]
                match_strategy = "strict_ts_match"
        if target is None and ts_value > 0:
            lower = ts_value - 10_000
            upper = ts_value + 10_000
            window_rows = _prefer_author(
                await sync_to_async(list)(
                    queryset.filter(ts__gte=lower, ts__lte=upper).order_by("ts")[:200]
                )
            )
            if window_rows:
                # Closest timestamp wins; the newer message breaks ties.
                target = min(
                    window_rows,
                    key=lambda row: (
                        abs(int(row.ts or 0) - ts_value),
                        -int(row.ts or 0),
                    ),
                )
                match_strategy = "nearest_ts_window"
    return target, match_strategy
async def apply_message_edit(
    user,
    identifier,
    *,
    target_message_id="",
    target_ts=0,
    new_text="",
    source_service="",
    actor="",
    payload=None,
    trace_id="",
    target_author="",
):
    """Apply a remote message edit to the locally stored message.

    Resolves the referenced Message via ``_resolve_message_target``, appends
    an entry to the ``edit_history`` list kept in ``receipt_payload`` (capped
    at the last 200 entries), rewrites the message text when it changed, and
    records a ``message_edited`` event in the ledger. Ledger failures are
    logged, never raised.

    Returns the updated Message, or None when no target could be resolved.
    """
    target, match_strategy = await _resolve_message_target(
        user,
        identifier,
        target_message_id=target_message_id,
        target_ts=target_ts,
        target_author=target_author,
    )
    if target is None:
        log.warning(
            "edit-sync history-apply miss user=%s person_identifier=%s target_message_id=%s target_ts=%s",
            getattr(user, "id", "-"),
            getattr(identifier, "id", "-"),
            str(target_message_id or "") or "-",
            int(target_ts or 0),
        )
        return None
    old_text = str(target.text or "")
    updated_text = str(new_text or "")
    # Prefer the remote edit timestamp; fall back to the original message
    # timestamp, then to "now" in epoch milliseconds.
    event_ts = int(target_ts or target.ts or int(time.time() * 1000))
    receipt_payload = dict(target.receipt_payload or {})
    edit_history = list(receipt_payload.get("edit_history") or [])
    edit_history.append(
        {
            "edited_ts": int(event_ts),
            "source_service": str(source_service or "").strip().lower(),
            "actor": str(actor or "").strip(),
            "previous_text": old_text,
            "new_text": updated_text,
            "match_strategy": str(match_strategy or ""),
            "payload": dict(payload or {}),
        }
    )
    # Cap the history so receipt_payload cannot grow without bound.
    if len(edit_history) > 200:
        edit_history = edit_history[-200:]
    receipt_payload["edit_history"] = edit_history
    receipt_payload["last_edited_ts"] = int(event_ts)
    receipt_payload["edit_count"] = len(edit_history)
    target.receipt_payload = receipt_payload
    update_fields = ["receipt_payload"]
    # Only touch the text column when the edit actually changed it.
    if old_text != updated_text:
        target.text = updated_text
        update_fields.append("text")
    await sync_to_async(target.save)(update_fields=update_fields)
    try:
        await append_event(
            user=user,
            session=target.session,
            ts=int(event_ts),
            event_type="message_edited",
            direction="system",
            actor_identifier=str(actor or ""),
            origin_transport=str(source_service or ""),
            origin_message_id=str(target.source_message_id or target.id),
            origin_chat_id=str(target.source_chat_id or ""),
            payload={
                "message_id": str(target.id),
                "target_message_id": str(target_message_id or target.id),
                "target_ts": int(target_ts or target.ts or 0),
                "old_text": old_text,
                "new_text": updated_text,
                "source_service": str(source_service or "").strip().lower(),
                "actor": str(actor or ""),
                "match_strategy": str(match_strategy or ""),
            },
            raw_payload=dict(payload or {}),
            trace_id=ensure_trace_id(trace_id, payload or {}),
        )
    except Exception as exc:
        # Ledger writes are best-effort; never fail the edit for them.
        log.warning(
            "Event ledger append failed for message edit message=%s: %s",
            target.id,
            exc,
        )
    return target
async def apply_message_delete(
    user,
    identifier,
    *,
    target_message_id="",
    target_ts=0,
    source_service="",
    actor="",
    payload=None,
    trace_id="",
    target_author="",
):
    """Apply a remote message deletion as a soft delete on local history.

    The message row is kept; a delete record is appended to ``delete_events``
    in ``receipt_payload`` (capped at the last 200 entries) and
    ``is_deleted`` is set. A ``message_deleted`` ledger event is recorded;
    ledger failures are logged, never raised.

    Returns the updated Message, or None when no target could be resolved.
    """
    target, match_strategy = await _resolve_message_target(
        user,
        identifier,
        target_message_id=target_message_id,
        target_ts=target_ts,
        target_author=target_author,
    )
    if target is None:
        log.warning(
            "delete-sync history-apply miss user=%s person_identifier=%s target_message_id=%s target_ts=%s",
            getattr(user, "id", "-"),
            getattr(identifier, "id", "-"),
            str(target_message_id or "") or "-",
            int(target_ts or 0),
        )
        return None
    # Prefer the remote timestamp; fall back to the message's own ts, then now.
    event_ts = int(target_ts or target.ts or int(time.time() * 1000))
    deleted_row = {
        "deleted_ts": int(event_ts),
        "source_service": str(source_service or "").strip().lower(),
        "actor": str(actor or "").strip(),
        "match_strategy": str(match_strategy or ""),
        "payload": dict(payload or {}),
    }
    receipt_payload = dict(target.receipt_payload or {})
    delete_events = list(receipt_payload.get("delete_events") or [])
    delete_events.append(dict(deleted_row))
    # Cap history so receipt_payload cannot grow without bound.
    if len(delete_events) > 200:
        delete_events = delete_events[-200:]
    receipt_payload["delete_events"] = delete_events
    # "deleted" keeps the most recent delete record for quick access.
    receipt_payload["deleted"] = deleted_row
    receipt_payload["is_deleted"] = True
    target.receipt_payload = receipt_payload
    await sync_to_async(target.save)(update_fields=["receipt_payload"])
    try:
        await append_event(
            user=user,
            session=target.session,
            ts=int(event_ts),
            event_type="message_deleted",
            direction="system",
            actor_identifier=str(actor or ""),
            origin_transport=str(source_service or ""),
            origin_message_id=str(target.source_message_id or target.id),
            origin_chat_id=str(target.source_chat_id or ""),
            payload={
                "message_id": str(target.id),
                "target_message_id": str(target_message_id or target.id),
                "target_ts": int(target_ts or target.ts or 0),
                "source_service": str(source_service or "").strip().lower(),
                "actor": str(actor or ""),
                "match_strategy": str(match_strategy or ""),
            },
            raw_payload=dict(payload or {}),
            trace_id=ensure_trace_id(trace_id, payload or {}),
        )
    except Exception as exc:
        # Ledger writes are best-effort; never fail the delete for them.
        log.warning(
            "Event ledger append failed for message delete message=%s: %s",
            target.id,
            exc,
        )
    return target
def _iter_bridge_refs(receipt_payload, source_service):
payload = dict(receipt_payload or {})
refs = payload.get("bridge_refs") or {}

View File

@@ -0,0 +1,444 @@
# Generated by ChatGPT on 2026-03-05
import uuid
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Memory governance schema: provenance/confidence/expiry on MemoryItem,
    plus KnowledgeArticle/KnowledgeRevision, MCPToolAuditLog,
    MemoryChangeRequest, MemorySourceReference and TaskArtifactLink.

    Auto-generated; keep operation order intact.
    """

    dependencies = [
        ("core", "0035_conversationevent_adapterhealthevent"),
    ]

    operations = [
        # --- new MemoryItem columns -------------------------------------
        migrations.AddField(
            model_name="memoryitem",
            name="confidence_score",
            field=models.FloatField(
                default=0.5,
                help_text="Confidence score for this memory (0.0-1.0).",
            ),
        ),
        migrations.AddField(
            model_name="memoryitem",
            name="expires_at",
            field=models.DateTimeField(
                blank=True,
                help_text="Optional expiry timestamp for stale memory decay.",
                null=True,
            ),
        ),
        migrations.AddField(
            model_name="memoryitem",
            name="last_verified_at",
            field=models.DateTimeField(
                blank=True,
                help_text="Last operator verification timestamp.",
                null=True,
            ),
        ),
        migrations.AddField(
            model_name="memoryitem",
            name="person",
            field=models.ForeignKey(
                blank=True,
                help_text=(
                    "Optional person this memory is about for person-centric recall."
                ),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="memory_items",
                to="core.person",
            ),
        ),
        migrations.AddField(
            model_name="memoryitem",
            name="provenance",
            field=models.JSONField(
                blank=True,
                default=dict,
                help_text=(
                    "Source metadata for this memory (agent/tool/message references)."
                ),
            ),
        ),
        # --- new models --------------------------------------------------
        migrations.CreateModel(
            name="KnowledgeArticle",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("title", models.CharField(max_length=255)),
                ("slug", models.SlugField(max_length=255)),
                ("markdown", models.TextField(blank=True, default="")),
                ("tags", models.JSONField(blank=True, default=list)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("draft", "Draft"),
                            ("published", "Published"),
                            ("archived", "Archived"),
                        ],
                        default="draft",
                        max_length=16,
                    ),
                ),
                ("owner_identifier", models.CharField(blank=True, default="", max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "related_task",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="knowledge_articles",
                        to="core.derivedtask",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="knowledge_articles",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["user", "status", "updated_at"],
                        name="core_knowl_user_id_331625_idx",
                    ),
                    models.Index(
                        fields=["related_task", "updated_at"],
                        name="core_knowl_related_cf6071_idx",
                    ),
                ],
                "constraints": [
                    models.UniqueConstraint(
                        fields=("user", "slug"),
                        name="unique_knowledge_article_slug_per_user",
                    )
                ],
            },
        ),
        migrations.CreateModel(
            name="MCPToolAuditLog",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("tool_name", models.CharField(max_length=255)),
                ("request_args", models.JSONField(blank=True, default=dict)),
                ("response_meta", models.JSONField(blank=True, default=dict)),
                ("ok", models.BooleanField(default=True)),
                ("error", models.TextField(blank=True, default="")),
                ("duration_ms", models.PositiveIntegerField(default=0)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="mcp_tool_audit_logs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["tool_name", "created_at"],
                        name="core_mcpau_tool_na_2db9d7_idx",
                    ),
                    models.Index(
                        fields=["user", "created_at"],
                        name="core_mcpau_user_id_4a55f1_idx",
                    ),
                    models.Index(
                        fields=["ok", "created_at"],
                        name="core_mcpau_ok_1f5c91_idx",
                    ),
                ]
            },
        ),
        migrations.CreateModel(
            name="MemoryChangeRequest",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                (
                    "action",
                    models.CharField(
                        choices=[
                            ("create", "Create"),
                            ("update", "Update"),
                            ("delete", "Delete"),
                        ],
                        max_length=16,
                    ),
                ),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("approved", "Approved"),
                            ("rejected", "Rejected"),
                            ("applied", "Applied"),
                        ],
                        default="pending",
                        max_length=16,
                    ),
                ),
                ("proposed_memory_kind", models.CharField(blank=True, default="", max_length=16)),
                ("proposed_content", models.JSONField(blank=True, default=dict)),
                ("proposed_confidence_score", models.FloatField(blank=True, null=True)),
                ("proposed_expires_at", models.DateTimeField(blank=True, null=True)),
                ("reason", models.TextField(blank=True, default="")),
                ("requested_by_identifier", models.CharField(blank=True, default="", max_length=255)),
                ("reviewed_by_identifier", models.CharField(blank=True, default="", max_length=255)),
                ("reviewed_at", models.DateTimeField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "conversation",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="memory_change_requests",
                        to="core.workspaceconversation",
                    ),
                ),
                (
                    "memory",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="change_requests",
                        to="core.memoryitem",
                    ),
                ),
                (
                    "person",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="memory_change_requests",
                        to="core.person",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="memory_change_requests",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["user", "status", "created_at"],
                        name="core_memor_user_id_31963a_idx",
                    ),
                    models.Index(
                        fields=["memory", "created_at"],
                        name="core_memor_memory__1b9d7e_idx",
                    ),
                ]
            },
        ),
        migrations.CreateModel(
            name="MemorySourceReference",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("source_label", models.CharField(blank=True, default="", max_length=255)),
                ("source_uri", models.CharField(blank=True, default="", max_length=1024)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "memory",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="source_references",
                        to="core.memoryitem",
                    ),
                ),
                (
                    "message",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="memory_source_references",
                        to="core.message",
                    ),
                ),
                (
                    "message_event",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="memory_source_references",
                        to="core.messageevent",
                    ),
                ),
                (
                    "source_request",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="memory_source_references",
                        to="core.airequest",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["memory", "created_at"],
                        name="core_memor_memory__92752b_idx",
                    ),
                    models.Index(fields=["source_uri"], name="core_memor_source__5bb587_idx"),
                ]
            },
        ),
        migrations.CreateModel(
            name="TaskArtifactLink",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("kind", models.CharField(default="note", max_length=64)),
                ("uri", models.CharField(blank=True, default="", max_length=1024)),
                ("path", models.CharField(blank=True, default="", max_length=1024)),
                ("summary", models.TextField(blank=True, default="")),
                ("created_by_identifier", models.CharField(blank=True, default="", max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "task",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="artifact_links",
                        to="core.derivedtask",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["task", "created_at"],
                        name="core_taskar_task_id_cf5572_idx",
                    ),
                    models.Index(
                        fields=["kind", "created_at"],
                        name="core_taskar_kind_5dbab7_idx",
                    ),
                ]
            },
        ),
        migrations.CreateModel(
            name="KnowledgeRevision",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("revision", models.PositiveIntegerField()),
                ("author_tool", models.CharField(blank=True, default="", max_length=255)),
                ("author_identifier", models.CharField(blank=True, default="", max_length=255)),
                ("summary", models.TextField(blank=True, default="")),
                ("markdown", models.TextField(blank=True, default="")),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "article",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="revisions",
                        to="core.knowledgearticle",
                    ),
                ),
            ],
            options={
                "ordering": ["article", "revision"],
                "constraints": [
                    models.UniqueConstraint(
                        fields=("article", "revision"),
                        name="unique_knowledge_revision_per_article",
                    )
                ],
            },
        ),
        # --- new MemoryItem indexes --------------------------------------
        migrations.AddIndex(
            model_name="memoryitem",
            index=models.Index(
                fields=["user", "status", "updated_at"],
                name="core_mem_user_stat_upd_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="memoryitem",
            index=models.Index(
                fields=["user", "person", "status", "updated_at"],
                name="core_mem_user_pers_stat_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="memoryitem",
            index=models.Index(
                fields=["user", "conversation", "status", "updated_at"],
                name="core_mem_user_conv_stat_idx",
            ),
        ),
    ]

View File

@@ -1129,6 +1129,14 @@ class MemoryItem(models.Model):
related_name="memory_items",
help_text="Conversation scope this memory item belongs to.",
)
person = models.ForeignKey(
Person,
on_delete=models.SET_NULL,
null=True,
blank=True,
related_name="memory_items",
help_text="Optional person this memory is about for person-centric recall.",
)
memory_kind = models.CharField(
max_length=16,
choices=MEMORY_KIND_CHOICES,
@@ -1145,6 +1153,25 @@ class MemoryItem(models.Model):
blank=True,
help_text="Structured memory payload (schema can evolve by type).",
)
provenance = models.JSONField(
default=dict,
blank=True,
help_text="Source metadata for this memory (agent/tool/message references).",
)
confidence_score = models.FloatField(
default=0.5,
help_text="Confidence score for this memory (0.0-1.0).",
)
expires_at = models.DateTimeField(
null=True,
blank=True,
help_text="Optional expiry timestamp for stale memory decay.",
)
last_verified_at = models.DateTimeField(
null=True,
blank=True,
help_text="Last operator verification timestamp.",
)
source_request = models.ForeignKey(
AIRequest,
on_delete=models.SET_NULL,
@@ -1161,6 +1188,111 @@ class MemoryItem(models.Model):
help_text="Last update timestamp.",
)
class Meta:
indexes = [
models.Index(fields=["user", "status", "updated_at"]),
models.Index(fields=["user", "person", "status", "updated_at"]),
models.Index(fields=["user", "conversation", "status", "updated_at"]),
]
class MemorySourceReference(models.Model):
    """Provenance link tying a MemoryItem back to the artefact it came from.

    Each linked source (message event, message, AI request) is optional and
    soft (SET_NULL), so sources can be pruned without losing the memory; the
    free-text label/URI fields cover external sources.
    """

    memory = models.ForeignKey(
        MemoryItem,
        on_delete=models.CASCADE,
        related_name="source_references",
    )
    message_event = models.ForeignKey(
        "MessageEvent",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="memory_source_references",
    )
    message = models.ForeignKey(
        Message,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="memory_source_references",
    )
    source_request = models.ForeignKey(
        AIRequest,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="memory_source_references",
    )
    # Human-readable origin description and optional address of the source.
    source_label = models.CharField(max_length=255, blank=True, default="")
    source_uri = models.CharField(max_length=1024, blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [
            models.Index(fields=["memory", "created_at"]),
            models.Index(fields=["source_uri"]),
        ]
class MemoryChangeRequest(models.Model):
    """Reviewable proposal to create/update/delete a MemoryItem.

    Requests start ``pending`` and are resolved by a reviewer (approved,
    rejected, applied). The ``proposed_*`` fields carry the payload to apply
    on approval; conversation/person give optional scoping context.
    """

    ACTION_CHOICES = (
        ("create", "Create"),
        ("update", "Update"),
        ("delete", "Delete"),
    )
    STATUS_CHOICES = (
        ("pending", "Pending"),
        ("approved", "Approved"),
        ("rejected", "Rejected"),
        ("applied", "Applied"),
    )
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name="memory_change_requests",
    )
    # Target memory; SET_NULL keeps the audit row even if the memory goes.
    memory = models.ForeignKey(
        MemoryItem,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="change_requests",
    )
    conversation = models.ForeignKey(
        WorkspaceConversation,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="memory_change_requests",
    )
    person = models.ForeignKey(
        Person,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="memory_change_requests",
    )
    action = models.CharField(max_length=16, choices=ACTION_CHOICES)
    status = models.CharField(max_length=16, choices=STATUS_CHOICES, default="pending")
    # Proposed new state, applied when the request is approved.
    proposed_memory_kind = models.CharField(max_length=16, blank=True, default="")
    proposed_content = models.JSONField(default=dict, blank=True)
    proposed_confidence_score = models.FloatField(null=True, blank=True)
    proposed_expires_at = models.DateTimeField(null=True, blank=True)
    reason = models.TextField(blank=True, default="")
    # Free-text actor identifiers (requester and reviewer).
    requested_by_identifier = models.CharField(max_length=255, blank=True, default="")
    reviewed_by_identifier = models.CharField(max_length=255, blank=True, default="")
    reviewed_at = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        indexes = [
            models.Index(fields=["user", "status", "created_at"]),
            models.Index(fields=["memory", "created_at"]),
        ]
class AIResultSignal(models.Model):
"""
@@ -2249,6 +2381,117 @@ class DerivedTaskEvent(models.Model):
]
class TaskArtifactLink(models.Model):
    """Artefact attached to a DerivedTask (note, file path, external URI)."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    task = models.ForeignKey(
        DerivedTask,
        on_delete=models.CASCADE,
        related_name="artifact_links",
    )
    # Artefact category; defaults to a free-form note.
    kind = models.CharField(max_length=64, default="note")
    uri = models.CharField(max_length=1024, blank=True, default="")
    path = models.CharField(max_length=1024, blank=True, default="")
    summary = models.TextField(blank=True, default="")
    created_by_identifier = models.CharField(max_length=255, blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [
            models.Index(fields=["task", "created_at"]),
            models.Index(fields=["kind", "created_at"]),
        ]
class KnowledgeArticle(models.Model):
    """Markdown knowledge-base article owned by a user.

    Slugs are unique per user; an article may optionally link back to the
    DerivedTask it was produced from.
    """

    STATUS_CHOICES = (
        ("draft", "Draft"),
        ("published", "Published"),
        ("archived", "Archived"),
    )
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name="knowledge_articles",
    )
    related_task = models.ForeignKey(
        DerivedTask,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="knowledge_articles",
    )
    title = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255)
    markdown = models.TextField(blank=True, default="")
    tags = models.JSONField(default=list, blank=True)
    status = models.CharField(max_length=16, choices=STATUS_CHOICES, default="draft")
    owner_identifier = models.CharField(max_length=255, blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["user", "slug"],
                name="unique_knowledge_article_slug_per_user",
            ),
        ]
        indexes = [
            models.Index(fields=["user", "status", "updated_at"]),
            models.Index(fields=["related_task", "updated_at"]),
        ]
class KnowledgeRevision(models.Model):
    """Immutable snapshot of a KnowledgeArticle at a given revision number.

    Revision numbers are unique within an article; rows order by
    (article, revision) so history reads chronologically.
    """

    article = models.ForeignKey(
        KnowledgeArticle,
        on_delete=models.CASCADE,
        related_name="revisions",
    )
    revision = models.PositiveIntegerField()
    # Which tool/actor produced this revision.
    author_tool = models.CharField(max_length=255, blank=True, default="")
    author_identifier = models.CharField(max_length=255, blank=True, default="")
    summary = models.TextField(blank=True, default="")
    markdown = models.TextField(blank=True, default="")
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["article", "revision"],
                name="unique_knowledge_revision_per_article",
            )
        ]
        ordering = ["article", "revision"]
class MCPToolAuditLog(models.Model):
    """Audit record for a single MCP tool invocation.

    Captures request/response metadata, outcome flag, error text and timing;
    the user link is soft (SET_NULL) so logs survive account deletion.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    tool_name = models.CharField(max_length=255)
    user = models.ForeignKey(
        User,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="mcp_tool_audit_logs",
    )
    request_args = models.JSONField(default=dict, blank=True)
    response_meta = models.JSONField(default=dict, blank=True)
    ok = models.BooleanField(default=True)
    error = models.TextField(blank=True, default="")
    duration_ms = models.PositiveIntegerField(default=0)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [
            models.Index(fields=["tool_name", "created_at"]),
            models.Index(fields=["user", "created_at"]),
            models.Index(fields=["ok", "created_at"]),
        ]
class ExternalSyncEvent(models.Model):
STATUS_CHOICES = (
("pending", "Pending"),

View File

@@ -1,12 +1,13 @@
{% load static %}
{% load cache %}
{% load page_title %}
<!DOCTYPE html>
<html lang="en-GB">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>{% block browser_title %}{{ request.resolver_match.url_name|default:request.path_info|cut:"_"|cut:"/"|cut:"-"|upper|slice:":3" }}{% endblock %}</title>
<title>{% block browser_title %}{% firstof page_browser_title page_title as explicit_title %}{% if explicit_title %}{{ explicit_title }} · GIA{% else %}{% with route_value=request.resolver_match.url_name|default:request.path_info|humanize_route %}{% if route_value %}{{ route_value }} · GIA{% else %}GIA{% endif %}{% endwith %}{% endif %}{% endblock %}</title>
<link rel="shortcut icon" href="{% static 'favicon.ico' %}">
<link rel="manifest" href="{% static 'manifest.webmanifest' %}">
<link rel="stylesheet" href="{% static 'css/bulma.min.css' %}">

View File

@@ -1,117 +1,208 @@
{% extends "base.html" %}
{% block content %}
<style>
.ai-stat-box {
height: 100%;
min-height: 92px;
margin: 0;
}
</style>
<section class="section">
<div class="container">
<h1 class="title is-4">AI Execution Log</h1>
<p class="subtitle is-6">Tracked model calls and usage metrics for this account.</p>
<article class="box">
<div class="columns is-multiline">
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Total Runs</p><p class="title is-6">{{ stats.total_runs }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">OK</p><p class="title is-6 has-text-success">{{ stats.total_ok }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Failed</p><p class="title is-6 has-text-danger">{{ stats.total_failed }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Success Rate</p><p class="title is-6 has-text-info">{{ stats.success_rate }}%</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">24h Runs</p><p class="title is-6">{{ stats.last_24h_runs }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">24h Failed</p><p class="title is-6 has-text-warning">{{ stats.last_24h_failed }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">7d Runs</p><p class="title is-6">{{ stats.last_7d_runs }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Avg Duration</p><p class="title is-6">{{ stats.avg_duration_ms }}ms</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Prompt Chars</p><p class="title is-6">{{ stats.total_prompt_chars }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Response Chars</p><p class="title is-6">{{ stats.total_response_chars }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Avg Prompt</p><p class="title is-6">{{ stats.avg_prompt_chars }}</p></div></div>
<div class="column is-6-mobile is-4-tablet is-3-desktop"><div class="box ai-stat-box"><p class="heading">Avg Response</p><p class="title is-6">{{ stats.avg_response_chars }}</p></div></div>
<div class="level">
<div class="level-left">
<div class="level-item">
<div>
<h1 class="title is-4">AI Execution Log</h1>
<p class="subtitle is-6">Tracked model calls and usage metrics for this account.</p>
</div>
</div>
</div>
<div class="level-right">
<div class="level-item">
{% if stats.total_runs %}
<span class="tag is-success is-light">Tracking Active</span>
{% else %}
<span class="tag is-warning is-light">No Runs Yet</span>
{% endif %}
</div>
</div>
</div>
<article class="notification is-light">
<p class="is-size-7 has-text-grey-dark">Execution health at a glance</p>
<div class="tags mt-2">
<span class="tag is-light">Total {{ stats.total_runs }}</span>
<span class="tag is-success is-light">OK {{ stats.total_ok }}</span>
<span class="tag is-danger is-light">Failed {{ stats.total_failed }}</span>
<span class="tag is-info is-light">24h {{ stats.last_24h_runs }}</span>
<span class="tag is-warning is-light">24h Failed {{ stats.last_24h_failed }}</span>
<span class="tag is-link is-light">7d {{ stats.last_7d_runs }}</span>
</div>
<p class="is-size-7 has-text-grey-dark mt-3">Success Rate</p>
<progress class="progress is-link is-small" value="{{ stats.success_rate }}" max="100">{{ stats.success_rate }}%</progress>
</article>
<div class="columns">
<div class="column is-6">
<article class="box">
<h2 class="title is-6">By Operation</h2>
<table class="table is-fullwidth is-size-7 is-striped">
<thead>
<tr><th>Operation</th><th>Total</th><th>OK</th><th>Failed</th></tr>
</thead>
<tbody>
{% for row in operation_breakdown %}
<tr>
<td>{{ row.operation|default:"(none)" }}</td>
<td>{{ row.total }}</td>
<td>{{ row.ok }}</td>
<td>{{ row.failed }}</td>
</tr>
{% empty %}
<tr><td colspan="4">No runs yet.</td></tr>
{% endfor %}
</tbody>
</table>
<div class="columns is-multiline">
<div class="column is-12-tablet is-4-desktop">
<article class="card">
<header class="card-header">
<p class="card-header-title is-size-6">Reliability</p>
</header>
<div class="card-content">
<table class="table is-fullwidth is-narrow is-size-7">
<tbody>
<tr><th>Total Runs</th><td>{{ stats.total_runs }}</td></tr>
<tr><th>OK</th><td class="has-text-success">{{ stats.total_ok }}</td></tr>
<tr><th>Failed</th><td class="has-text-danger">{{ stats.total_failed }}</td></tr>
<tr><th>Success Rate</th><td>{{ stats.success_rate }}%</td></tr>
</tbody>
</table>
</div>
</article>
</div>
<div class="column is-6">
<article class="box">
<h2 class="title is-6">By Model</h2>
<table class="table is-fullwidth is-size-7 is-striped">
<thead>
<tr><th>Model</th><th>Total</th><th>OK</th><th>Failed</th></tr>
</thead>
<tbody>
{% for row in model_breakdown %}
<tr>
<td>{{ row.model|default:"(none)" }}</td>
<td>{{ row.total }}</td>
<td>{{ row.ok }}</td>
<td>{{ row.failed }}</td>
</tr>
{% empty %}
<tr><td colspan="4">No runs yet.</td></tr>
{% endfor %}
</tbody>
</table>
<div class="column is-12-tablet is-4-desktop">
<article class="card">
<header class="card-header">
<p class="card-header-title is-size-6">Throughput</p>
</header>
<div class="card-content">
<table class="table is-fullwidth is-narrow is-size-7">
<tbody>
<tr><th>Runs (24h)</th><td>{{ stats.last_24h_runs }}</td></tr>
<tr><th>Failed (24h)</th><td>{{ stats.last_24h_failed }}</td></tr>
<tr><th>Runs (7d)</th><td>{{ stats.last_7d_runs }}</td></tr>
<tr><th>Avg Duration</th><td>{{ stats.avg_duration_ms }}ms</td></tr>
</tbody>
</table>
</div>
</article>
</div>
<div class="column is-12-tablet is-4-desktop">
<article class="card">
<header class="card-header">
<p class="card-header-title is-size-6">Token Proxy (Chars)</p>
</header>
<div class="card-content">
<table class="table is-fullwidth is-narrow is-size-7">
<tbody>
<tr><th>Total Prompt</th><td>{{ stats.total_prompt_chars }}</td></tr>
<tr><th>Total Response</th><td>{{ stats.total_response_chars }}</td></tr>
<tr><th>Avg Prompt</th><td>{{ stats.avg_prompt_chars }}</td></tr>
<tr><th>Avg Response</th><td>{{ stats.avg_response_chars }}</td></tr>
</tbody>
</table>
</div>
</article>
</div>
</div>
<article class="box">
<h2 class="title is-6">Recent Runs</h2>
<div class="table-container">
<table class="table is-fullwidth is-size-7 is-striped">
<thead>
<tr>
<th>Started</th>
<th>Status</th>
<th>Operation</th>
<th>Model</th>
<th>Messages</th>
<th>Prompt</th>
<th>Response</th>
<th>Duration</th>
<th>Error</th>
</tr>
</thead>
<tbody>
{% for run in runs %}
<div class="columns">
<div class="column is-6">
<article class="card">
<header class="card-header">
<p class="card-header-title is-size-6">By Operation</p>
</header>
<div class="card-content">
<div class="table-container">
<table class="table is-fullwidth is-size-7 is-striped is-hoverable">
<thead>
<tr><th>Operation</th><th>Total</th><th>OK</th><th>Failed</th></tr>
</thead>
<tbody>
{% for row in operation_breakdown %}
<tr>
<td>{{ row.operation|default:"(none)" }}</td>
<td>{{ row.total }}</td>
<td class="has-text-success">{{ row.ok }}</td>
<td class="has-text-danger">{{ row.failed }}</td>
</tr>
{% empty %}
<tr><td colspan="4">No runs yet.</td></tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</article>
</div>
<div class="column is-6">
<article class="card">
<header class="card-header">
<p class="card-header-title is-size-6">By Model</p>
</header>
<div class="card-content">
<div class="table-container">
<table class="table is-fullwidth is-size-7 is-striped is-hoverable">
<thead>
<tr><th>Model</th><th>Total</th><th>OK</th><th>Failed</th></tr>
</thead>
<tbody>
{% for row in model_breakdown %}
<tr>
<td>{{ row.model|default:"(none)" }}</td>
<td>{{ row.total }}</td>
<td class="has-text-success">{{ row.ok }}</td>
<td class="has-text-danger">{{ row.failed }}</td>
</tr>
{% empty %}
<tr><td colspan="4">No runs yet.</td></tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</article>
</div>
</div>
<article class="card">
<header class="card-header">
<p class="card-header-title is-size-6">Recent Runs</p>
</header>
<div class="card-content">
<div class="table-container">
<table class="table is-fullwidth is-size-7 is-striped is-hoverable">
<thead>
<tr>
<td>{{ run.started_at }}</td>
<td>{{ run.status }}</td>
<td>{{ run.operation|default:"-" }}</td>
<td>{{ run.model|default:"-" }}</td>
<td>{{ run.message_count }}</td>
<td>{{ run.prompt_chars }}</td>
<td>{{ run.response_chars }}</td>
<td>{% if run.duration_ms %}{{ run.duration_ms }}ms{% else %}-{% endif %}</td>
<td style="max-width: 26rem; white-space: nowrap; overflow: hidden; text-overflow: ellipsis;" title="{{ run.error }}">{{ run.error|default:"-" }}</td>
<th>Started</th>
<th>Status</th>
<th>Operation</th>
<th>Model</th>
<th>Messages</th>
<th>Prompt</th>
<th>Response</th>
<th>Duration</th>
<th>Error</th>
</tr>
{% empty %}
<tr><td colspan="9">No runs yet.</td></tr>
{% endfor %}
</tbody>
</table>
</thead>
<tbody>
{% for run in runs %}
<tr>
<td>{{ run.started_at }}</td>
<td>
{% if run.status == "ok" %}
<span class="tag is-success is-light">ok</span>
{% elif run.status == "failed" %}
<span class="tag is-danger is-light">failed</span>
{% else %}
<span class="tag is-light">{{ run.status }}</span>
{% endif %}
</td>
<td>{{ run.operation|default:"-" }}</td>
<td>{{ run.model|default:"-" }}</td>
<td>{{ run.message_count }}</td>
<td>{{ run.prompt_chars }}</td>
<td>{{ run.response_chars }}</td>
<td>{% if run.duration_ms %}{{ run.duration_ms }}ms{% else %}-{% endif %}</td>
<td>
{% if run.error %}
<span title="{{ run.error }}">{{ run.error|truncatechars:120 }}</span>
{% else %}
-
{% endif %}
</td>
</tr>
{% empty %}
<tr><td colspan="9">No runs yet.</td></tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</article>
</div>

View File

@@ -381,6 +381,47 @@
</span>
{% endfor %}
</div>
<div
id="{{ panel_id }}-availability-summary"
class="tags are-small mt-1{% if not availability_summary %} is-hidden{% endif %}"
data-summary='{{ availability_summary_json|default:"{}"|escapejs }}'
aria-label="Contact availability summary">
{% if availability_summary %}
<span class="tag is-light {% if availability_summary.state == 'available' %}is-success{% elif availability_summary.state == 'fading' %}is-warning{% elif availability_summary.state == 'unavailable' %}is-danger{% endif %}">
{{ availability_summary.state_label }}
</span>
<span class="tag is-light">{{ availability_summary.service|upper|default:"-" }}</span>
{% if availability_summary.ts_label %}
<span class="tag is-light">Updated {{ availability_summary.ts_label }}</span>
{% endif %}
{% if availability_summary.is_cross_service %}
<span class="tag is-light">Cross-service fallback</span>
{% endif %}
{% endif %}
</div>
<div class="compose-history-nav" role="tablist" aria-label="Conversation views">
<button
type="button"
class="compose-history-tab is-active"
data-target="thread"
aria-selected="true">
Thread
</button>
<button
type="button"
class="compose-history-tab"
data-target="deleted"
aria-selected="false">
Deleted
<span id="{{ panel_id }}-deleted-count" class="compose-history-count">0</span>
</button>
</div>
<div id="{{ panel_id }}-deleted" class="compose-deleted-pane is-hidden">
<p id="{{ panel_id }}-deleted-empty" class="compose-empty">No deleted messages noted yet.</p>
<div id="{{ panel_id }}-deleted-list" class="compose-deleted-list"></div>
</div>
<div
id="{{ panel_id }}-thread"
@@ -397,12 +438,14 @@
data-quick-insights-url="{{ compose_quick_insights_url }}"
data-history-sync-url="{{ compose_history_sync_url }}"
data-react-url="{% url 'compose_react' %}"
data-capability-reactions="{% if capability_reactions %}1{% else %}0{% endif %}"
data-capability-reactions-reason="{{ capability_reactions_reason|default:''|escape }}"
data-reaction-actor-prefix="web:{{ request.user.id }}:"
data-toggle-command-url="{{ compose_toggle_command_url }}"
data-engage-preview-url="{{ compose_engage_preview_url }}"
data-engage-send-url="{{ compose_engage_send_url }}">
{% for msg in serialized_messages %}
<div class="compose-row {% if msg.outgoing %}is-out{% else %}is-in{% endif %}" data-ts="{{ msg.ts }}" data-message-id="{{ msg.id }}" data-author="{{ msg.author|default:''|escape }}" data-sender-uuid="{{ msg.sender_uuid|default:''|escape }}" data-display-ts="{{ msg.display_ts|escape }}" data-source-service="{{ msg.source_service|default:''|escape }}" data-source-label="{{ msg.source_label|default:''|escape }}" data-source-message-id="{{ msg.source_message_id|default:''|escape }}" data-direction="{% if msg.outgoing %}outgoing{% else %}incoming{% endif %}"{% if msg.reply_to_id %} data-reply-to-id="{{ msg.reply_to_id }}"{% endif %} data-reply-snippet="{{ msg.display_text|default:msg.text|default:''|truncatechars:120|escape }}">
<div class="compose-row {% if msg.outgoing %}is-out{% else %}is-in{% endif %}{% if msg.is_deleted %} is-deleted{% endif %}" data-ts="{{ msg.ts }}" data-message-id="{{ msg.id }}" data-author="{{ msg.author|default:''|escape }}" data-sender-uuid="{{ msg.sender_uuid|default:''|escape }}" data-display-ts="{{ msg.display_ts|escape }}" data-source-service="{{ msg.source_service|default:''|escape }}" data-source-label="{{ msg.source_label|default:''|escape }}" data-source-message-id="{{ msg.source_message_id|default:''|escape }}" data-direction="{% if msg.outgoing %}outgoing{% else %}incoming{% endif %}" data-is-deleted="{% if msg.is_deleted %}1{% else %}0{% endif %}" data-deleted-ts="{{ msg.deleted_ts|default:0 }}" data-deleted-display="{{ msg.deleted_display|default:''|escape }}" data-deleted-actor="{{ msg.deleted_actor|default:''|escape }}" data-deleted-source="{{ msg.deleted_source_service|default:''|escape }}" data-edit-count="{{ msg.edit_count|default:0 }}" data-edit-history="{{ msg.edit_history_json|default:'[]'|escapejs }}" data-raw-text="{{ msg.text|default:''|truncatechars:220|escape }}"{% if msg.reply_to_id %} data-reply-to-id="{{ msg.reply_to_id }}"{% endif %} data-reply-snippet="{{ msg.display_text|default:msg.text|default:''|truncatechars:120|escape }}">
{% if msg.gap_fragments %}
{% with gap=msg.gap_fragments.0 %}
<p
@@ -460,7 +503,26 @@
{% else %}
<p class="compose-body compose-image-fallback is-hidden">(no text)</p>
{% endif %}
{% if service == "signal" or service == "whatsapp" %}
{% if msg.edit_count %}
<details class="compose-edit-history">
<summary>Edited {{ msg.edit_count }} time{% if msg.edit_count != 1 %}s{% endif %}</summary>
<ul>
{% for edit in msg.edit_history %}
<li>
{% if edit.edited_display %}{{ edit.edited_display }}{% else %}Unknown time{% endif %}
{% if edit.actor %} · {{ edit.actor }}{% endif %}
{% if edit.source_service %} · {{ edit.source_service|upper }}{% endif %}
<div class="compose-edit-diff">
<span class="compose-edit-old">{{ edit.previous_text|default:"(empty)" }}</span>
<span class="compose-edit-arrow"></span>
<span class="compose-edit-new">{{ edit.new_text|default:"(empty)" }}</span>
</div>
</li>
{% endfor %}
</ul>
</details>
{% endif %}
{% if capability_reactions %}
<div class="compose-reaction-actions" data-message-id="{{ msg.id }}">
<button type="button" class="compose-react-btn" data-emoji="👍" title="React with thumbs up">👍</button>
<button type="button" class="compose-react-btn" data-emoji="❤️" title="React with heart">❤️</button>
@@ -495,6 +557,12 @@
{% endif %}
<p class="compose-msg-meta">
{{ msg.display_ts }}{% if msg.author %} · {{ msg.author }}{% endif %}
{% if msg.is_edited %}
<span class="compose-msg-flag is-edited" title="Message edited{% if msg.last_edit_display %} at {{ msg.last_edit_display }}{% endif %}">edited</span>
{% endif %}
{% if msg.is_deleted %}
<span class="compose-msg-flag is-deleted" title="Deleted{% if msg.deleted_display %} at {{ msg.deleted_display }}{% endif %}{% if msg.deleted_actor %} by {{ msg.deleted_actor }}{% endif %}">deleted</span>
{% endif %}
{% if msg.read_ts %}
<span
class="compose-ticks js-receipt-trigger"
@@ -561,8 +629,11 @@
<input type="hidden" name="failsafe_confirm" value="0">
<div class="compose-send-safety">
<label class="checkbox is-size-7">
<input type="checkbox" class="manual-confirm"> Confirm Send
<input type="checkbox" class="manual-confirm"{% if not capability_send %} disabled{% endif %}> Confirm Send
</label>
{% if not capability_send %}
<p class="help is-size-7 has-text-grey">Send disabled: {{ capability_send_reason }}</p>
{% endif %}
</div>
<div id="{{ panel_id }}-reply-banner" class="compose-reply-banner is-hidden">
<span class="compose-reply-banner-label">Replying to:</span>
@@ -576,7 +647,7 @@
name="text"
rows="1"
placeholder="Type a message. Enter to send, Shift+Enter for newline."></textarea>
<button class="button is-link is-light compose-send-btn" type="submit" disabled>
<button class="button is-link is-light compose-send-btn" type="submit" disabled{% if not capability_send %} title="{{ capability_send_reason }}"{% endif %}>
<span class="icon is-small"><i class="{{ manual_icon_class }}"></i></span>
<span>Send</span>
</button>
@@ -605,6 +676,134 @@
padding: 0.65rem;
background: linear-gradient(180deg, rgba(248, 250, 252, 0.7), rgba(255, 255, 255, 0.98));
}
#{{ panel_id }} .compose-history-nav {
margin-top: 0.45rem;
display: inline-flex;
gap: 0.35rem;
}
#{{ panel_id }} .compose-history-tab {
border: 1px solid rgba(38, 68, 111, 0.24);
background: #f3f7fc;
color: #2b4364;
border-radius: 999px;
padding: 0.2rem 0.58rem;
font-size: 0.68rem;
font-weight: 600;
line-height: 1.1;
cursor: pointer;
}
#{{ panel_id }} .compose-history-tab.is-active {
background: #2b4f7a;
color: #fff;
border-color: #2b4f7a;
}
#{{ panel_id }} .compose-history-count {
margin-left: 0.22rem;
display: inline-block;
min-width: 1.05rem;
text-align: center;
border-radius: 999px;
background: rgba(255, 255, 255, 0.35);
font-size: 0.62rem;
padding: 0.03rem 0.24rem;
}
#{{ panel_id }} .compose-deleted-pane {
margin-top: 0.55rem;
margin-bottom: 0.55rem;
min-height: 8rem;
max-height: 46vh;
overflow-y: auto;
border: 1px solid rgba(0, 0, 0, 0.12);
border-radius: 8px;
padding: 0.6rem;
background: linear-gradient(180deg, rgba(253, 248, 247, 0.85), rgba(255, 255, 255, 0.98));
}
#{{ panel_id }} .compose-deleted-pane.is-hidden {
display: none;
}
#{{ panel_id }} .compose-deleted-item {
border: 1px solid rgba(181, 96, 80, 0.2);
border-radius: 8px;
padding: 0.4rem 0.5rem;
margin-bottom: 0.4rem;
background: rgba(255, 248, 247, 0.98);
}
#{{ panel_id }} .compose-deleted-item:last-child {
margin-bottom: 0;
}
#{{ panel_id }} .compose-deleted-meta {
display: flex;
flex-wrap: wrap;
gap: 0.28rem;
font-size: 0.65rem;
color: #7b4c42;
margin-bottom: 0.18rem;
}
#{{ panel_id }} .compose-deleted-preview {
margin: 0;
font-size: 0.71rem;
color: #4b3b38;
white-space: pre-wrap;
word-break: break-word;
}
#{{ panel_id }} .compose-deleted-jump {
margin-top: 0.3rem;
}
#{{ panel_id }} .compose-msg-flag {
display: inline-block;
margin-left: 0.3rem;
border-radius: 999px;
padding: 0.03rem 0.34rem;
font-size: 0.58rem;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.02em;
}
#{{ panel_id }} .compose-msg-flag.is-edited {
color: #7d5010;
background: rgba(255, 241, 214, 0.95);
border: 1px solid rgba(169, 115, 31, 0.28);
}
#{{ panel_id }} .compose-msg-flag.is-deleted {
color: #842f2f;
background: rgba(255, 228, 228, 0.95);
border: 1px solid rgba(173, 52, 52, 0.28);
}
#{{ panel_id }} .compose-edit-history {
margin-top: 0.28rem;
border-radius: 8px;
border: 1px solid rgba(124, 102, 63, 0.25);
background: rgba(255, 252, 245, 0.96);
padding: 0.24rem 0.4rem;
font-size: 0.64rem;
}
#{{ panel_id }} .compose-edit-history summary {
cursor: pointer;
color: #7a5a22;
font-weight: 600;
}
#{{ panel_id }} .compose-edit-history ul {
margin: 0.25rem 0 0;
padding-left: 1.05rem;
}
#{{ panel_id }} .compose-edit-diff {
margin-top: 0.08rem;
display: flex;
gap: 0.22rem;
align-items: baseline;
}
#{{ panel_id }} .compose-edit-old {
color: #6e6a66;
text-decoration: line-through;
}
#{{ panel_id }} .compose-edit-new {
color: #2f4f78;
font-weight: 600;
}
#{{ panel_id }} .compose-row.is-deleted .compose-bubble {
border-style: dashed;
opacity: 0.96;
}
#{{ panel_id }} .compose-availability-lane {
margin-top: 0.42rem;
display: flex;
@@ -1932,6 +2131,12 @@
const exportClear = document.getElementById(panelId + "-export-clear");
const exportBuffer = document.getElementById(panelId + "-export-buffer");
const availabilityLane = document.getElementById(panelId + "-availability");
const availabilitySummaryNode = document.getElementById(panelId + "-availability-summary");
const deletedPane = document.getElementById(panelId + "-deleted");
const deletedList = document.getElementById(panelId + "-deleted-list");
const deletedEmpty = document.getElementById(panelId + "-deleted-empty");
const deletedCountNode = document.getElementById(panelId + "-deleted-count");
const historyTabs = Array.from(panel.querySelectorAll(".compose-history-tab"));
const csrfToken = "{{ csrf_token }}";
if (lightbox && lightbox.parentElement !== document.body) {
document.body.appendChild(lightbox);
@@ -1976,6 +2181,7 @@
rangeStartId: "",
rangeEndId: "",
rangeMode: "inside",
historyView: "thread",
};
window.giaComposePanels[panelId] = panelState;
const triggerButtons = Array.from(panel.querySelectorAll(".js-ai-trigger"));
@@ -2014,6 +2220,13 @@
const parsed = parseInt(value || "0", 10);
return Number.isFinite(parsed) ? parsed : 0;
};
const parseJsonSafe = function (value, fallback) {
    // Defensive JSON.parse: coerce input to a string and return `fallback`
    // on any parse failure instead of throwing.
    let decoded;
    try {
        decoded = JSON.parse(String(value || ""));
    } catch (_ignored) {
        return fallback;
    }
    return decoded;
};
const minuteBucketFromTs = function (tsValue) {
const ts = toInt(tsValue);
@@ -2848,9 +3061,9 @@
const QUICK_REACTION_EMOJIS = ["👍", "❤️", "😂", "😮", "😢", "😡"];
const supportsReactions = function () {
    // Reactions are offered only when BOTH hold: a react endpoint is
    // configured on the thread node AND the server set the per-service
    // capability flag (data-capability-reactions="1").
    //
    // Fix: the previous body contained two return statements — the stale
    // hard-coded service check ("signal"/"whatsapp") returned first, making
    // the capability-flag lines below it unreachable dead code.
    const reactUrl = String(thread.dataset.reactUrl || "").trim();
    const capability = String(thread.dataset.capabilityReactions || "").trim() === "1";
    return !!reactUrl && capability;
};
const reactionActorKeyForService = function (service) {
const prefix = String(thread.dataset.reactionActorPrefix || "web::");
@@ -2977,6 +3190,46 @@
bar.appendChild(menu);
return bar;
};
const renderEditHistoryDetails = function (bubble, msg) {
    // Append a collapsible <details> edit-history section to a message
    // bubble. No-op when the bubble is missing or there are no edits.
    const entries = (msg && Array.isArray(msg.edit_history)) ? msg.edit_history : [];
    if (!bubble || entries.length === 0) {
        return;
    }
    const details = document.createElement("details");
    details.className = "compose-edit-history";
    const summary = document.createElement("summary");
    summary.textContent = "Edited " + entries.length + (entries.length === 1 ? " time" : " times");
    details.appendChild(summary);
    const list = document.createElement("ul");
    for (const entry of entries) {
        const item = document.createElement("li");
        const when = String((entry && entry.edited_display) || "").trim() || "Unknown time";
        const who = String((entry && entry.actor) || "").trim();
        const via = String((entry && entry.source_service) || "").trim();
        let label = when;
        if (who) {
            label += " · " + who;
        }
        if (via) {
            label += " · " + via.toUpperCase();
        }
        item.textContent = label;
        // Old → new diff line beneath the metadata.
        const diff = document.createElement("div");
        diff.className = "compose-edit-diff";
        const before = document.createElement("span");
        before.className = "compose-edit-old";
        before.textContent = String((entry && entry.previous_text) || "(empty)");
        const arrow = document.createElement("span");
        arrow.className = "compose-edit-arrow";
        arrow.textContent = "->";
        const after = document.createElement("span");
        after.className = "compose-edit-new";
        after.textContent = String((entry && entry.new_text) || "(empty)");
        diff.appendChild(before);
        diff.appendChild(arrow);
        diff.appendChild(after);
        item.appendChild(diff);
        list.appendChild(item);
    }
    details.appendChild(list);
    bubble.appendChild(details);
};
const appendBubble = function (msg) {
const messageId = String(msg && msg.id ? msg.id : "").trim();
@@ -2991,6 +3244,9 @@
const row = document.createElement("div");
const outgoing = !!msg.outgoing;
row.className = "compose-row " + (outgoing ? "is-out" : "is-in");
if (msg.is_deleted) {
row.classList.add("is-deleted");
}
row.dataset.ts = String(msg.ts || 0);
row.dataset.minute = minuteBucketFromTs(msg.ts || 0);
row.dataset.replySnippet = normalizeSnippet(
@@ -3003,6 +3259,20 @@
row.dataset.sourceLabel = String(msg.source_label || "");
row.dataset.sourceMessageId = String(msg.source_message_id || "");
row.dataset.direction = outgoing ? "outgoing" : "incoming";
row.dataset.isDeleted = msg.is_deleted ? "1" : "0";
row.dataset.deletedTs = String(msg.deleted_ts || 0);
row.dataset.deletedDisplay = String(msg.deleted_display || "");
row.dataset.deletedActor = String(msg.deleted_actor || "");
row.dataset.deletedSource = String(msg.deleted_source_service || "");
row.dataset.editCount = String(msg.edit_count || 0);
try {
row.dataset.editHistory = JSON.stringify(
Array.isArray(msg.edit_history) ? msg.edit_history : []
);
} catch (_err) {
row.dataset.editHistory = "[]";
}
row.dataset.rawText = String(msg.text || "");
if (msg.reply_to_id) {
row.dataset.replyToId = String(msg.reply_to_id || "");
}
@@ -3078,6 +3348,7 @@
fallback.textContent = "(no text)";
bubble.appendChild(fallback);
}
renderEditHistoryDetails(bubble, msg);
const reactionBar = buildReactionActions(messageId);
if (reactionBar) {
bubble.appendChild(reactionBar);
@@ -3094,6 +3365,22 @@
metaText += " · " + String(msg.author);
}
meta.textContent = metaText;
if (msg.is_edited) {
const editedFlag = document.createElement("span");
editedFlag.className = "compose-msg-flag is-edited";
editedFlag.title = "Message edited" + (msg.last_edit_display ? (" at " + String(msg.last_edit_display)) : "");
editedFlag.textContent = "edited";
meta.appendChild(editedFlag);
}
if (msg.is_deleted) {
const deletedFlag = document.createElement("span");
deletedFlag.className = "compose-msg-flag is-deleted";
deletedFlag.title = "Deleted"
+ (msg.deleted_display ? (" at " + String(msg.deleted_display)) : "")
+ (msg.deleted_actor ? (" by " + String(msg.deleted_actor)) : "");
deletedFlag.textContent = "deleted";
meta.appendChild(deletedFlag);
}
// Render delivery/read ticks and a small time label when available.
if (msg.read_ts) {
const tickWrap = document.createElement("span");
@@ -3164,6 +3451,7 @@
wireImageFallbacks(row);
bindReplyReferences(row);
updateGlanceFromMessage(msg);
renderDeletedList();
};
// Receipt popover (similar to contact info popover)
@@ -3444,6 +3732,69 @@
availabilityLane.classList.remove("is-hidden");
};
const renderAvailabilitySummary = function (summary, slices) {
    // Render the availability summary tag strip. `summary` is the
    // server-provided object (may be empty); when it has no `state`, derive
    // one from the freshest entry in `slices`. Hides the node when no state
    // can be determined either way.
    if (!availabilitySummaryNode) {
        return;
    }
    const rows = Array.isArray(slices) ? slices : [];
    // Shallow-copy so the caller's summary object is never mutated.
    let source = (
        summary && typeof summary === "object"
    ) ? Object.assign({}, summary) : {};
    if (!source.state && rows.length > 0) {
        // Fallback: take the slice with the largest end_ts as the freshest.
        const newest = rows.reduce(function (best, item) {
            if (!best) return item;
            return Number(item.end_ts || 0) > Number(best.end_ts || 0) ? item : best;
        }, null);
        if (newest) {
            source.state = String(newest.state || "unknown").toLowerCase();
            source.state_label = source.state.charAt(0).toUpperCase() + source.state.slice(1);
            source.service = String(newest.service || "").toLowerCase();
            source.confidence = Number(newest.confidence_end || 0);
            // Provenance hint carried in the slice payload, if present.
            source.source_kind = String(
                ((newest.payload && newest.payload.inferred_from) || (newest.payload && newest.payload.extended_by) || "")
            ).trim();
            source.ts = Number(newest.end_ts || 0);
            source.ts_label = source.ts > 0 ? new Date(source.ts).toLocaleTimeString() : "";
        }
    }
    if (!source.state) {
        // Nothing to show: clear out and hide the strip.
        availabilitySummaryNode.classList.add("is-hidden");
        availabilitySummaryNode.innerHTML = "";
        return;
    }
    const state = String(source.state || "unknown").toLowerCase();
    const service = String(source.service || "").toUpperCase();
    // State tag, coloured by severity (success / warning / danger).
    const stateTag = document.createElement("span");
    stateTag.className = "tag is-light";
    if (state === "available") stateTag.classList.add("is-success");
    if (state === "fading") stateTag.classList.add("is-warning");
    if (state === "unavailable") stateTag.classList.add("is-danger");
    stateTag.textContent = String(source.state_label || (state.charAt(0).toUpperCase() + state.slice(1)));
    const serviceTag = document.createElement("span");
    serviceTag.className = "tag is-light";
    serviceTag.textContent = service || "-";
    // Rebuild the strip from scratch on every render.
    availabilitySummaryNode.innerHTML = "";
    availabilitySummaryNode.appendChild(stateTag);
    availabilitySummaryNode.appendChild(serviceTag);
    if (source.ts_label) {
        const tsTag = document.createElement("span");
        tsTag.className = "tag is-light";
        tsTag.textContent = "Updated " + String(source.ts_label);
        availabilitySummaryNode.appendChild(tsTag);
    }
    if (source.is_cross_service) {
        const fallbackTag = document.createElement("span");
        fallbackTag.className = "tag is-light";
        fallbackTag.textContent = "Cross-service fallback";
        availabilitySummaryNode.appendChild(fallbackTag);
    }
    availabilitySummaryNode.classList.remove("is-hidden");
};
const applyTyping = function (typingPayload) {
if (!typingNode || !typingPayload || typeof typingPayload !== "object") {
return;
@@ -3481,6 +3832,9 @@
if (payload.availability_slices) {
renderAvailabilitySlices(payload.availability_slices);
}
if (payload.availability_summary || payload.availability_slices) {
renderAvailabilitySummary(payload.availability_summary, payload.availability_slices);
}
if (payload.last_ts !== undefined && payload.last_ts !== null) {
lastTs = Math.max(lastTs, toInt(payload.last_ts));
thread.dataset.lastTs = String(lastTs);
@@ -3521,6 +3875,9 @@
if (payload.availability_slices) {
renderAvailabilitySlices(payload.availability_slices);
}
if (payload.availability_summary || payload.availability_slices) {
renderAvailabilitySummary(payload.availability_summary, payload.availability_slices);
}
if (payload.last_ts !== undefined && payload.last_ts !== null) {
lastTs = Math.max(lastTs, toInt(payload.last_ts));
thread.dataset.lastTs = String(lastTs);
@@ -3544,6 +3901,7 @@
const armInput = form.querySelector("input[name='failsafe_arm']");
const confirmInput = form.querySelector("input[name='failsafe_confirm']");
const sendButton = form.querySelector(".compose-send-btn");
const sendCapabilityEnabled = {{ capability_send|yesno:"true,false" }};
const updateManualSafety = function () {
const confirm = !!(manualConfirm && manualConfirm.checked);
if (armInput) {
@@ -3553,7 +3911,7 @@
confirmInput.value = confirm ? "1" : "0";
}
if (sendButton) {
sendButton.disabled = !confirm;
sendButton.disabled = (!confirm) || (!sendCapabilityEnabled);
}
};
if (manualConfirm) {
@@ -3561,14 +3919,21 @@
}
updateManualSafety();
try {
const initialTyping = JSON.parse("{{ typing_state_json|escapejs }}");
applyTyping(initialTyping);
try {
const initialSlices = JSON.parse(String((availabilityLane && availabilityLane.dataset.slices) || "[]"));
renderAvailabilitySlices(initialSlices);
} catch (err) {
renderAvailabilitySlices([]);
}
const initialTyping = JSON.parse("{{ typing_state_json|escapejs }}");
applyTyping(initialTyping);
try {
const initialSlices = JSON.parse(
String((availabilityLane && availabilityLane.dataset.slices) || "[]")
);
const initialSummary = JSON.parse(
String((availabilitySummaryNode && availabilitySummaryNode.dataset.summary) || "{}")
);
renderAvailabilitySlices(initialSlices);
renderAvailabilitySummary(initialSummary, initialSlices);
} catch (err) {
renderAvailabilitySlices([]);
renderAvailabilitySummary({}, []);
}
} catch (err) {
// Ignore invalid initial typing state payload.
}
@@ -3692,6 +4057,120 @@
const allMessageRows = function () {
    // Snapshot every rendered message row (rows carry data-message-id).
    const nodeList = thread.querySelectorAll(".compose-row[data-message-id]");
    return Array.prototype.slice.call(nodeList);
};
const readRowEditHistory = function (row) {
    // Decode the JSON edit history stashed on a row's dataset; always
    // returns an array (empty on missing row, bad JSON, or non-array data).
    if (!(row && row.dataset)) {
        return [];
    }
    const history = parseJsonSafe(row.dataset.editHistory || "[]", []);
    if (Array.isArray(history)) {
        return history;
    }
    return [];
};
// Rebuild the "Deleted" pane from rows currently present in the thread DOM.
// Rows flagged data-is-deleted="1" are listed newest-deletion-first, each
// with its deletion metadata line, a text preview, any recorded edit
// history, and a "Jump to message" button (handled elsewhere by a
// delegated click listener on the list container).
const renderDeletedList = function () {
    if (!deletedList || !deletedEmpty) {
        return;
    }
    const deletedRows = allMessageRows()
        .filter(function (row) {
            return String(row.dataset.isDeleted || "0") === "1";
        })
        .sort(function (a, b) {
            // Most recently deleted first.
            return toInt(b.dataset.deletedTs || 0) - toInt(a.dataset.deletedTs || 0);
        });
    if (deletedCountNode) {
        deletedCountNode.textContent = String(deletedRows.length);
    }
    deletedList.innerHTML = "";
    if (!deletedRows.length) {
        deletedEmpty.classList.remove("is-hidden");
        return;
    }
    deletedEmpty.classList.add("is-hidden");
    deletedRows.forEach(function (row) {
        const messageId = String(row.dataset.messageId || "").trim();
        // Prefer the formatted deletion timestamp, then the row's own
        // display timestamp, then a placeholder dash.
        const deletedTs = String(row.dataset.deletedDisplay || "").trim() || String(row.dataset.displayTs || "").trim() || "-";
        const deletedActor = String(row.dataset.deletedActor || "").trim() || "unknown";
        const deletedSource = String(row.dataset.deletedSource || "").trim() || "unknown";
        const preview = normalizeSnippet(row.dataset.rawText || row.dataset.replySnippet || "(message deleted)");
        const edits = readRowEditHistory(row);
        const card = document.createElement("article");
        card.className = "compose-deleted-item";
        const meta = document.createElement("p");
        meta.className = "compose-deleted-meta";
        meta.textContent = "Deleted " + deletedTs + " by " + deletedActor + " via " + deletedSource.toUpperCase();
        card.appendChild(meta);
        const text = document.createElement("p");
        text.className = "compose-deleted-preview";
        text.textContent = preview;
        card.appendChild(text);
        if (edits.length) {
            // Collapsible per-message edit history showing old -> new diffs.
            const details = document.createElement("details");
            details.className = "compose-edit-history";
            const summary = document.createElement("summary");
            summary.textContent = "Edit history (" + edits.length + ")";
            details.appendChild(summary);
            const list = document.createElement("ul");
            edits.forEach(function (entry) {
                const li = document.createElement("li");
                const editedDisplay = String((entry && entry.edited_display) || "").trim() || "Unknown time";
                const actor = String((entry && entry.actor) || "").trim();
                const source = String((entry && entry.source_service) || "").trim();
                const oldText = String((entry && entry.previous_text) || "(empty)");
                const newText = String((entry && entry.new_text) || "(empty)");
                li.textContent = editedDisplay + (actor ? (" · " + actor) : "") + (source ? (" · " + source.toUpperCase()) : "");
                const diff = document.createElement("div");
                diff.className = "compose-edit-diff";
                const oldNode = document.createElement("span");
                oldNode.className = "compose-edit-old";
                oldNode.textContent = oldText;
                const arrow = document.createElement("span");
                arrow.className = "compose-edit-arrow";
                arrow.textContent = "->";
                const newNode = document.createElement("span");
                newNode.className = "compose-edit-new";
                newNode.textContent = newText;
                diff.appendChild(oldNode);
                diff.appendChild(arrow);
                diff.appendChild(newNode);
                li.appendChild(diff);
                list.appendChild(li);
            });
            details.appendChild(list);
            card.appendChild(details);
        }
        const jump = document.createElement("button");
        jump.type = "button";
        jump.className = "button is-light is-small compose-deleted-jump";
        jump.dataset.targetId = messageId;
        jump.textContent = "Jump to message";
        card.appendChild(jump);
        deletedList.appendChild(card);
    });
};
// Toggle the history panel between the live thread and the deleted-messages
// pane, keeping tab active/aria state and panelState.historyView in sync.
// Anything other than "deleted" (any case, padded) falls back to "thread".
const switchHistoryView = function (viewName) {
    const normalized = String(viewName || "thread").trim().toLowerCase();
    const target = normalized === "deleted" ? "deleted" : "thread";
    panelState.historyView = target;
    historyTabs.forEach(function (tab) {
        const isActive = String(tab.dataset.target || "") === target;
        tab.classList.toggle("is-active", isActive);
        tab.setAttribute("aria-selected", isActive ? "true" : "false");
    });
    const showDeleted = target === "deleted";
    if (thread) {
        thread.classList.toggle("is-hidden", showDeleted);
    }
    if (deletedPane) {
        deletedPane.classList.toggle("is-hidden", !showDeleted);
    }
};
const selectedRangeRows = function () {
const rows = allMessageRows();
@@ -4172,7 +4651,33 @@
});
});
};
historyTabs.forEach(function (tab) {
tab.addEventListener("click", function () {
switchHistoryView(String(tab.dataset.target || "thread"));
});
});
if (deletedList) {
deletedList.addEventListener("click", function (ev) {
const jumpBtn = ev.target.closest && ev.target.closest(".compose-deleted-jump");
if (!jumpBtn) {
return;
}
const targetId = String(jumpBtn.dataset.targetId || "").trim();
if (!targetId) {
return;
}
switchHistoryView("thread");
const row = rowByMessageId(targetId);
if (!row) {
return;
}
row.scrollIntoView({ behavior: "smooth", block: "center" });
flashReplyTarget(row);
});
}
bindReplyReferences(panel);
renderDeletedList();
switchHistoryView(panelState.historyView);
initExportUi();
if (replyClearBtn) {
replyClearBtn.addEventListener("click", function () {
@@ -4264,6 +4769,16 @@
panelState.websocketReady = false;
hideAllCards();
thread.innerHTML = '<p class="compose-empty">Loading messages...</p>';
if (deletedList) {
deletedList.innerHTML = "";
}
if (deletedEmpty) {
deletedEmpty.classList.remove("is-hidden");
}
if (deletedCountNode) {
deletedCountNode.textContent = "0";
}
switchHistoryView("thread");
lastTs = 0;
thread.dataset.lastTs = "0";
panelState.seenMessageIds = new Set();

View File

@@ -0,0 +1,18 @@
import re
from django import template
register = template.Library()
@register.filter
def humanize_route(value):
    """Turn a URL route fragment into a human-readable Title Case label.

    Leading/trailing slashes are dropped; runs of slashes, underscores and
    hyphens collapse to single spaces. Empty or separator-only input (and
    ``None``) yields the empty string.
    """
    cleaned = str(value or "").strip().strip("/")
    words = re.sub(r"[/_-]+", " ", cleaned)
    words = re.sub(r"\s+", " ", words).strip()
    return words.title() if words else ""

View File

@@ -0,0 +1,67 @@
from __future__ import annotations
import json
from unittest.mock import AsyncMock, patch
from django.test import TestCase
from django.urls import reverse
from core.models import User
class ComposeSendCapabilityTests(TestCase):
    """Exercise compose send-capability gating for services without send support."""

    def setUp(self):
        # Compose views require an authenticated user.
        self.user = User.objects.create_user("compose-send", "send@example.com", "pw")
        self.client.force_login(self.user)

    @patch("core.views.compose.transport.enqueue_runtime_command")
    @patch("core.views.compose.transport.send_message_raw", new_callable=AsyncMock)
    def test_unsupported_send_fails_fast_without_dispatch(
        self,
        mocked_send_message_raw,
        mocked_enqueue_runtime_command,
    ):
        """POSTing to an unsupported service warns and dispatches nothing."""
        response = self.client.post(
            reverse("compose_send"),
            {
                "service": "xmpp",
                "identifier": "person@example.com",
                "text": "hello",
                "failsafe_arm": "1",
                "failsafe_confirm": "1",
            },
        )
        self.assertEqual(200, response.status_code)
        # The view reports its outcome via an HX-Trigger JSON payload.
        payload = json.loads(response.headers["HX-Trigger"])["composeSendResult"]
        self.assertFalse(payload["ok"])
        self.assertEqual("warning", payload["level"])
        self.assertIn("Send not supported:", payload["message"])
        # Fail-fast: neither the raw send nor the runtime queue may be touched.
        mocked_send_message_raw.assert_not_awaited()
        mocked_enqueue_runtime_command.assert_not_called()

    def test_compose_page_displays_send_disabled_reason_for_unsupported_service(self):
        """Compose page renders the disabled-send reason and keeps the button."""
        response = self.client.get(
            reverse("compose_page"),
            {
                "service": "xmpp",
                "identifier": "person@example.com",
            },
        )
        self.assertEqual(200, response.status_code)
        content = response.content.decode("utf-8")
        self.assertIn("Send disabled:", content)
        self.assertIn("compose-send-btn", content)

    def test_compose_page_uses_humanized_browser_title(self):
        """Browser title is the humanized route name plus the site suffix."""
        response = self.client.get(
            reverse("compose_page"),
            {
                "service": "signal",
                "identifier": "+15551230000",
            },
        )
        self.assertEqual(200, response.status_code)
        self.assertContains(response, "<title>Compose Page · GIA</title>", html=False)

View File

@@ -0,0 +1,239 @@
from __future__ import annotations
from pathlib import Path
from django.test import TestCase, override_settings
from core.mcp.tools import execute_tool, tool_specs
from core.models import (
AIRequest,
MCPToolAuditLog,
MemoryItem,
TaskProject,
User,
WorkspaceConversation,
DerivedTask,
DerivedTaskEvent,
)
@override_settings(MEMORY_SEARCH_BACKEND="django")
class MCPToolTests(TestCase):
    """End-to-end coverage of the MCP tool registry through execute_tool().

    Uses the Django memory-search backend so no external search service is
    needed. Fixtures: one memory item, one project with a task, and one
    task event, all owned by a single superuser.
    """

    def setUp(self):
        # All tools scope their queries by user_id; the superuser owns
        # every fixture created below.
        self.user = User.objects.create_superuser(
            username="mcp-tools-user",
            email="mcp-tools@example.com",
            password="pw",
        )
        self.conversation = WorkspaceConversation.objects.create(
            user=self.user,
            platform_type="signal",
            title="MCP Memory Scope",
            platform_thread_id="mcp-thread-1",
        )
        request = AIRequest.objects.create(
            user=self.user,
            conversation=self.conversation,
            window_spec={},
            operation="memory_propose",
        )
        self.memory = MemoryItem.objects.create(
            user=self.user,
            conversation=self.conversation,
            memory_kind="fact",
            status="active",
            content={"text": "Prefers concise implementation notes."},
            source_request=request,
            confidence_score=0.8,
        )
        self.project = TaskProject.objects.create(user=self.user, name="MCP Project")
        self.task = DerivedTask.objects.create(
            user=self.user,
            project=self.project,
            title="Wire MCP server",
            source_service="signal",
            source_channel="team-chat",
            status_snapshot="open",
            immutable_payload={"scope": "memory"},
        )
        DerivedTaskEvent.objects.create(
            task=self.task,
            event_type="created",
            actor_identifier="agent",
            payload={"note": "task created"},
        )

    def test_tool_specs_include_memory_task_wiki_tools(self):
        """The published tool specs expose the expected tool names."""
        names = {item["name"] for item in tool_specs()}
        self.assertIn("manticore.status", names)
        self.assertIn("memory.propose", names)
        self.assertIn("tasks.link_artifact", names)
        self.assertIn("wiki.create_article", names)
        self.assertIn("project.get_runbook", names)

    def test_manticore_query_and_tasks_tools(self):
        """manticore.query and the tasks.* read tools surface the fixtures."""
        memory_payload = execute_tool(
            "manticore.query",
            {"user_id": self.user.id, "query": "concise implementation"},
        )
        self.assertGreaterEqual(int(memory_payload.get("count") or 0), 1)
        first_hit = (memory_payload.get("hits") or [{}])[0]
        self.assertEqual(str(self.memory.id), str(first_hit.get("memory_id")))
        list_payload = execute_tool("tasks.list", {"user_id": self.user.id, "limit": 10})
        self.assertEqual(1, int(list_payload.get("count") or 0))
        self.assertEqual(str(self.task.id), str((list_payload.get("items") or [{}])[0].get("id")))
        search_payload = execute_tool(
            "tasks.search",
            {"user_id": self.user.id, "query": "wire"},
        )
        self.assertEqual(1, int(search_payload.get("count") or 0))
        events_payload = execute_tool("tasks.events", {"task_id": str(self.task.id), "limit": 5})
        self.assertEqual(1, int(events_payload.get("count") or 0))
        self.assertEqual("created", str((events_payload.get("items") or [{}])[0].get("event_type")))

    def test_memory_proposal_review_flow(self):
        """Propose -> pending -> approve leaves an active, searchable memory."""
        propose_payload = execute_tool(
            "memory.propose",
            {
                "user_id": self.user.id,
                "conversation_id": str(self.conversation.id),
                "memory_kind": "fact",
                "content": {"field": "likes", "text": "short status bullets"},
                "reason": "Operator memory capture",
                "requested_by_identifier": "unit-test",
            },
        )
        request_id = str((propose_payload.get("request") or {}).get("id") or "")
        self.assertTrue(request_id)
        pending_payload = execute_tool("memory.pending", {"user_id": self.user.id})
        self.assertGreaterEqual(int(pending_payload.get("count") or 0), 1)
        review_payload = execute_tool(
            "memory.review",
            {
                "user_id": self.user.id,
                "request_id": request_id,
                "decision": "approve",
                "reviewer_identifier": "admin",
            },
        )
        memory_data = review_payload.get("memory") or {}
        self.assertEqual("active", str(memory_data.get("status") or ""))
        list_payload = execute_tool(
            "memory.list",
            {"user_id": self.user.id, "query": "status bullets"},
        )
        self.assertGreaterEqual(int(list_payload.get("count") or 0), 1)

    def test_wiki_and_task_artifact_tools(self):
        """Wiki create/update/list/get plus task note and artifact linking."""
        article_create = execute_tool(
            "wiki.create_article",
            {
                "user_id": self.user.id,
                "title": "MCP Integration Notes",
                "markdown": "Initial setup steps.",
                "related_task_id": str(self.task.id),
                "tags": ["mcp", "memory"],
                "status": "published",
            },
        )
        article = article_create.get("article") or {}
        self.assertEqual("mcp-integration-notes", str(article.get("slug") or ""))
        article_update = execute_tool(
            "wiki.update_article",
            {
                "user_id": self.user.id,
                "article_id": str(article.get("id") or ""),
                "markdown": "Updated setup steps.",
                "approve_overwrite": True,
                "summary": "Expanded usage instructions.",
            },
        )
        # The update should have produced a second revision.
        revision = article_update.get("revision") or {}
        self.assertEqual(2, int(revision.get("revision") or 0))
        list_payload = execute_tool(
            "wiki.list",
            {"user_id": self.user.id, "query": "integration"},
        )
        self.assertEqual(1, int(list_payload.get("count") or 0))
        get_payload = execute_tool(
            "wiki.get",
            {
                "user_id": self.user.id,
                "article_id": str(article.get("id") or ""),
                "include_revisions": True,
            },
        )
        self.assertGreaterEqual(len(get_payload.get("revisions") or []), 2)
        note_payload = execute_tool(
            "tasks.create_note",
            {
                "task_id": str(self.task.id),
                "user_id": self.user.id,
                "note": "Implemented wiki tooling.",
            },
        )
        self.assertEqual("progress", str((note_payload.get("event") or {}).get("event_type")))
        artifact_payload = execute_tool(
            "tasks.link_artifact",
            {
                "task_id": str(self.task.id),
                "user_id": self.user.id,
                "kind": "wiki",
                "path": "artifacts/wiki/mcp-integration-notes.md",
                "summary": "Reference docs",
            },
        )
        self.assertTrue(str((artifact_payload.get("artifact") or {}).get("id") or ""))
        task_payload = execute_tool(
            "tasks.get",
            {"task_id": str(self.task.id), "user_id": self.user.id},
        )
        self.assertGreaterEqual(len(task_payload.get("artifact_links") or []), 1)
        self.assertGreaterEqual(len(task_payload.get("knowledge_articles") or []), 1)

    def test_docs_append_run_note_writes_file(self):
        """docs.append_run_note writes title and content to the given path."""
        # Use a throwaway /tmp path; remove any leftover from prior runs.
        target = Path("/tmp/gia-mcp-test-notes.md")
        if target.exists():
            target.unlink()
        payload = execute_tool(
            "docs.append_run_note",
            {
                "title": "MCP Integration",
                "content": "Connected manticore memory tools.",
                "task_id": str(self.task.id),
                "path": str(target),
            },
        )
        self.assertTrue(payload.get("ok"))
        content = target.read_text(encoding="utf-8")
        self.assertIn("MCP Integration", content)
        self.assertIn("Connected manticore memory tools.", content)
        target.unlink()

    def test_audit_logs_record_success_and_failures(self):
        """Every execute_tool call leaves an audit row, pass or fail."""
        execute_tool("tasks.list", {"user_id": self.user.id})
        # tasks.search without a query is expected to raise.
        with self.assertRaises(ValueError):
            execute_tool("tasks.search", {"user_id": self.user.id})
        success_row = MCPToolAuditLog.objects.filter(
            tool_name="tasks.list",
            ok=True,
        ).first()
        self.assertIsNotNone(success_row)
        failure_row = MCPToolAuditLog.objects.filter(
            tool_name="tasks.search",
            ok=False,
        ).first()
        self.assertIsNotNone(failure_row)

View File

@@ -0,0 +1,97 @@
from __future__ import annotations
from datetime import timedelta
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
from django.utils import timezone
from core.models import MemoryChangeRequest, MemoryItem, MessageEvent, User, WorkspaceConversation
class MemoryPipelineCommandTests(TestCase):
    """Cover the memory_suggest_from_messages and memory_hygiene commands."""

    def setUp(self):
        self.user = User.objects.create_user(
            username="memory-pipeline-user",
            email="memory-pipeline@example.com",
            password="pw",
        )
        self.conversation = WorkspaceConversation.objects.create(
            user=self.user,
            platform_type="signal",
            title="Pipeline Scope",
            platform_thread_id="pipeline-thread-1",
        )

    def test_memory_suggest_from_messages_creates_pending_request(self):
        """Suggestion command mines messages into pending change requests."""
        MessageEvent.objects.create(
            user=self.user,
            conversation=self.conversation,
            ts=1700000000000,
            direction="in",
            text="I prefer short status updates and bullet points.",
            source_system="signal",
        )
        out = StringIO()
        call_command(
            "memory_suggest_from_messages",
            user_id=str(self.user.id),
            limit_messages=50,
            max_items=10,
            stdout=out,
        )
        rendered = out.getvalue()
        self.assertIn("memory-suggest-from-messages", rendered)
        self.assertGreaterEqual(MemoryItem.objects.filter(user=self.user).count(), 1)
        self.assertGreaterEqual(
            MemoryChangeRequest.objects.filter(user=self.user, status="pending").count(),
            1,
        )

    def test_memory_hygiene_expires_and_detects_contradictions(self):
        """Hygiene command deprecates expired items and flags contradictions."""
        # One already-expired item plus two conflicting timezone facts.
        expired = MemoryItem.objects.create(
            user=self.user,
            conversation=self.conversation,
            memory_kind="fact",
            status="active",
            content={"field": "likes", "text": "calls in the evening"},
            confidence_score=0.6,
            expires_at=timezone.now() - timedelta(days=1),
        )
        MemoryItem.objects.create(
            user=self.user,
            conversation=self.conversation,
            memory_kind="fact",
            status="active",
            content={"field": "timezone", "text": "UTC+1"},
            confidence_score=0.7,
        )
        MemoryItem.objects.create(
            user=self.user,
            conversation=self.conversation,
            memory_kind="fact",
            status="active",
            content={"field": "timezone", "text": "UTC-5"},
            confidence_score=0.7,
        )
        out = StringIO()
        call_command(
            "memory_hygiene",
            user_id=str(self.user.id),
            stdout=out,
        )
        rendered = out.getvalue()
        self.assertIn("memory-hygiene", rendered)
        expired.refresh_from_db()
        self.assertEqual("deprecated", expired.status)
        contradiction_requests = MemoryChangeRequest.objects.filter(
            user=self.user,
            status="pending",
            action="update",
            reason__icontains="Contradiction",
        ).count()
        self.assertGreaterEqual(contradiction_requests, 1)

View File

@@ -5,7 +5,7 @@ from django.test import TestCase
from core.models import Person, PersonIdentifier, User
from core.presence import AvailabilitySignal, latest_state_for_people, record_native_signal
from core.presence.inference import now_ms
from core.views.compose import _context_base
from core.views.compose import _compose_availability_payload, _context_base
class PresenceQueryAndComposeContextTests(TestCase):
@@ -48,3 +48,30 @@ class PresenceQueryAndComposeContextTests(TestCase):
)
self.assertIsNotNone(base["person_identifier"])
self.assertEqual(str(self.person.id), str(base["person"].id))
def test_compose_availability_payload_falls_back_to_cross_service(self):
    """A service with no native signals falls back to cross-service data.

    Records a WhatsApp availability signal, then requests the Signal
    payload: the helper should surface the WhatsApp slice and flag the
    summary as cross-service.
    """
    ts_value = now_ms()
    record_native_signal(
        AvailabilitySignal(
            user=self.user,
            person=self.person,
            person_identifier=self.identifier,
            service="whatsapp",
            source_kind="message_in",
            availability_state="available",
            confidence=0.9,
            ts=ts_value,
        )
    )
    # Query a one-minute window either side of the recorded signal.
    enabled, slices, summary = _compose_availability_payload(
        user=self.user,
        person=self.person,
        service="signal",
        range_start=ts_value - 60000,
        range_end=ts_value + 60000,
    )
    self.assertTrue(enabled)
    self.assertGreaterEqual(len(slices), 1)
    self.assertEqual("whatsapp", str(slices[0].get("service")))
    self.assertEqual("available", str(summary.get("state")))
    self.assertTrue(bool(summary.get("is_cross_service")))

View File

@@ -124,3 +124,66 @@ class ReactionNormalizationTests(TestCase):
self.assertEqual("❤️", serialized["text"])
self.assertEqual([], list(serialized.get("reactions") or []))
self.assertEqual(str(anchor.id), serialized["reply_to_id"])
def test_apply_message_edit_tracks_history_and_updates_text(self):
    """apply_message_edit swaps the text and records the previous version.

    The edit targets the message by its original sent timestamp; afterwards
    the message carries one edit_history entry with before/after text.
    """
    message = Message.objects.create(
        user=self.user,
        session=self.session,
        ts=1700000004000,
        sender_uuid="author-3",
        text="before",
        source_service="signal",
        source_message_id="1700000004000",
    )
    updated = async_to_sync(history.apply_message_edit)(
        self.user,
        self.identifier,
        target_ts=1700000004000,
        new_text="after",
        source_service="signal",
        actor="+15550000000",
        payload={"origin": "test"},
    )
    self.assertIsNotNone(updated)
    message.refresh_from_db()
    self.assertEqual("after", str(message.text or ""))
    edit_history = list((message.receipt_payload or {}).get("edit_history") or [])
    self.assertEqual(1, len(edit_history))
    self.assertEqual("before", str(edit_history[0].get("previous_text") or ""))
    self.assertEqual("after", str(edit_history[0].get("new_text") or ""))
def test_serialize_message_marks_deleted_and_preserves_edit_history(self):
    """_serialize_message tombstones deleted rows but keeps edit history."""
    # Receipt payload carries both a prior edit record and deletion metadata.
    message = Message.objects.create(
        user=self.user,
        session=self.session,
        ts=1700000005000,
        sender_uuid="author-4",
        text="keep me",
        source_service="signal",
        source_message_id="1700000005000",
        receipt_payload={
            "edit_history": [
                {
                    "edited_ts": 1700000005100,
                    "source_service": "signal",
                    "actor": "+15550000000",
                    "previous_text": "old",
                    "new_text": "keep me",
                }
            ],
            "is_deleted": True,
            "deleted": {
                "deleted_ts": 1700000005200,
                "source_service": "signal",
                "actor": "+15550000000",
            },
        },
    )
    serialized = _serialize_message(message)
    self.assertTrue(bool(serialized.get("is_deleted")))
    # Deleted messages display a tombstone instead of their text.
    self.assertEqual("(message deleted)", str(serialized.get("display_text") or ""))
    self.assertEqual(1, int(serialized.get("edit_count") or 0))
    self.assertEqual(1, len(list(serialized.get("edit_history") or [])))

View File

@@ -267,6 +267,76 @@ class SignalInboundReplyLinkTests(TransactionTestCase):
"Expected sync reaction to be applied via destination-number fallback resolution.",
)
def test_process_raw_inbound_event_applies_edit(self):
    """An inbound Signal editMessage envelope rewrites the anchor's text.

    The SignalClient is built without __init__ and its collaborators are
    mocked, so only the raw-envelope processing path runs.
    """
    fake_ur = Mock()
    fake_ur.message_received = AsyncMock(return_value=None)
    fake_ur.xmpp = Mock()
    fake_ur.xmpp.client = Mock()
    fake_ur.xmpp.client.apply_external_reaction = AsyncMock(return_value=None)
    client = SignalClient.__new__(SignalClient)
    client.service = "signal"
    client.ur = fake_ur
    client.log = Mock()
    client.client = Mock()
    client.client.bot_uuid = ""
    client.client.phone_number = ""
    client._resolve_signal_identifiers = AsyncMock(return_value=[self.identifier])
    client._auto_link_single_user_signal_identifier = AsyncMock(return_value=[])
    # editMessage targets the anchor by its original sent timestamp.
    payload = {
        "envelope": {
            "sourceNumber": "+15550002000",
            "sourceUuid": "756078fd-d447-426d-a620-581a86d64f51",
            "timestamp": 1772545466000,
            "dataMessage": {
                "editMessage": {
                    "targetSentTimestamp": 1772545458187,
                    "dataMessage": {"message": "anchor edited"},
                }
            },
        }
    }
    async_to_sync(client._process_raw_inbound_event)(json.dumps(payload))
    self.anchor.refresh_from_db()
    self.assertEqual("anchor edited", str(self.anchor.text or ""))
    edits = list((self.anchor.receipt_payload or {}).get("edit_history") or [])
    self.assertEqual(1, len(edits))
def test_process_raw_inbound_event_applies_delete_tombstone_flag(self):
    """An inbound Signal delete envelope tombstones the anchor message.

    Same mocked-client setup as the edit test; asserts the receipt payload
    gains both the is_deleted flag and the deleted metadata dict.
    """
    fake_ur = Mock()
    fake_ur.message_received = AsyncMock(return_value=None)
    fake_ur.xmpp = Mock()
    fake_ur.xmpp.client = Mock()
    fake_ur.xmpp.client.apply_external_reaction = AsyncMock(return_value=None)
    client = SignalClient.__new__(SignalClient)
    client.service = "signal"
    client.ur = fake_ur
    client.log = Mock()
    client.client = Mock()
    client.client.bot_uuid = ""
    client.client.phone_number = ""
    client._resolve_signal_identifiers = AsyncMock(return_value=[self.identifier])
    client._auto_link_single_user_signal_identifier = AsyncMock(return_value=[])
    # delete targets the anchor by its original sent timestamp.
    payload = {
        "envelope": {
            "sourceNumber": "+15550002000",
            "sourceUuid": "756078fd-d447-426d-a620-581a86d64f51",
            "timestamp": 1772545467000,
            "dataMessage": {
                "delete": {
                    "targetSentTimestamp": 1772545458187,
                }
            },
        }
    }
    async_to_sync(client._process_raw_inbound_event)(json.dumps(payload))
    self.anchor.refresh_from_db()
    self.assertTrue(bool((self.anchor.receipt_payload or {}).get("is_deleted")))
    self.assertTrue(bool((self.anchor.receipt_payload or {}).get("deleted") or {}))
class SignalRuntimeCommandWritebackTests(TestCase):
def setUp(self):

View File

@@ -1,5 +1,7 @@
from asgiref.sync import async_to_sync
from django.test import SimpleTestCase
from core.clients import transport
from core.transports.capabilities import capability_snapshot, supports, unsupported_reason
@@ -15,3 +17,11 @@ class TransportCapabilitiesTests(SimpleTestCase):
snapshot = capability_snapshot()
self.assertIn("schema_version", snapshot)
self.assertIn("services", snapshot)
def test_transport_send_fails_fast_when_unsupported(self):
    """send_message_raw returns False for a service without send support."""
    result = async_to_sync(transport.send_message_raw)(
        "xmpp",
        "person@example.com",
        text="hello",
    )
    self.assertFalse(result)

View File

@@ -52,7 +52,7 @@ from core.models import (
WorkspaceConversation,
)
from core.presence import get_settings as get_availability_settings
from core.presence import spans_for_range
from core.presence import latest_state_for_people, spans_for_range
from core.realtime.typing_state import get_person_typing_state
from core.transports.capabilities import supports, unsupported_reason
from core.translation.engine import process_inbound_translation
@@ -190,6 +190,92 @@ def _serialize_availability_spans(spans):
return rows
def _availability_summary_for_person(*, user, person: Person, service: str) -> dict:
    """Summarize the person's most recent availability state as a dict.

    Looks up the latest state for the requested service first; when nothing
    is recorded there, retries without a service filter and marks the
    result via ``is_cross_service``. Returns ``{}`` when no state exists.
    """
    person_key = str(person.id)
    selected_service = str(service or "").strip().lower()

    def _lookup(service_filter):
        # latest_state_for_people returns a mapping keyed by person id.
        rows = latest_state_for_people(
            user=user,
            person_ids=[person_key],
            service=service_filter,
        )
        return rows.get(person_key)

    row = _lookup(selected_service)
    is_cross_service = False
    if row is None and selected_service:
        row = _lookup("")
        is_cross_service = row is not None
    if row is None:
        return {}
    ts_value = int(row.get("ts") or 0)
    state_value = str(row.get("state") or "unknown").strip().lower() or "unknown"
    return {
        "state": state_value,
        "state_label": state_value.title(),
        "service": str(row.get("service") or selected_service or "").strip().lower(),
        "confidence": float(row.get("confidence") or 0.0),
        "source_kind": str(row.get("source_kind") or "").strip(),
        "ts": ts_value,
        "ts_label": _format_ts_label(ts_value) if ts_value > 0 else "",
        "is_cross_service": bool(is_cross_service),
    }
def _compose_availability_payload(
    *,
    user,
    person: Person | None,
    service: str,
    range_start: int,
    range_end: int,
) -> tuple[bool, list[dict], dict]:
    """Build the (enabled, slices, summary) tuple for the compose panel.

    Disabled entirely (False, [], {}) when there is no person or the user's
    availability settings hide it. Otherwise fetches serialized availability
    spans for the window; when the requested service has none, falls back to
    a cross-service fetch and marks the summary accordingly.
    """
    settings_row = get_availability_settings(user)
    enabled = (
        person is not None
        and settings_row.enabled
        and settings_row.show_in_chat
    )
    if not enabled:
        return False, [], {}
    service_key = str(service or "").strip().lower()

    def _slices(service_filter):
        return _serialize_availability_spans(
            spans_for_range(
                user=user,
                person=person,
                start_ts=int(range_start or 0),
                end_ts=int(range_end or 0),
                service=service_filter,
                limit=200,
            )
        )

    rows = _slices(service_key)
    used_cross_service = False
    if not rows and service_key:
        rows = _slices("")
        used_cross_service = bool(rows)
    summary = _availability_summary_for_person(
        user=user,
        person=person,
        service=service_key,
    )
    if used_cross_service and summary:
        summary["is_cross_service"] = True
    return True, rows, summary
def _is_outgoing(msg: Message) -> bool:
is_outgoing = str(msg.custom_author or "").upper() in {"USER", "BOT"}
if not is_outgoing:
@@ -507,6 +593,66 @@ def _serialize_message(msg: Message) -> dict:
)
# Receipt payload and metadata
receipt_payload = msg.receipt_payload or {}
deleted_payload = dict((receipt_payload or {}).get("deleted") or {})
is_deleted = bool(
(receipt_payload or {}).get("is_deleted")
or deleted_payload
or (receipt_payload or {}).get("delete_events")
)
deleted_ts = 0
for candidate in (
deleted_payload.get("deleted_ts"),
deleted_payload.get("updated_at"),
deleted_payload.get("ts"),
):
try:
deleted_ts = int(candidate or 0)
except Exception:
deleted_ts = 0
if deleted_ts > 0:
break
deleted_display = _format_ts_label(deleted_ts) if deleted_ts > 0 else ""
deleted_actor = str(deleted_payload.get("actor") or "").strip()
deleted_source_service = str(deleted_payload.get("source_service") or "").strip()
edit_history_rows = []
for row in list((receipt_payload or {}).get("edit_history") or []):
item = dict(row or {})
edited_ts = 0
for candidate in (
item.get("edited_ts"),
item.get("updated_at"),
item.get("ts"),
):
try:
edited_ts = int(candidate or 0)
except Exception:
edited_ts = 0
if edited_ts > 0:
break
previous_text = str(item.get("previous_text") or "")
new_text = str(item.get("new_text") or "")
edit_history_rows.append(
{
"edited_ts": edited_ts,
"edited_display": _format_ts_label(edited_ts) if edited_ts > 0 else "",
"source_service": str(item.get("source_service") or "").strip().lower(),
"actor": str(item.get("actor") or "").strip(),
"previous_text": previous_text,
"new_text": new_text,
}
)
edit_history_rows.sort(key=lambda row: int(row.get("edited_ts") or 0))
edit_count = len(edit_history_rows)
last_edit_ts = int(edit_history_rows[-1].get("edited_ts") or 0) if edit_count else 0
last_edit_display = _format_ts_label(last_edit_ts) if last_edit_ts > 0 else ""
if is_deleted:
display_text = "(message deleted)"
image_urls = []
image_url = ""
hide_text = False
read_source_service = str(msg.read_source_service or "").strip()
read_by_identifier = str(msg.read_by_identifier or "").strip()
reaction_rows = []
@@ -570,6 +716,17 @@ def _serialize_message(msg: Message) -> dict:
"receipt_payload": receipt_payload,
"read_source_service": read_source_service,
"read_by_identifier": read_by_identifier,
"is_deleted": is_deleted,
"deleted_ts": deleted_ts,
"deleted_display": deleted_display,
"deleted_actor": deleted_actor,
"deleted_source_service": deleted_source_service,
"edit_history": edit_history_rows,
"edit_history_json": json.dumps(edit_history_rows),
"edit_count": edit_count,
"is_edited": bool(edit_count),
"last_edit_ts": last_edit_ts,
"last_edit_display": last_edit_display,
"reactions": reaction_rows,
"source_message_id": str(getattr(msg, "source_message_id", "") or ""),
"reply_to_id": str(getattr(msg, "reply_to_id", "") or ""),
@@ -2694,35 +2851,27 @@ def _panel_context(
counterpart_identifiers=counterpart_identifiers,
conversation=conversation,
)
availability_slices = []
availability_enabled = False
availability_settings = get_availability_settings(request.user)
if (
base["person"] is not None
and availability_settings.enabled
and availability_settings.show_in_chat
):
range_start = (
int(session_bundle["messages"][0].ts or 0) if session_bundle["messages"] else 0
)
range_end = (
int(session_bundle["messages"][-1].ts or 0) if session_bundle["messages"] else 0
)
if range_start <= 0 or range_end <= 0:
now_ts = int(time.time() * 1000)
range_start = now_ts - (24 * 60 * 60 * 1000)
range_end = now_ts
availability_enabled = True
availability_slices = _serialize_availability_spans(
spans_for_range(
user=request.user,
person=base["person"],
start_ts=range_start,
end_ts=range_end,
service=base["service"],
limit=200,
)
)
range_start = (
int(session_bundle["messages"][0].ts or 0) if session_bundle["messages"] else 0
)
range_end = (
int(session_bundle["messages"][-1].ts or 0) if session_bundle["messages"] else 0
)
if range_start <= 0 or range_end <= 0:
now_ts = int(time.time() * 1000)
range_start = now_ts - (24 * 60 * 60 * 1000)
range_end = now_ts
(
availability_enabled,
availability_slices,
availability_summary,
) = _compose_availability_payload(
user=request.user,
person=base["person"],
service=base["service"],
range_start=range_start,
range_end=range_end,
)
glance_items = _build_glance_items(
serialized_messages,
person_id=(base["person"].id if base["person"] else None),
@@ -2923,9 +3072,15 @@ def _panel_context(
"manual_icon_class": "fa-solid fa-paper-plane",
"panel_id": f"compose-panel-{unique}",
"typing_state_json": json.dumps(typing_state),
"capability_send": supports(base["service"], "send"),
"capability_send_reason": unsupported_reason(base["service"], "send"),
"capability_reactions": supports(base["service"], "reactions"),
"capability_reactions_reason": unsupported_reason(base["service"], "reactions"),
"availability_enabled": availability_enabled,
"availability_slices": availability_slices,
"availability_slices_json": json.dumps(availability_slices),
"availability_summary": availability_summary,
"availability_summary_json": json.dumps(availability_summary),
"command_options": command_options,
"bp_binding_summary": bp_binding_summary,
"platform_options": platform_options,
@@ -3383,31 +3538,23 @@ class ComposeThread(LoginRequiredMixin, View):
counterpart_identifiers = _counterpart_identifiers_for_person(
request.user, base["person"]
)
availability_slices = []
availability_settings = get_availability_settings(request.user)
if (
base["person"] is not None
and availability_settings.enabled
and availability_settings.show_in_chat
):
range_start = (
int(messages[0].ts or 0) if messages else max(0, int(after_ts or 0))
)
range_end = int(latest_ts or 0)
if range_start <= 0 or range_end <= 0:
now_ts = int(time.time() * 1000)
range_start = now_ts - (24 * 60 * 60 * 1000)
range_end = now_ts
availability_slices = _serialize_availability_spans(
spans_for_range(
user=request.user,
person=base["person"],
start_ts=range_start,
end_ts=range_end,
service=base["service"],
limit=200,
)
)
range_start = int(messages[0].ts or 0) if messages else max(0, int(after_ts or 0))
range_end = int(latest_ts or 0)
if range_start <= 0 or range_end <= 0:
now_ts = int(time.time() * 1000)
range_start = now_ts - (24 * 60 * 60 * 1000)
range_end = now_ts
(
_availability_enabled,
availability_slices,
availability_summary,
) = _compose_availability_payload(
user=request.user,
person=base["person"],
service=base["service"],
range_start=range_start,
range_end=range_end,
)
payload = {
"messages": _serialize_messages_with_artifacts(
messages,
@@ -3417,6 +3564,7 @@ class ComposeThread(LoginRequiredMixin, View):
),
"last_ts": latest_ts,
"availability_slices": availability_slices,
"availability_summary": availability_summary,
"typing": get_person_typing_state(
user_id=request.user.id,
person_id=base["person"].id if base["person"] else None,
@@ -4459,6 +4607,18 @@ class ComposeSend(LoginRequiredMixin, View):
log_prefix = (
f"[ComposeSend] service={base['service']} identifier={base['identifier']}"
)
if bool(getattr(settings, "CAPABILITY_ENFORCEMENT_ENABLED", True)) and not supports(
str(base["service"] or "").strip().lower(),
"send",
):
reason = unsupported_reason(str(base["service"] or "").strip().lower(), "send")
return self._response(
request,
ok=False,
message=f"Send not supported: {reason}",
level="warning",
panel_id=panel_id,
)
logger.debug(f"{log_prefix} text_len={len(text)} attempting send")
# If runtime is out-of-process, enqueue command and return immediately (non-blocking).

88
core/views/prosody.py Normal file
View File

@@ -0,0 +1,88 @@
from __future__ import annotations

import base64
import hmac

from django.conf import settings
from django.contrib.auth import authenticate, get_user_model
from django.http import HttpRequest, HttpResponse
from django.views import View
class ProsodyAuthBridge(View):
    """
    Minimal external-auth bridge for Prosody.

    Implements the Prosody external auth line protocol: each request carries
    one colon-delimited command line ("auth:user:domain:password",
    "isuser:user:domain", "setpass:..."), and the response body is plain text
    "1" (allow) or "0" (deny).

    Access control: only loopback clients that present the shared secret from
    ``settings.XMPP_SECRET`` are answered; everything else is denied.
    """

    http_method_names = ["get", "post"]

    def _denied(self) -> HttpResponse:
        """Return the protocol's deny response ("0")."""
        return HttpResponse("0\n", content_type="text/plain")

    def _b64url_decode(self, value: str) -> str:
        """Decode a base64url string (padding optional) into UTF-8 text.

        Returns "" for empty or undecodable input instead of raising, so
        callers can treat any failure as "no value supplied".
        """
        raw = str(value or "").strip()
        if not raw:
            return ""
        # Restore stripped padding, then map the URL-safe alphabet back to
        # the standard one before decoding.
        padded = raw + "=" * (-len(raw) % 4)
        padded = padded.replace("-", "+").replace("_", "/")
        try:
            return base64.b64decode(padded.encode("ascii")).decode(
                "utf-8", errors="ignore"
            )
        except Exception:
            return ""

    def _extract_line(self, request: HttpRequest) -> str:
        """Pull the protocol command line out of the request.

        Precedence: ``?line_b64=`` (base64url) > raw request body >
        POST field ``line``.
        """
        line_b64 = str(request.GET.get("line_b64") or "").strip()
        if line_b64:
            return self._b64url_decode(line_b64)
        body = (request.body or b"").decode("utf-8", errors="ignore").strip()
        if body:
            return body
        return str(request.POST.get("line") or "").strip()

    def post(self, request: HttpRequest) -> HttpResponse:
        """Handle one protocol command; always answers plain-text "1" or "0"."""
        # Only trust the local Prosody process; deny anything non-loopback.
        remote_addr = str(request.META.get("REMOTE_ADDR") or "").strip()
        if remote_addr not in {"127.0.0.1", "::1"}:
            return self._denied()
        expected_secret = str(getattr(settings, "XMPP_SECRET", "") or "").strip()
        # Secret may arrive via header, plain query param, or base64url param.
        supplied_secret = str(request.headers.get("X-Prosody-Secret") or "").strip()
        if not supplied_secret:
            supplied_secret = str(request.GET.get("secret") or "").strip()
        secret_b64 = str(request.GET.get("secret_b64") or "").strip()
        if not supplied_secret and secret_b64:
            supplied_secret = self._b64url_decode(secret_b64)
        # hmac.compare_digest: constant-time comparison so the secret cannot
        # be recovered via response-timing differences.
        if not expected_secret or not hmac.compare_digest(
            supplied_secret, expected_secret
        ):
            return self._denied()
        line = self._extract_line(request)
        if not line:
            return self._denied()
        parts = line.split(":")
        if len(parts) < 3:
            return self._denied()
        command, username, _domain = parts[:3]
        # Passwords may themselves contain ":" — rejoin everything after the
        # domain field rather than taking a single segment.
        password = ":".join(parts[3:]) if len(parts) > 3 else None
        if command == "auth":
            if not password:
                return self._denied()
            user = authenticate(username=username, password=password)
            ok = bool(user is not None and getattr(user, "is_active", False))
            return HttpResponse("1\n" if ok else "0\n", content_type="text/plain")
        if command == "isuser":
            User = get_user_model()
            exists = bool(User.objects.filter(username=username).exists())
            return HttpResponse("1\n" if exists else "0\n", content_type="text/plain")
        if command == "setpass":
            # Password changes are managed in Django, never via Prosody.
            return self._denied()
        return self._denied()

    def get(self, request: HttpRequest) -> HttpResponse:
        """Prosody may probe with GET; treat it exactly like POST."""
        return self.post(request)

View File

@@ -14,6 +14,7 @@ from django.views import View
from core.forms import AIWorkspaceWindowForm
from core.lib.notify import raw_sendmsg
from core.memory.retrieval import retrieve_memories_for_prompt
from core.messaging import ai as ai_runner
from core.messaging.utils import messages_to_string
from core.models import (
@@ -3936,8 +3937,27 @@ class AIWorkspaceRunOperation(LoginRequiredMixin, View):
)
return rows
def _build_prompt(self, operation, owner_name, person, transcript, user_notes):
def _build_prompt(
self,
operation,
owner_name,
person,
transcript,
user_notes,
memory_context,
):
notes = (user_notes or "").strip()
memory_lines = []
for index, item in enumerate(memory_context or [], start=1):
content = item.get("content") or {}
text = str(content.get("text") or "").strip()
if not text:
text = str(content).strip()
if not text:
continue
kind = str(item.get("memory_kind") or "fact")
memory_lines.append(f"{index}. [{kind}] {text}")
memory_text = "\n".join(memory_lines) if memory_lines else "None"
if operation == "draft_reply":
instruction = (
"Generate 3 concise reply options in different tones: soft, neutral, firm. "
@@ -3965,6 +3985,8 @@ class AIWorkspaceRunOperation(LoginRequiredMixin, View):
f"Owner: {owner_name}\n"
f"Person: {person.name}\n"
f"Notes: {notes or 'None'}\n\n"
"Approved Memory Context:\n"
f"{memory_text}\n\n"
f"Conversation:\n{transcript}"
),
},
@@ -4111,12 +4133,20 @@ class AIWorkspaceRunOperation(LoginRequiredMixin, View):
)
try:
memory_context = retrieve_memories_for_prompt(
user_id=request.user.id,
person_id=str(person.id),
conversation_id=str(conversation.id),
statuses=("active",),
limit=12,
)
prompt = self._build_prompt(
operation=operation,
owner_name=owner_name,
person=person,
transcript=transcript,
user_notes=user_notes,
memory_context=memory_context,
)
result_text = async_to_sync(ai_runner.run_prompt)(prompt, ai_obj)
draft_options = (