diff --git a/app/urls.py b/app/urls.py index 793d4b4..dca8f69 100644 --- a/app/urls.py +++ b/app/urls.py @@ -37,6 +37,7 @@ from core.views import ( queues, sessions, signal, + tasks, system, whatsapp, workspace, @@ -266,6 +267,41 @@ urlpatterns = [ compose.ComposeContactCreateAll.as_view(), name="compose_contact_create_all", ), + path( + "compose/answer-suggestion/send/", + tasks.AnswerSuggestionSend.as_view(), + name="compose_answer_suggestion_send", + ), + path( + "tasks/", + tasks.TasksHub.as_view(), + name="tasks_hub", + ), + path( + "tasks/projects//", + tasks.TaskProjectDetail.as_view(), + name="tasks_project", + ), + path( + "tasks/epics//", + tasks.TaskEpicDetail.as_view(), + name="tasks_epic", + ), + path( + "tasks/groups///", + tasks.TaskGroupDetail.as_view(), + name="tasks_group", + ), + path( + "tasks/task//", + tasks.TaskDetail.as_view(), + name="tasks_task", + ), + path( + "settings/tasks/", + tasks.TaskSettings.as_view(), + name="tasks_settings", + ), # AIs path( "ai/workspace/", diff --git a/core/assist/__init__.py b/core/assist/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/assist/engine.py b/core/assist/engine.py new file mode 100644 index 0000000..bf64a8f --- /dev/null +++ b/core/assist/engine.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from core.assist.repeat_answer import find_repeat_answer, learn_from_message +from core.models import Message +from core.tasks.engine import process_inbound_task_intelligence + + +async def process_inbound_assist(message: Message) -> None: + if message is None: + return + await learn_from_message(message) + await find_repeat_answer(message.user, message) + await process_inbound_task_intelligence(message) diff --git a/core/assist/repeat_answer.py b/core/assist/repeat_answer.py new file mode 100644 index 0000000..70a3ebd --- /dev/null +++ b/core/assist/repeat_answer.py @@ -0,0 +1,136 @@ +from __future__ import annotations + +import hashlib +import re +from dataclasses import 
dataclass + +from asgiref.sync import sync_to_async +from django.utils import timezone + +from core.models import AnswerMemory, AnswerSuggestionEvent, Message + +_WORD_RE = re.compile(r"[^a-z0-9\s]+", re.IGNORECASE) + + +@dataclass(slots=True) +class RepeatAnswerSuggestion: + answer_memory_id: str + answer_text: str + score: float + + +def _normalize_question(text: str) -> str: + body = str(text or "").strip().lower() + body = _WORD_RE.sub(" ", body) + body = re.sub(r"\s+", " ", body).strip() + return body + + +def _fingerprint(text: str) -> str: + norm = _normalize_question(text) + if not norm: + return "" + return hashlib.sha1(norm.encode("utf-8")).hexdigest() + + +def _is_question(text: str) -> bool: + body = str(text or "").strip() + if not body: + return False + low = body.lower() + return body.endswith("?") or low.startswith(("what", "why", "how", "when", "where", "who", "can ", "do ", "did ", "is ", "are ")) + + +def _is_group_channel(message: Message) -> bool: + channel = str(getattr(message, "source_chat_id", "") or "").strip().lower() + if channel.endswith("@g.us"): + return True + return str(getattr(message, "source_service", "") or "").strip().lower() == "xmpp" and "conference." in channel + + +async def learn_from_message(message: Message) -> None: + if message is None: + return + text = str(message.text or "").strip() + if not text: + return + if dict(message.message_meta or {}).get("origin_tag"): + return + + # Build memory by linking obvious reply answers to prior questions. 
+ if message.reply_to_id and message.reply_to: + q_text = str(message.reply_to.text or "").strip() + if _is_question(q_text): + fp = _fingerprint(q_text) + if fp: + await sync_to_async(AnswerMemory.objects.create)( + user=message.user, + service=message.source_service or "web", + channel_identifier=message.source_chat_id or "", + question_fingerprint=fp, + question_text=q_text, + answer_message=message, + answer_text=text, + confidence_meta={"source": "reply_pair"}, + ) + + +async def find_repeat_answer(user, message: Message) -> RepeatAnswerSuggestion | None: + if message is None: + return None + if not _is_group_channel(message): + return None + if dict(message.message_meta or {}).get("origin_tag"): + return None + text = str(message.text or "").strip() + if not _is_question(text): + return None + + fp = _fingerprint(text) + if not fp: + return None + + # channel cooldown for repeated suggestions in short windows + cooldown_cutoff = timezone.now() - timezone.timedelta(minutes=3) + cooldown_exists = await sync_to_async( + lambda: AnswerSuggestionEvent.objects.filter( + user=user, + message__source_service=message.source_service, + message__source_chat_id=message.source_chat_id, + status="suggested", + created_at__gte=cooldown_cutoff, + ).exists() + )() + if cooldown_exists: + return None + + memory = await sync_to_async( + lambda: AnswerMemory.objects.filter( + user=user, + service=message.source_service or "web", + channel_identifier=message.source_chat_id or "", + question_fingerprint=fp, + ) + .order_by("-created_at") + .first() + )() + if not memory: + return None + + answer = str(memory.answer_text or "").strip() + if not answer: + return None + + score = 0.99 + await sync_to_async(AnswerSuggestionEvent.objects.create)( + user=user, + message=message, + status="suggested", + candidate_answer=memory, + score=score, + ) + return RepeatAnswerSuggestion( + answer_memory_id=str(memory.id), + answer_text=answer, + score=score, + ) diff --git 
a/core/commands/engine.py b/core/commands/engine.py index fb1ac8b..c11cb3c 100644 --- a/core/commands/engine.py +++ b/core/commands/engine.py @@ -1,9 +1,14 @@ from __future__ import annotations from asgiref.sync import sync_to_async +from django.conf import settings from core.commands.base import CommandContext, CommandResult -from core.commands.handlers.bp import BPCommandHandler +from core.commands.handlers.bp import ( + BPCommandHandler, + bp_reply_is_optional_for_trigger, + bp_trigger_matches, +) from core.commands.registry import get as get_handler from core.commands.registry import register from core.messaging.reply_sync import is_mirrored_origin @@ -86,6 +91,12 @@ async def _eligible_profiles(ctx: CommandContext) -> list[CommandProfile]: def _matches_trigger(profile: CommandProfile, text: str) -> bool: + if profile.slug == "bp" and bool(getattr(settings, "BP_SUBCOMMANDS_V1", True)): + return bp_trigger_matches( + message_text=text, + trigger_token=profile.trigger_token, + exact_match_only=profile.exact_match_only, + ) body = str(text or "").strip() trigger = str(profile.trigger_token or "").strip() if not trigger: @@ -111,15 +122,22 @@ async def process_inbound_message(ctx: CommandContext) -> list[CommandResult]: if not _matches_trigger(profile, ctx.message_text): continue if profile.reply_required and trigger_message.reply_to_id is None: - results.append( - CommandResult( - ok=False, - status="skipped", - error="reply_required", - payload={"profile": profile.slug}, + if ( + profile.slug == "bp" + and bool(getattr(settings, "BP_SUBCOMMANDS_V1", True)) + and bp_reply_is_optional_for_trigger(ctx.message_text) + ): + pass + else: + results.append( + CommandResult( + ok=False, + status="skipped", + error="reply_required", + payload={"profile": profile.slug}, + ) ) - ) - continue + continue handler = get_handler(profile.slug) if handler is None: results.append( diff --git a/core/commands/handlers/bp.py b/core/commands/handlers/bp.py index 442db8f..b4d81fb 100644 
--- a/core/commands/handlers/bp.py +++ b/core/commands/handlers/bp.py @@ -1,5 +1,6 @@ from __future__ import annotations +import re import time from asgiref.sync import sync_to_async @@ -8,6 +9,7 @@ from django.conf import settings from core.commands.base import CommandContext, CommandHandler, CommandResult from core.commands.delivery import post_status_in_source, post_to_channel_binding from core.messaging import ai as ai_runner +from core.messaging.text_export import plain_text_blob from core.messaging.utils import messages_to_string from core.models import ( AI, @@ -19,6 +21,49 @@ from core.models import ( Message, ) +_BP_SET_RE = re.compile(r"^\s*#bp\s+set#(?P.*)$", re.IGNORECASE | re.DOTALL) +_BP_SET_RANGE_RE = re.compile(r"^\s*#bp\s+set\s+range#(?:.*)$", re.IGNORECASE | re.DOTALL) + + +class BPParsedCommand(dict): + @property + def command(self) -> str | None: + value = self.get("command") + return str(value) if value else None + + @property + def remainder_text(self) -> str: + return str(self.get("remainder_text") or "") + + + +def parse_bp_subcommand(text: str) -> BPParsedCommand: + body = str(text or "") + if _BP_SET_RANGE_RE.match(body): + return BPParsedCommand(command="set_range", remainder_text="") + match = _BP_SET_RE.match(body) + if match: + return BPParsedCommand(command="set", remainder_text=str(match.group("rest") or "").strip()) + return BPParsedCommand(command=None, remainder_text="") + + +def bp_trigger_matches(message_text: str, trigger_token: str, exact_match_only: bool) -> bool: + body = str(message_text or "").strip() + trigger = str(trigger_token or "").strip() + parsed = parse_bp_subcommand(body) + if parsed.command and bool(getattr(settings, "BP_SUBCOMMANDS_V1", True)): + return True + if not trigger: + return False + if exact_match_only: + return body == trigger + return trigger in body + + +def bp_reply_is_optional_for_trigger(message_text: str) -> bool: + parsed = parse_bp_subcommand(message_text) + return parsed.command == "set" + 
def _bp_system_prompt(): return ( @@ -43,22 +88,6 @@ def _clamp_transcript(transcript: str, max_chars: int) -> str: ) -def _bp_fallback_markdown(template_text: str, transcript: str, error_text: str = "") -> str: - header = ( - "## Business Plan (Draft)\n\n" - "Automatic fallback was used because AI generation failed for this run.\n" - ) - if error_text: - header += f"\nFailure: `{error_text}`\n" - return ( - f"{header}\n" - "### Template\n" - f"{template_text}\n\n" - "### Transcript Window\n" - f"{transcript}" - ) - - class BPCommandHandler(CommandHandler): slug = "bp" @@ -95,60 +124,8 @@ class BPCommandHandler(CommandHandler): failed_bindings += 1 return {"sent_bindings": sent_bindings, "failed_bindings": failed_bindings} - async def execute(self, ctx: CommandContext) -> CommandResult: - trigger = await sync_to_async( - lambda: Message.objects.select_related("user", "session") - .filter(id=ctx.message_id) - .first() - )() - if trigger is None: - return CommandResult(ok=False, status="failed", error="trigger_not_found") - - profile = await sync_to_async( - lambda: trigger.user.commandprofile_set.filter(slug=self.slug, enabled=True) - .first() - )() - if profile is None: - return CommandResult(ok=False, status="skipped", error="profile_missing") - - actions = await sync_to_async(list)( - CommandAction.objects.filter( - profile=profile, - enabled=True, - ).order_by("position", "id") - ) - action_types = {row.action_type for row in actions} - if "extract_bp" not in action_types: - return CommandResult(ok=False, status="skipped", error="extract_bp_disabled") - - run, created = await sync_to_async(CommandRun.objects.get_or_create)( - profile=profile, - trigger_message=trigger, - defaults={ - "user": trigger.user, - "status": "running", - }, - ) - if not created and run.status in {"ok", "running"}: - return CommandResult( - ok=True, - status="ok", - payload={"document_id": str(run.result_ref_id or "")}, - ) - run.status = "running" - run.error = "" - await 
sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) - - if trigger.reply_to_id is None: - run.status = "failed" - run.error = "bp_requires_reply_target" - await sync_to_async(run.save)( - update_fields=["status", "error", "updated_at"] - ) - return CommandResult(ok=False, status="failed", error=run.error) - - anchor = trigger.reply_to - rows = await sync_to_async(list)( + async def _load_window(self, trigger: Message, anchor: Message) -> list[Message]: + return await sync_to_async(list)( Message.objects.filter( user=trigger.user, session=trigger.session, @@ -158,105 +135,142 @@ class BPCommandHandler(CommandHandler): .order_by("ts") .select_related("session", "session__identifier", "session__identifier__person") ) - transcript = messages_to_string( - rows, - author_rewrites={"USER": "Operator", "BOT": "Assistant"}, - ) - max_transcript_chars = int( - getattr(settings, "BP_MAX_TRANSCRIPT_CHARS", 12000) or 12000 - ) - transcript = _clamp_transcript(transcript, max_transcript_chars) - default_template = ( - "Business Plan:\n" - "- Objective\n" - "- Audience\n" - "- Offer\n" - "- GTM\n" - "- Risks" - ) - template_text = profile.template_text or default_template - max_template_chars = int( - getattr(settings, "BP_MAX_TEMPLATE_CHARS", 5000) or 5000 - ) - template_text = str(template_text or "")[:max_template_chars] - ai_obj = await sync_to_async( - # Match compose draft/engage lookup behavior exactly. 
- lambda: AI.objects.filter(user=trigger.user).first() - )() - ai_warning = "" - if ai_obj is None: - summary = _bp_fallback_markdown( - template_text, - transcript, - "ai_not_configured", - ) - ai_warning = "ai_not_configured" - else: - prompt = [ - {"role": "system", "content": _bp_system_prompt()}, - { - "role": "user", - "content": ( - "Template:\n" - f"{template_text}\n\n" - "Messages:\n" - f"{transcript}" - ), - }, - ] - try: - summary = str( - await ai_runner.run_prompt( - prompt, ai_obj, operation="command_bp_extract" - ) - or "" - ).strip() - if not summary: - raise RuntimeError("empty_ai_response") - except Exception as exc: - ai_warning = f"bp_ai_failed:{exc}" - summary = _bp_fallback_markdown( - template_text, - transcript, - str(exc), - ) + def _annotation(self, mode: str, message_count: int, has_addendum: bool = False) -> str: + if mode == "set" and has_addendum: + return "Generated from 1 message + 1 addendum." + if message_count == 1: + return "Generated from 1 message." + return f"Generated from {int(message_count)} messages." 
+ + async def _persist_document( + self, + *, + run: CommandRun, + trigger: Message, + profile, + anchor: Message | None, + content: str, + mode: str, + source_message_ids: list[str], + annotation: str, + ) -> BusinessPlanDocument: + payload = { + "mode": mode, + "source_message_ids": list(source_message_ids), + "annotation": annotation, + } document = await sync_to_async(BusinessPlanDocument.objects.create)( user=trigger.user, command_profile=profile, - source_service=trigger.source_service or ctx.service, - source_channel_identifier=trigger.source_chat_id or ctx.channel_identifier, + source_service=trigger.source_service or "web", + source_channel_identifier=trigger.source_chat_id or "", trigger_message=trigger, anchor_message=anchor, title=f"Business Plan {time.strftime('%Y-%m-%d %H:%M:%S')}", status="draft", - content_markdown=summary, - structured_payload={"source_message_ids": [str(row.id) for row in rows]}, + content_markdown=content, + structured_payload=payload, ) await sync_to_async(BusinessPlanRevision.objects.create)( document=document, editor_user=trigger.user, - content_markdown=summary, - structured_payload={"source_message_ids": [str(row.id) for row in rows]}, + content_markdown=content, + structured_payload=payload, ) + run.result_ref = document + await sync_to_async(run.save)(update_fields=["result_ref", "updated_at"]) + return document + + async def _execute_set_or_range( + self, + *, + trigger: Message, + run: CommandRun, + profile, + action_types: set[str], + parsed: BPParsedCommand, + ) -> CommandResult: + mode = str(parsed.command or "") + remainder = parsed.remainder_text + anchor = trigger.reply_to + + if mode == "set_range": + if anchor is None: + run.status = "failed" + run.error = "bp_set_range_requires_reply_target" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) + rows = await self._load_window(trigger, anchor) + content = 
plain_text_blob(rows) + if not content.strip(): + run.status = "failed" + run.error = "bp_set_range_empty_content" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) + annotation = self._annotation("set_range", len(rows)) + doc = await self._persist_document( + run=run, + trigger=trigger, + profile=profile, + anchor=anchor, + content=content, + mode="set_range", + source_message_ids=[str(row.id) for row in rows], + annotation=annotation, + ) + elif mode == "set": + source_ids: list[str] = [] + if anchor is not None and not remainder: + content = str(anchor.text or "").strip() or "(no text)" + source_ids.append(str(anchor.id)) + has_addendum = False + elif anchor is not None and remainder: + base = str(anchor.text or "").strip() or "(no text)" + content = ( + f"{base}\n" + "--- Addendum (newer message text) ---\n" + f"{remainder}" + ) + source_ids.extend([str(anchor.id), str(trigger.id)]) + has_addendum = True + elif remainder: + content = remainder + source_ids.append(str(trigger.id)) + has_addendum = False + else: + run.status = "failed" + run.error = "bp_set_empty_content" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) + + annotation = self._annotation("set", 1 if not has_addendum else 2, has_addendum) + doc = await self._persist_document( + run=run, + trigger=trigger, + profile=profile, + anchor=anchor, + content=content, + mode="set", + source_message_ids=source_ids, + annotation=annotation, + ) + else: + run.status = "failed" + run.error = "bp_unknown_subcommand" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) fanout_stats = {"sent_bindings": 0, "failed_bindings": 0} - fanout_text = summary - if ai_warning: - warning_text = str(ai_warning or "").strip() - if 
len(warning_text) > 300: - warning_text = warning_text[:297].rstrip() + "..." - fanout_text = ( - "[bp] AI generation failed. Draft document was saved in fallback mode." - + (f"\nReason: {warning_text}" if warning_text else "") - ) if "post_result" in action_types: - fanout_stats = await self._fanout(run, fanout_text) + fanout_body = f"{doc.content_markdown}\n\n{doc.structured_payload.get('annotation', '')}".strip() + fanout_stats = await self._fanout(run, fanout_body) if "status_in_source" == profile.visibility_mode: - status_text = f"[bp] Generated business plan: {document.title}" - if ai_warning: - status_text += " (fallback mode)" + status_text = ( + f"[bp] {doc.structured_payload.get('annotation', '').strip()} " + f"Saved as {doc.title}." + ).strip() sent_count = int(fanout_stats.get("sent_bindings") or 0) failed_count = int(fanout_stats.get("failed_bindings") or 0) if sent_count or failed_count: @@ -270,13 +284,154 @@ class BPCommandHandler(CommandHandler): ) run.status = "ok" - run.result_ref = document - run.error = ai_warning - await sync_to_async(run.save)( - update_fields=["status", "result_ref", "error", "updated_at"] + run.error = "" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=True, status="ok", payload={"document_id": str(doc.id)}) + + async def _execute_legacy_ai( + self, + *, + trigger: Message, + run: CommandRun, + profile, + action_types: set[str], + ctx: CommandContext, + ) -> CommandResult: + if trigger.reply_to_id is None: + run.status = "failed" + run.error = "bp_requires_reply_target" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) + + anchor = trigger.reply_to + rows = await self._load_window(trigger, anchor) + transcript = messages_to_string( + rows, + author_rewrites={"USER": "Operator", "BOT": "Assistant"}, ) - return CommandResult( - ok=True, - status="ok", - 
payload={"document_id": str(document.id)}, + max_transcript_chars = int(getattr(settings, "BP_MAX_TRANSCRIPT_CHARS", 12000) or 12000) + transcript = _clamp_transcript(transcript, max_transcript_chars) + default_template = ( + "Business Plan:\n" + "- Objective\n" + "- Audience\n" + "- Offer\n" + "- GTM\n" + "- Risks" + ) + template_text = profile.template_text or default_template + max_template_chars = int(getattr(settings, "BP_MAX_TEMPLATE_CHARS", 5000) or 5000) + template_text = str(template_text or "")[:max_template_chars] + ai_obj = await sync_to_async(lambda: AI.objects.filter(user=trigger.user).first())() + if ai_obj is None: + run.status = "failed" + run.error = "ai_not_configured" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) + + prompt = [ + {"role": "system", "content": _bp_system_prompt()}, + { + "role": "user", + "content": ( + "Template:\n" + f"{template_text}\n\n" + "Messages:\n" + f"{transcript}" + ), + }, + ] + try: + summary = str(await ai_runner.run_prompt(prompt, ai_obj, operation="command_bp_extract") or "").strip() + if not summary: + raise RuntimeError("empty_ai_response") + except Exception as exc: + run.status = "failed" + run.error = f"bp_ai_failed:{exc}" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=False, status="failed", error=run.error) + + annotation = self._annotation("legacy", len(rows)) + document = await self._persist_document( + run=run, + trigger=trigger, + profile=profile, + anchor=anchor, + content=summary, + mode="legacy_ai", + source_message_ids=[str(row.id) for row in rows], + annotation=annotation, + ) + + fanout_stats = {"sent_bindings": 0, "failed_bindings": 0} + if "post_result" in action_types: + fanout_stats = await self._fanout(run, summary) + + if "status_in_source" == profile.visibility_mode: + status_text = f"[bp] Generated business plan: {document.title}" 
+ sent_count = int(fanout_stats.get("sent_bindings") or 0) + failed_count = int(fanout_stats.get("failed_bindings") or 0) + if sent_count or failed_count: + status_text += f" · fanout sent:{sent_count}" + if failed_count: + status_text += f" failed:{failed_count}" + await post_status_in_source( + trigger_message=trigger, + text=status_text, + origin_tag=f"bp-status:{trigger.id}", + ) + + run.status = "ok" + run.error = "" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + return CommandResult(ok=True, status="ok", payload={"document_id": str(document.id)}) + + async def execute(self, ctx: CommandContext) -> CommandResult: + trigger = await sync_to_async( + lambda: Message.objects.select_related("user", "session").filter(id=ctx.message_id).first() + )() + if trigger is None: + return CommandResult(ok=False, status="failed", error="trigger_not_found") + + profile = await sync_to_async( + lambda: trigger.user.commandprofile_set.filter(slug=self.slug, enabled=True).first() + )() + if profile is None: + return CommandResult(ok=False, status="skipped", error="profile_missing") + + actions = await sync_to_async(list)( + CommandAction.objects.filter(profile=profile, enabled=True).order_by("position", "id") + ) + action_types = {row.action_type for row in actions} + if "extract_bp" not in action_types: + return CommandResult(ok=False, status="skipped", error="extract_bp_disabled") + + run, created = await sync_to_async(CommandRun.objects.get_or_create)( + profile=profile, + trigger_message=trigger, + defaults={"user": trigger.user, "status": "running"}, + ) + if not created and run.status in {"ok", "running"}: + return CommandResult(ok=True, status="ok", payload={"document_id": str(run.result_ref_id or "")}) + + run.status = "running" + run.error = "" + await sync_to_async(run.save)(update_fields=["status", "error", "updated_at"]) + + parsed = parse_bp_subcommand(ctx.message_text) + if parsed.command and bool(getattr(settings, 
"BP_SUBCOMMANDS_V1", True)): + return await self._execute_set_or_range( + trigger=trigger, + run=run, + profile=profile, + action_types=action_types, + parsed=parsed, + ) + + return await self._execute_legacy_ai( + trigger=trigger, + run=run, + profile=profile, + action_types=action_types, + ctx=ctx, ) diff --git a/core/messaging/text_export.py b/core/messaging/text_export.py new file mode 100644 index 0000000..da560aa --- /dev/null +++ b/core/messaging/text_export.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from typing import Iterable + +from core.models import Message + + +def normalize_message_text(message: Message) -> str: + text = str(getattr(message, "text", "") or "").strip() + return text or "(no text)" + + +def plain_text_lines(messages: Iterable[Message]) -> list[str]: + return [normalize_message_text(message) for message in list(messages)] + + +def plain_text_blob(messages: Iterable[Message]) -> str: + return "\n".join(plain_text_lines(messages)) diff --git a/core/migrations/0029_answermemory_answersuggestionevent_chattasksource_and_more.py b/core/migrations/0029_answermemory_answersuggestionevent_chattasksource_and_more.py new file mode 100644 index 0000000..e38ae5e --- /dev/null +++ b/core/migrations/0029_answermemory_answersuggestionevent_chattasksource_and_more.py @@ -0,0 +1,341 @@ +# Generated by Django 5.2.11 on 2026-03-02 11:55 + +import django.db.models.deletion +import uuid +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0028_airunlog'), + ] + + operations = [ + migrations.CreateModel( + name='AnswerMemory', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('service', models.CharField(choices=[('signal', 'Signal'), ('whatsapp', 'WhatsApp'), ('xmpp', 'XMPP'), ('instagram', 'Instagram'), ('web', 'Web')], max_length=255)), + ('channel_identifier', 
models.CharField(max_length=255)), + ('question_fingerprint', models.CharField(max_length=128)), + ('question_text', models.TextField(blank=True, default='')), + ('answer_text', models.TextField(blank=True, default='')), + ('confidence_meta', models.JSONField(blank=True, default=dict)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ], + ), + migrations.CreateModel( + name='AnswerSuggestionEvent', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('status', models.CharField(choices=[('suggested', 'Suggested'), ('accepted', 'Accepted'), ('dismissed', 'Dismissed')], default='suggested', max_length=32)), + ('score', models.FloatField(default=0.0)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='ChatTaskSource', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('service', models.CharField(choices=[('signal', 'Signal'), ('whatsapp', 'WhatsApp'), ('xmpp', 'XMPP'), ('instagram', 'Instagram'), ('web', 'Web')], max_length=255)), + ('channel_identifier', models.CharField(max_length=255)), + ('enabled', models.BooleanField(default=True)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='DerivedTask', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('title', models.CharField(max_length=255)), + ('source_service', models.CharField(choices=[('signal', 'Signal'), ('whatsapp', 'WhatsApp'), ('xmpp', 'XMPP'), ('instagram', 'Instagram'), ('web', 'Web')], max_length=255)), + ('source_channel', models.CharField(max_length=255)), + ('reference_code', models.CharField(blank=True, default='', max_length=64)), + ('external_key', models.CharField(blank=True, default='', 
max_length=255)), + ('status_snapshot', models.CharField(blank=True, default='open', max_length=64)), + ('immutable_payload', models.JSONField(blank=True, default=dict)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ], + ), + migrations.CreateModel( + name='DerivedTaskEvent', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('event_type', models.CharField(choices=[('created', 'Created'), ('progress', 'Progress'), ('completion_marked', 'Completion Marked'), ('synced', 'Synced'), ('sync_failed', 'Sync Failed'), ('parse_warning', 'Parse Warning')], max_length=32)), + ('actor_identifier', models.CharField(blank=True, default='', max_length=255)), + ('payload', models.JSONField(blank=True, default=dict)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ], + options={ + 'ordering': ['created_at', 'id'], + }, + ), + migrations.CreateModel( + name='ExternalSyncEvent', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('provider', models.CharField(default='mock', max_length=64)), + ('idempotency_key', models.CharField(blank=True, default='', max_length=255)), + ('status', models.CharField(choices=[('pending', 'Pending'), ('ok', 'OK'), ('failed', 'Failed'), ('retrying', 'Retrying')], default='pending', max_length=32)), + ('payload', models.JSONField(blank=True, default=dict)), + ('error', models.TextField(blank=True, default='')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='TaskCompletionPattern', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('phrase', models.CharField(max_length=64)), + ('enabled', models.BooleanField(default=True)), + ('position', models.PositiveIntegerField(default=0)), + ('created_at', 
models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='TaskEpic', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('name', models.CharField(max_length=255)), + ('external_key', models.CharField(blank=True, default='', max_length=255)), + ('active', models.BooleanField(default=True)), + ('settings', models.JSONField(blank=True, default=dict)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='TaskProject', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('name', models.CharField(max_length=255)), + ('external_key', models.CharField(blank=True, default='', max_length=255)), + ('active', models.BooleanField(default=True)), + ('settings', models.JSONField(blank=True, default=dict)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.CreateModel( + name='TaskProviderConfig', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('provider', models.CharField(default='mock', max_length=64)), + ('enabled', models.BooleanField(default=False)), + ('settings', models.JSONField(blank=True, default=dict)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + ), + migrations.RenameIndex( + model_name='airunlog', + new_name='core_airunl_user_id_13b24a_idx', + old_name='core_airunl_user_id_6f4700_idx', + ), + migrations.RenameIndex( + model_name='airunlog', + new_name='core_airunl_user_id_678025_idx', + old_name='core_airunl_user_id_b4486e_idx', + ), + migrations.RenameIndex( + model_name='airunlog', + new_name='core_airunl_user_id_55c2d4_idx', + 
old_name='core_airunl_user_id_4f0f5e_idx', + ), + migrations.RenameIndex( + model_name='airunlog', + new_name='core_airunl_user_id_bef024_idx', + old_name='core_airunl_user_id_953bff_idx', + ), + migrations.AddField( + model_name='answermemory', + name='answer_message', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='answer_memory_rows', to='core.message'), + ), + migrations.AddField( + model_name='answermemory', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='answer_memory', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='answersuggestionevent', + name='candidate_answer', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='suggestion_events', to='core.answermemory'), + ), + migrations.AddField( + model_name='answersuggestionevent', + name='message', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='answer_suggestion_events', to='core.message'), + ), + migrations.AddField( + model_name='answersuggestionevent', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='answer_suggestion_events', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='chattasksource', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chat_task_sources', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='derivedtask', + name='origin_message', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='derived_task_origins', to='core.message'), + ), + migrations.AddField( + model_name='derivedtask', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='derived_tasks', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + 
model_name='derivedtaskevent', + name='source_message', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='derived_task_events', to='core.message'), + ), + migrations.AddField( + model_name='derivedtaskevent', + name='task', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='events', to='core.derivedtask'), + ), + migrations.AddField( + model_name='externalsyncevent', + name='task', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='external_sync_events', to='core.derivedtask'), + ), + migrations.AddField( + model_name='externalsyncevent', + name='task_event', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='external_sync_events', to='core.derivedtaskevent'), + ), + migrations.AddField( + model_name='externalsyncevent', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='external_sync_events', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='taskcompletionpattern', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_completion_patterns', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='derivedtask', + name='epic', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='derived_tasks', to='core.taskepic'), + ), + migrations.AddField( + model_name='chattasksource', + name='epic', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='chat_sources', to='core.taskepic'), + ), + migrations.AddField( + model_name='taskproject', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_projects', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + 
model_name='taskepic', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='epics', to='core.taskproject'), + ), + migrations.AddField( + model_name='derivedtask', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='derived_tasks', to='core.taskproject'), + ), + migrations.AddField( + model_name='chattasksource', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chat_sources', to='core.taskproject'), + ), + migrations.AddField( + model_name='taskproviderconfig', + name='user', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_provider_configs', to=settings.AUTH_USER_MODEL), + ), + migrations.AddIndex( + model_name='answermemory', + index=models.Index(fields=['user', 'service', 'channel_identifier', 'created_at'], name='core_answer_user_id_b88ba6_idx'), + ), + migrations.AddIndex( + model_name='answermemory', + index=models.Index(fields=['user', 'question_fingerprint', 'created_at'], name='core_answer_user_id_9353c7_idx'), + ), + migrations.AddIndex( + model_name='answersuggestionevent', + index=models.Index(fields=['user', 'status', 'created_at'], name='core_answer_user_id_05d0f9_idx'), + ), + migrations.AddIndex( + model_name='answersuggestionevent', + index=models.Index(fields=['message', 'status'], name='core_answer_message_1cb119_idx'), + ), + migrations.AddIndex( + model_name='derivedtaskevent', + index=models.Index(fields=['task', 'created_at'], name='core_derive_task_id_897ae5_idx'), + ), + migrations.AddIndex( + model_name='derivedtaskevent', + index=models.Index(fields=['event_type', 'created_at'], name='core_derive_event_t_1cf04b_idx'), + ), + migrations.AddIndex( + model_name='externalsyncevent', + index=models.Index(fields=['user', 'provider', 'status', 'updated_at'], name='core_extern_user_id_e71276_idx'), + ), + migrations.AddIndex( + 
model_name='externalsyncevent', + index=models.Index(fields=['idempotency_key'], name='core_extern_idempot_dce064_idx'), + ), + migrations.AddIndex( + model_name='taskcompletionpattern', + index=models.Index(fields=['user', 'enabled', 'position'], name='core_taskco_user_id_0c1b5e_idx'), + ), + migrations.AddConstraint( + model_name='taskcompletionpattern', + constraint=models.UniqueConstraint(fields=('user', 'phrase'), name='unique_task_completion_phrase_per_user'), + ), + migrations.AddIndex( + model_name='taskproject', + index=models.Index(fields=['user', 'active', 'updated_at'], name='core_taskpr_user_id_4f8472_idx'), + ), + migrations.AddConstraint( + model_name='taskproject', + constraint=models.UniqueConstraint(fields=('user', 'name'), name='unique_task_project_name_per_user'), + ), + migrations.AddIndex( + model_name='taskepic', + index=models.Index(fields=['project', 'active', 'updated_at'], name='core_taskep_project_ea76c3_idx'), + ), + migrations.AddConstraint( + model_name='taskepic', + constraint=models.UniqueConstraint(fields=('project', 'name'), name='unique_task_epic_name_per_project'), + ), + migrations.AddIndex( + model_name='derivedtask', + index=models.Index(fields=['user', 'project', 'created_at'], name='core_derive_user_id_a98675_idx'), + ), + migrations.AddIndex( + model_name='derivedtask', + index=models.Index(fields=['user', 'source_service', 'source_channel'], name='core_derive_user_id_aaa167_idx'), + ), + migrations.AddIndex( + model_name='derivedtask', + index=models.Index(fields=['user', 'reference_code'], name='core_derive_user_id_d06303_idx'), + ), + migrations.AddIndex( + model_name='chattasksource', + index=models.Index(fields=['user', 'service', 'channel_identifier', 'enabled'], name='core_chatta_user_id_01f271_idx'), + ), + migrations.AddIndex( + model_name='chattasksource', + index=models.Index(fields=['project', 'enabled'], name='core_chatta_project_826bab_idx'), + ), + migrations.AddConstraint( + model_name='taskproviderconfig', 
+ constraint=models.UniqueConstraint(fields=('user', 'provider'), name='unique_task_provider_config_per_user'), + ), + ] diff --git a/core/models.py b/core/models.py index 6136b17..8ff5630 100644 --- a/core/models.py +++ b/core/models.py @@ -1921,6 +1921,291 @@ class TranslationEventLog(models.Model): ] +class AnswerMemory(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="answer_memory") + service = models.CharField(max_length=255, choices=CHANNEL_SERVICE_CHOICES) + channel_identifier = models.CharField(max_length=255) + question_fingerprint = models.CharField(max_length=128) + question_text = models.TextField(blank=True, default="") + answer_message = models.ForeignKey( + Message, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="answer_memory_rows", + ) + answer_text = models.TextField(blank=True, default="") + confidence_meta = models.JSONField(default=dict, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + + class Meta: + indexes = [ + models.Index(fields=["user", "service", "channel_identifier", "created_at"]), + models.Index(fields=["user", "question_fingerprint", "created_at"]), + ] + + +class AnswerSuggestionEvent(models.Model): + STATUS_CHOICES = ( + ("suggested", "Suggested"), + ("accepted", "Accepted"), + ("dismissed", "Dismissed"), + ) + + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey( + User, + on_delete=models.CASCADE, + related_name="answer_suggestion_events", + ) + message = models.ForeignKey( + Message, + on_delete=models.CASCADE, + related_name="answer_suggestion_events", + ) + status = models.CharField(max_length=32, choices=STATUS_CHOICES, default="suggested") + candidate_answer = models.ForeignKey( + AnswerMemory, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="suggestion_events", + ) + score = 
models.FloatField(default=0.0) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + indexes = [ + models.Index(fields=["user", "status", "created_at"]), + models.Index(fields=["message", "status"]), + ] + + +class TaskProject(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="task_projects") + name = models.CharField(max_length=255) + external_key = models.CharField(max_length=255, blank=True, default="") + active = models.BooleanField(default=True) + settings = models.JSONField(default=dict, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["user", "name"], + name="unique_task_project_name_per_user", + ) + ] + indexes = [models.Index(fields=["user", "active", "updated_at"])] + + +class TaskEpic(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + project = models.ForeignKey( + TaskProject, + on_delete=models.CASCADE, + related_name="epics", + ) + name = models.CharField(max_length=255) + external_key = models.CharField(max_length=255, blank=True, default="") + active = models.BooleanField(default=True) + settings = models.JSONField(default=dict, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["project", "name"], + name="unique_task_epic_name_per_project", + ) + ] + indexes = [models.Index(fields=["project", "active", "updated_at"])] + + +class ChatTaskSource(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="chat_task_sources") + service = 
models.CharField(max_length=255, choices=CHANNEL_SERVICE_CHOICES) + channel_identifier = models.CharField(max_length=255) + project = models.ForeignKey( + TaskProject, + on_delete=models.CASCADE, + related_name="chat_sources", + ) + epic = models.ForeignKey( + TaskEpic, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="chat_sources", + ) + enabled = models.BooleanField(default=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + indexes = [ + models.Index(fields=["user", "service", "channel_identifier", "enabled"]), + models.Index(fields=["project", "enabled"]), + ] + + +class DerivedTask(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="derived_tasks") + project = models.ForeignKey( + TaskProject, + on_delete=models.CASCADE, + related_name="derived_tasks", + ) + epic = models.ForeignKey( + TaskEpic, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="derived_tasks", + ) + title = models.CharField(max_length=255) + source_service = models.CharField(max_length=255, choices=CHANNEL_SERVICE_CHOICES) + source_channel = models.CharField(max_length=255) + origin_message = models.ForeignKey( + Message, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="derived_task_origins", + ) + reference_code = models.CharField(max_length=64, blank=True, default="") + external_key = models.CharField(max_length=255, blank=True, default="") + status_snapshot = models.CharField(max_length=64, blank=True, default="open") + immutable_payload = models.JSONField(default=dict, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + + class Meta: + indexes = [ + models.Index(fields=["user", "project", "created_at"]), + models.Index(fields=["user", "source_service", "source_channel"]), + models.Index(fields=["user", 
"reference_code"]), + ] + + +class DerivedTaskEvent(models.Model): + EVENT_CHOICES = ( + ("created", "Created"), + ("progress", "Progress"), + ("completion_marked", "Completion Marked"), + ("synced", "Synced"), + ("sync_failed", "Sync Failed"), + ("parse_warning", "Parse Warning"), + ) + + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + task = models.ForeignKey( + DerivedTask, + on_delete=models.CASCADE, + related_name="events", + ) + event_type = models.CharField(max_length=32, choices=EVENT_CHOICES) + actor_identifier = models.CharField(max_length=255, blank=True, default="") + source_message = models.ForeignKey( + Message, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="derived_task_events", + ) + payload = models.JSONField(default=dict, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + + class Meta: + ordering = ["created_at", "id"] + indexes = [ + models.Index(fields=["task", "created_at"]), + models.Index(fields=["event_type", "created_at"]), + ] + + +class ExternalSyncEvent(models.Model): + STATUS_CHOICES = ( + ("pending", "Pending"), + ("ok", "OK"), + ("failed", "Failed"), + ("retrying", "Retrying"), + ) + + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="external_sync_events") + task = models.ForeignKey( + DerivedTask, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="external_sync_events", + ) + task_event = models.ForeignKey( + DerivedTaskEvent, + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="external_sync_events", + ) + provider = models.CharField(max_length=64, default="mock") + idempotency_key = models.CharField(max_length=255, blank=True, default="") + status = models.CharField(max_length=32, choices=STATUS_CHOICES, default="pending") + payload = models.JSONField(default=dict, blank=True) + error = models.TextField(blank=True, 
default="") + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + indexes = [ + models.Index(fields=["user", "provider", "status", "updated_at"]), + models.Index(fields=["idempotency_key"]), + ] + + +class TaskProviderConfig(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="task_provider_configs") + provider = models.CharField(max_length=64, default="mock") + enabled = models.BooleanField(default=False) + settings = models.JSONField(default=dict, blank=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["user", "provider"], + name="unique_task_provider_config_per_user", + ) + ] + + +class TaskCompletionPattern(models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="task_completion_patterns") + phrase = models.CharField(max_length=64) + enabled = models.BooleanField(default=True) + position = models.PositiveIntegerField(default=0) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + constraints = [ + models.UniqueConstraint( + fields=["user", "phrase"], + name="unique_task_completion_phrase_per_user", + ) + ] + indexes = [models.Index(fields=["user", "enabled", "position"])] + + # class Perms(models.Model): # class Meta: # permissions = ( diff --git a/core/modules/router.py b/core/modules/router.py index 4cb9392..efd1cef 100644 --- a/core/modules/router.py +++ b/core/modules/router.py @@ -8,6 +8,7 @@ from core.clients.instagram import InstagramClient from core.clients.signal import SignalClient from core.clients.whatsapp import WhatsAppClient from core.clients.xmpp import 
XMPPClient +from core.assist.engine import process_inbound_assist from core.commands.base import CommandContext from core.commands.engine import process_inbound_message from core.messaging import history @@ -122,6 +123,10 @@ class UnifiedRouter(object): await process_inbound_translation(local_message) except Exception as exc: self.log.warning("Translation engine processing failed: %s", exc) + try: + await process_inbound_assist(local_message) + except Exception as exc: + self.log.warning("Assist/task processing failed: %s", exc) async def _resolve_identifier_objects(self, protocol, identifier): if isinstance(identifier, PersonIdentifier): diff --git a/core/tasks/__init__.py b/core/tasks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/tasks/engine.py b/core/tasks/engine.py new file mode 100644 index 0000000..23fdb9b --- /dev/null +++ b/core/tasks/engine.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import re + +from asgiref.sync import sync_to_async +from django.conf import settings + +from core.clients.transport import send_message_raw +from core.messaging import ai as ai_runner +from core.models import ( + AI, + ChatTaskSource, + DerivedTask, + DerivedTaskEvent, + ExternalSyncEvent, + Message, + TaskCompletionPattern, + TaskProviderConfig, +) +from core.tasks.providers.mock import get_provider + +_TASK_HINT_RE = re.compile(r"\b(todo|task|action|need to|please)\b", re.IGNORECASE) +_COMPLETION_RE = re.compile(r"\b(done|completed|fixed)\s*#([A-Za-z0-9_-]+)\b", re.IGNORECASE) +_BALANCED_HINT_RE = re.compile(r"\b(todo|task|action item|action)\b", re.IGNORECASE) +_BROAD_HINT_RE = re.compile(r"\b(todo|task|action|need to|please|reminder)\b", re.IGNORECASE) + + +def _channel_variants(service: str, channel: str) -> list[str]: + value = str(channel or "").strip() + if not value: + return [] + variants = [value] + if str(service or "").strip().lower() == "whatsapp": + bare = value.split("@", 1)[0].strip() + if bare and bare not in 
variants: + variants.append(bare) + group = f"{bare}@g.us" if bare else "" + if group and group not in variants: + variants.append(group) + return variants + + +async def _resolve_source_mappings(message: Message) -> list[ChatTaskSource]: + variants = _channel_variants(message.source_service or "", message.source_chat_id or "") + if not variants: + return [] + return await sync_to_async(list)( + ChatTaskSource.objects.filter( + user=message.user, + enabled=True, + service=message.source_service, + channel_identifier__in=variants, + ).select_related("project", "epic") + ) + + +def _to_bool(raw, default=False) -> bool: + if raw is None: + return bool(default) + value = str(raw).strip().lower() + if value in {"1", "true", "yes", "on", "y"}: + return True + if value in {"0", "false", "no", "off", "n"}: + return False + return bool(default) + + +def _parse_prefixes(raw) -> list[str]: + if isinstance(raw, list): + values = raw + else: + values = str(raw or "").split(",") + rows = [] + for row in values: + item = str(row or "").strip().lower() + if item and item not in rows: + rows.append(item) + return rows or ["task:", "todo:", "action:"] + + +def _normalize_flags(raw: dict | None) -> dict: + row = dict(raw or {}) + return { + "derive_enabled": _to_bool(row.get("derive_enabled"), True), + "match_mode": str(row.get("match_mode") or "balanced").strip().lower() or "balanced", + "require_prefix": _to_bool(row.get("require_prefix"), False), + "allowed_prefixes": _parse_prefixes(row.get("allowed_prefixes")), + "completion_enabled": _to_bool(row.get("completion_enabled"), True), + "ai_title_enabled": _to_bool(row.get("ai_title_enabled"), True), + "announce_task_id": _to_bool(row.get("announce_task_id"), True), + "min_chars": max(1, int(row.get("min_chars") or 3)), + } + + +def _normalize_partial_flags(raw: dict | None) -> dict: + row = dict(raw or {}) + out = {} + if "derive_enabled" in row: + out["derive_enabled"] = _to_bool(row.get("derive_enabled"), True) + if "match_mode" 
in row: + out["match_mode"] = str(row.get("match_mode") or "balanced").strip().lower() or "balanced" + if "require_prefix" in row: + out["require_prefix"] = _to_bool(row.get("require_prefix"), False) + if "allowed_prefixes" in row: + out["allowed_prefixes"] = _parse_prefixes(row.get("allowed_prefixes")) + if "completion_enabled" in row: + out["completion_enabled"] = _to_bool(row.get("completion_enabled"), True) + if "ai_title_enabled" in row: + out["ai_title_enabled"] = _to_bool(row.get("ai_title_enabled"), True) + if "announce_task_id" in row: + out["announce_task_id"] = _to_bool(row.get("announce_task_id"), True) + if "min_chars" in row: + out["min_chars"] = max(1, int(row.get("min_chars") or 3)) + return out + + +def _effective_flags(source: ChatTaskSource) -> dict: + project_flags = _normalize_flags(getattr(getattr(source, "project", None), "settings", {}) or {}) + source_flags = _normalize_partial_flags(getattr(source, "settings", {}) or {}) + merged = dict(project_flags) + merged.update(source_flags) + return merged + + +def _is_task_candidate(text: str, flags: dict) -> bool: + body = str(text or "").strip() + if len(body) < int(flags.get("min_chars") or 1): + return False + body_lower = body.lower() + prefixes = list(flags.get("allowed_prefixes") or []) + has_prefix = any(body_lower.startswith(prefix) for prefix in prefixes) + if bool(flags.get("require_prefix")) and not has_prefix: + return False + mode = str(flags.get("match_mode") or "balanced").strip().lower() + if mode == "strict": + return has_prefix + if mode == "broad": + return has_prefix or bool(_BROAD_HINT_RE.search(body)) + return has_prefix or bool(_BALANCED_HINT_RE.search(body)) + + +def _next_reference(user, project) -> str: + last = ( + DerivedTask.objects.filter(user=user, project=project) + .exclude(reference_code="") + .order_by("-created_at") + .first() + ) + if not last: + return "1" + try: + return str(int(str(last.reference_code)) + 1) + except Exception: + return 
str(DerivedTask.objects.filter(user=user, project=project).count() + 1) + + +async def _derive_title(message: Message) -> str: + text = str(message.text or "").strip() + if not text: + return "Untitled task" + if not bool(getattr(settings, "TASK_DERIVATION_USE_AI", True)): + return text[:255] + ai_obj = await sync_to_async(lambda: AI.objects.filter(user=message.user).first())() + if not ai_obj: + return text[:255] + prompt = [ + { + "role": "system", + "content": "Extract one concise actionable task title from the message. Return plain text only.", + }, + {"role": "user", "content": text[:2000]}, + ] + try: + title = str(await ai_runner.run_prompt(prompt, ai_obj, operation="task_derive_title") or "").strip() + except Exception: + title = "" + return (title or text)[:255] + + +async def _derive_title_with_flags(message: Message, flags: dict) -> str: + if not bool(flags.get("ai_title_enabled", True)): + text = str(message.text or "").strip() + return (text or "Untitled task")[:255] + return await _derive_title(message) + + +async def _emit_sync_event(task: DerivedTask, event: DerivedTaskEvent, action: str) -> None: + cfg = await sync_to_async( + lambda: TaskProviderConfig.objects.filter(user=task.user, enabled=True).order_by("provider").first() + )() + provider_name = str(getattr(cfg, "provider", "mock") or "mock") + provider_settings = dict(getattr(cfg, "settings", {}) or {}) + provider = get_provider(provider_name) + idempotency_key = f"{provider_name}:{task.id}:{event.id}" + + if action == "create": + result = provider.create_task(provider_settings, { + "task_id": str(task.id), + "title": task.title, + "external_key": task.external_key, + "reference_code": task.reference_code, + }) + elif action == "complete": + result = provider.mark_complete(provider_settings, { + "task_id": str(task.id), + "external_key": task.external_key, + "reference_code": task.reference_code, + }) + else: + result = provider.append_update(provider_settings, { + "task_id": str(task.id), + 
"external_key": task.external_key, + "reference_code": task.reference_code, + "payload": event.payload, + }) + + status = "ok" if result.ok else "failed" + await sync_to_async(ExternalSyncEvent.objects.update_or_create)( + idempotency_key=idempotency_key, + defaults={ + "user": task.user, + "task": task, + "task_event": event, + "provider": provider_name, + "status": status, + "payload": dict(result.payload or {}), + "error": str(result.error or ""), + }, + ) + if result.ok and result.external_key and not task.external_key: + task.external_key = str(result.external_key) + await sync_to_async(task.save)(update_fields=["external_key"]) + + +async def _completion_regex(message: Message) -> re.Pattern: + patterns = await sync_to_async(list)( + TaskCompletionPattern.objects.filter(user=message.user, enabled=True).order_by("position", "created_at") + ) + phrases = [str(row.phrase or "").strip() for row in patterns if str(row.phrase or "").strip()] + if not phrases: + phrases = ["done", "completed", "fixed"] + return re.compile(r"\\b(?:" + "|".join(re.escape(p) for p in phrases) + r")\\s*#([A-Za-z0-9_-]+)\\b", re.IGNORECASE) + + +async def process_inbound_task_intelligence(message: Message) -> None: + if message is None: + return + if dict(message.message_meta or {}).get("origin_tag"): + return + text = str(message.text or "").strip() + if not text: + return + + sources = await _resolve_source_mappings(message) + if not sources: + return + + completion_allowed = any(bool(_effective_flags(source).get("completion_enabled")) for source in sources) + completion_rx = await _completion_regex(message) if completion_allowed else None + marker_match = (completion_rx.search(text) if completion_rx else None) or (_COMPLETION_RE.search(text) if completion_allowed else None) + if marker_match: + ref_code = str(marker_match.group(marker_match.lastindex or 1) or "").strip() + task = await sync_to_async( + lambda: DerivedTask.objects.filter(user=message.user, 
reference_code=ref_code).order_by("-created_at").first() + )() + if not task: + # parser warning event attached to a newly derived placeholder in mapped project + source = sources[0] + placeholder = await sync_to_async(DerivedTask.objects.create)( + user=message.user, + project=source.project, + epic=source.epic, + title=f"Unresolved completion marker #{ref_code}", + source_service=message.source_service or "web", + source_channel=message.source_chat_id or "", + origin_message=message, + reference_code=ref_code, + status_snapshot="warning", + immutable_payload={"warning": "completion_marker_unresolved"}, + ) + await sync_to_async(DerivedTaskEvent.objects.create)( + task=placeholder, + event_type="parse_warning", + actor_identifier=str(message.sender_uuid or ""), + source_message=message, + payload={"reason": "completion_marker_unresolved", "marker": ref_code}, + ) + return + + task.status_snapshot = "completed" + await sync_to_async(task.save)(update_fields=["status_snapshot"]) + event = await sync_to_async(DerivedTaskEvent.objects.create)( + task=task, + event_type="completion_marked", + actor_identifier=str(message.sender_uuid or ""), + source_message=message, + payload={"marker": ref_code}, + ) + await _emit_sync_event(task, event, "complete") + return + + for source in sources: + flags = _effective_flags(source) + if not bool(flags.get("derive_enabled", True)): + continue + if not _is_task_candidate(text, flags): + continue + title = await _derive_title_with_flags(message, flags) + reference = await sync_to_async(_next_reference)(message.user, source.project) + task = await sync_to_async(DerivedTask.objects.create)( + user=message.user, + project=source.project, + epic=source.epic, + title=title, + source_service=message.source_service or "web", + source_channel=message.source_chat_id or "", + origin_message=message, + reference_code=reference, + status_snapshot="open", + immutable_payload={"origin_text": text, "flags": flags}, + ) + event = await 
sync_to_async(DerivedTaskEvent.objects.create)( + task=task, + event_type="created", + actor_identifier=str(message.sender_uuid or ""), + source_message=message, + payload={"origin_text": text}, + ) + await _emit_sync_event(task, event, "create") + if bool(flags.get("announce_task_id", True)): + try: + await send_message_raw( + message.source_service or "web", + message.source_chat_id or "", + text=f"[task] Created #{task.reference_code}: {task.title}", + attachments=[], + metadata={"origin": "task_announce"}, + ) + except Exception: + # Announcement is best-effort and should not block derivation. + pass diff --git a/core/tasks/providers/__init__.py b/core/tasks/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/tasks/providers/base.py b/core/tasks/providers/base.py new file mode 100644 index 0000000..375945e --- /dev/null +++ b/core/tasks/providers/base.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass(slots=True) +class ProviderResult: + ok: bool + external_key: str = "" + error: str = "" + payload: dict | None = None + + +class TaskProvider: + name = "base" + + def healthcheck(self, config: dict) -> ProviderResult: + raise NotImplementedError + + def create_task(self, config: dict, payload: dict) -> ProviderResult: + raise NotImplementedError + + def append_update(self, config: dict, payload: dict) -> ProviderResult: + raise NotImplementedError + + def mark_complete(self, config: dict, payload: dict) -> ProviderResult: + raise NotImplementedError + + def link_task(self, config: dict, payload: dict) -> ProviderResult: + raise NotImplementedError diff --git a/core/tasks/providers/mock.py b/core/tasks/providers/mock.py new file mode 100644 index 0000000..c526394 --- /dev/null +++ b/core/tasks/providers/mock.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +import time + +from .base import ProviderResult, TaskProvider + + +class MockTaskProvider(TaskProvider): + name 
= "mock" + + def healthcheck(self, config: dict) -> ProviderResult: + return ProviderResult(ok=True, payload={"provider": self.name}) + + def create_task(self, config: dict, payload: dict) -> ProviderResult: + ext = str(payload.get("external_key") or "") or f"mock-{int(time.time() * 1000)}" + return ProviderResult(ok=True, external_key=ext, payload={"action": "create_task"}) + + def append_update(self, config: dict, payload: dict) -> ProviderResult: + return ProviderResult(ok=True, external_key=str(payload.get("external_key") or ""), payload={"action": "append_update"}) + + def mark_complete(self, config: dict, payload: dict) -> ProviderResult: + return ProviderResult(ok=True, external_key=str(payload.get("external_key") or ""), payload={"action": "mark_complete"}) + + def link_task(self, config: dict, payload: dict) -> ProviderResult: + return ProviderResult(ok=True, external_key=str(payload.get("external_key") or ""), payload={"action": "link_task"}) + + +PROVIDERS = { + "mock": MockTaskProvider(), +} + + +def get_provider(name: str) -> TaskProvider: + return PROVIDERS.get(str(name or "").strip().lower(), PROVIDERS["mock"]) diff --git a/core/templates/base.html b/core/templates/base.html index eec83c5..9c35a0d 100644 --- a/core/templates/base.html +++ b/core/templates/base.html @@ -326,6 +326,9 @@ + + Tasks + AI diff --git a/core/templates/pages/command-routing.html b/core/templates/pages/command-routing.html index e6e47bc..cab8958 100644 --- a/core/templates/pages/command-routing.html +++ b/core/templates/pages/command-routing.html @@ -52,6 +52,14 @@
  • action post_result: fan out generated result to enabled egress bindings.
  • position: execution order (lower runs first).
  • + {% if profile.slug == "bp" %} +

    Supported Triggers (BP)

    +
      +
    • #bp#: primary BP trigger (uses the standard BP extraction flow).
    • +
    • #bp set#: deterministic no-AI set/update from reply/addendum text.
    • +
    • #bp set range#: deterministic no-AI set/update from reply-anchor to trigger range.
    • +
    + {% endif %}
    {% csrf_token %} diff --git a/core/templates/pages/tasks-detail.html b/core/templates/pages/tasks-detail.html new file mode 100644 index 0000000..d0e0000 --- /dev/null +++ b/core/templates/pages/tasks-detail.html @@ -0,0 +1,34 @@ +{% extends "base.html" %} +{% block content %} +
    +

    Task #{{ task.reference_code }}: {{ task.title }}

    +

    {{ task.project.name }}{% if task.epic %} / {{ task.epic.name }}{% endif %} · {{ task.status_snapshot }}

    + +
    +

    Events

    + + + + {% for row in events %} + + {% empty %} + + {% endfor %} + +
    WhenTypeActorPayload
    {{ row.created_at }}{{ row.event_type }}{{ row.actor_identifier }}{{ row.payload }}
    No events.
    +
    +
    +

    External Sync

    + + + + {% for row in sync_events %} + + {% empty %} + + {% endfor %} + +
    WhenProviderStatusError
    {{ row.updated_at }}{{ row.provider }}{{ row.status }}{{ row.error }}
    No sync events.
    +
    +
    +{% endblock %} diff --git a/core/templates/pages/tasks-epic.html b/core/templates/pages/tasks-epic.html new file mode 100644 index 0000000..95369d9 --- /dev/null +++ b/core/templates/pages/tasks-epic.html @@ -0,0 +1,16 @@ +{% extends "base.html" %} +{% block content %} +
    +

    Epic: {{ epic.name }}

    + + +
    +{% endblock %} diff --git a/core/templates/pages/tasks-group.html b/core/templates/pages/tasks-group.html new file mode 100644 index 0000000..e26a986 --- /dev/null +++ b/core/templates/pages/tasks-group.html @@ -0,0 +1,41 @@ +{% extends "base.html" %} +{% block content %} +
    +

    Group Tasks: {{ channel_display_name }}

    +

    {{ service_label }} · {{ identifier }}

    + {% if not tasks %} +
    +

    No Tasks Yet

    +
    +

    This group has no derived tasks yet. To start populating this view:

    +
      +
    1. Open Task Settings and confirm this chat is mapped under Group Mapping.
    2. +
    3. Send task-like messages in this group, for example: task: ship v1, todo: write tests, please review PR.
    4. +
    5. Mark completion explicitly with a phrase + reference, for example: done #12, completed #12, fixed #12.
    6. +
    7. Refresh this page; new derived tasks and events should appear automatically.
    8. +
    +
    +
    + {% endif %} +
    +

    Mappings

    +
      + {% for row in mappings %} +
    • {{ row.project.name }}{% if row.epic %} / {{ row.epic.name }}{% endif %}
    • + {% empty %} +
    • No mappings for this group.
    • + {% endfor %} +
    +
    + +
    +{% endblock %} diff --git a/core/templates/pages/tasks-hub.html b/core/templates/pages/tasks-hub.html new file mode 100644 index 0000000..f162e77 --- /dev/null +++ b/core/templates/pages/tasks-hub.html @@ -0,0 +1,47 @@ +{% extends "base.html" %} +{% block content %} +
    +
    +

    Tasks

    +

    Immutable tasks derived from chat activity.

    + +
    +
    +
    +

    Projects

    +
      + {% for project in projects %} +
    • {{ project.name }} ({{ project.task_count }})
    • + {% empty %} +
    • No projects yet.
    • + {% endfor %} +
    +
    +
    +
    +
    +

    Recent Derived Tasks

    + + + + {% for row in tasks %} + + + + + + + + {% empty %} + + {% endfor %} + +
    RefTitleProjectStatus
    #{{ row.reference_code }}{{ row.title }}{{ row.project.name }}{% if row.epic %} / {{ row.epic.name }}{% endif %}{{ row.status_snapshot }}Open
    No derived tasks yet.
    +
    +
    +
    +
    +
    +{% endblock %} diff --git a/core/templates/pages/tasks-project.html b/core/templates/pages/tasks-project.html new file mode 100644 index 0000000..dd3505d --- /dev/null +++ b/core/templates/pages/tasks-project.html @@ -0,0 +1,27 @@ +{% extends "base.html" %} +{% block content %} +
    +

    Project: {{ project.name }}

    + +
    +

    Epics

    +
      + {% for epic in epics %} +
    • {{ epic.name }}
    • + {% empty %} +
    • No epics.
    • + {% endfor %} +
    +
    + +
    +{% endblock %} diff --git a/core/templates/pages/tasks-settings.html b/core/templates/pages/tasks-settings.html new file mode 100644 index 0000000..da7b2c1 --- /dev/null +++ b/core/templates/pages/tasks-settings.html @@ -0,0 +1,386 @@ +{% extends "base.html" %} +{% block content %} +
    +
    +

    Task Settings

    +

    Configure task derivation, chat mapping, completion parsing, and external sync behavior.

    + +
    +

    Setting Definitions

    +
    +

    Projects: top-level containers for derived tasks. A single group can map to any project.

    +

    Epics: optional sub-grouping inside a project. Use these for parallel workstreams in the same project.

    +

    Group Mapping: binds a chat channel (service + channel identifier) to a project and optional epic. Task extraction only runs where mappings exist.

    +

    Matching Hierarchy: channel mapping flags override project flags. Project flags are defaults; mapping flags are per-chat precision controls.

    +

False-Positive Controls: code defaults are match_mode=balanced, require_prefix=false, and prefixes task:/todo:/action:. For the safest, prefix-only matching, set match_mode=strict and require_prefix=true on the project or the individual mapping.

    +

    Task ID Announcements: when enabled, newly derived tasks post an in-chat confirmation containing the new task reference (for example #17).

    +

    Completion Phrases: explicit trigger words used to detect completion markers like done #12, completed #12, fixed #12.

    +

    Provider: external sync adapter toggle. In current setup, mock provider validates append-only sync flow and retry behavior.

    +

    Sync Event Log: audit of provider sync attempts and outcomes. Retry replays the event without mutating immutable task source records.

    +
    +
    + + {% if prefill_service and prefill_identifier %} +
    +

    Quick Setup For Current Chat

    +

    Prefilled from compose for {{ prefill_service }} · {{ prefill_identifier }}. Create/update project + epic + channel mapping in one step.

    + + {% csrf_token %} + + + + + +
    +
    + + +
    +
    + + +
    +
    + +
    + +
    +
    +
    + + +
    +
    + + + + + + +
    + {% endif %} + +
    +
    +
    +

    Projects

    +

    Create project scopes used by group mappings and derived tasks.

    +
    + {% csrf_token %} + + + +
    + + +
    +
    + +
    + +
    +
    +
    + + +
    + + + + + +
    +
      + {% for row in projects %}
    • {{ row.name }}
    • {% empty %}
    • No projects.
    • {% endfor %} +
    +
    +
    + +
    +
    +

    Epics

    +

    Create project-local epics to refine routing and reporting.

    +
    + {% csrf_token %} + + + +
    + +
    + +
    +
    +
    + + +
    + +
    +
    +
    + +
    +
    +

    Group Mapping (Chat -> Project/Epic)

    +

    Each mapped group becomes eligible for derived task extraction and completion tracking.

    +
    + {% csrf_token %} + + + +
    +
    + +
    + +
    +
    +
    + + +
    +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + +
    +
    +
    +
    + +
    + +
    +
    +
    + + +
    +
    + + +
    +
    + + + + + +
    + + + + {% for row in sources %} + + {% empty %} + + {% endfor %} + +
    ChatProjectEpic
    {{ row.service }} · {{ row.channel_identifier }}{{ row.project.name }}{{ row.epic.name }}
    No mappings.
    +
    +
    + +
    +
    +

    Project Matching Flags

    +

    Project defaults apply to all mapped chats unless channel-level override changes them.

    +
    + {% csrf_token %} + + + +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + + +
    +
    + + +
    + + + + + +
    +
    + +
    +

    Channel Override Flags

    +

    These flags override project defaults for one mapped chat only.

    +
    + {% csrf_token %} + + + +
    + +
    + +
    +
    +
    + +
    + +
    +
    +
    + + +
    +
    + + +
    + + + + + + +
    +
    +
    + +
    +
    +

    Completion Phrases

    +

    Add parser phrases for completion statements followed by a task reference, e.g. done #12.

    +
    + {% csrf_token %} + + + +
    + + +
    + +
    +
      + {% for row in patterns %}
    • {{ row.phrase }}
    • {% empty %}
    • No phrases.
    • {% endfor %} +
    +
    +
    + +
    +
    +

    Provider

    +

    Enable/disable external sync adapter and review recent provider event outcomes.

    +
    + {% csrf_token %} + + + + + + +
    + + + + {% for row in sync_events %} + + + + + + + {% empty %} + + {% endfor %} + +
    UpdatedProviderStatus
    {{ row.updated_at }}{{ row.provider }}{{ row.status }} +
    + {% csrf_token %} + + + + + +
    +
    No sync events.
    +
    +
    +
    +
    +
    + + +{% endblock %} diff --git a/core/templates/partials/compose-panel.html b/core/templates/partials/compose-panel.html index 1f7a312..71427eb 100644 --- a/core/templates/partials/compose-panel.html +++ b/core/templates/partials/compose-panel.html @@ -117,6 +117,14 @@ Quick Insights + + + Tasks + + + + Task Setup + AI Workspace diff --git a/core/tests/test_bp_fallback.py b/core/tests/test_bp_fallback.py index 8661e0a..4dd333c 100644 --- a/core/tests/test_bp_fallback.py +++ b/core/tests/test_bp_fallback.py @@ -81,7 +81,7 @@ class BPFallbackTests(TransactionTestCase): model="gpt-4o-mini", ) - def test_bp_falls_back_to_draft_when_ai_fails(self): + def test_bp_fails_fast_when_ai_fails(self): anchor = Message.objects.create( user=self.user, session=self.session, @@ -119,11 +119,11 @@ class BPFallbackTests(TransactionTestCase): ) ) - self.assertTrue(result.ok) + self.assertFalse(result.ok) run = CommandRun.objects.get(trigger_message=trigger, profile=self.profile) - self.assertEqual("ok", run.status) + self.assertEqual("failed", run.status) self.assertIn("bp_ai_failed", str(run.error)) - self.assertTrue(BusinessPlanDocument.objects.filter(trigger_message=trigger).exists()) + self.assertFalse(BusinessPlanDocument.objects.filter(trigger_message=trigger).exists()) def test_bp_uses_same_ai_selection_order_as_compose(self): AI.objects.create( diff --git a/core/tests/test_bp_subcommands.py b/core/tests/test_bp_subcommands.py new file mode 100644 index 0000000..11e9a66 --- /dev/null +++ b/core/tests/test_bp_subcommands.py @@ -0,0 +1,205 @@ +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +from asgiref.sync import async_to_sync +from django.test import TransactionTestCase, override_settings + +from core.commands.base import CommandContext +from core.commands.handlers.bp import BPCommandHandler, parse_bp_subcommand +from core.models import ( + BusinessPlanDocument, + ChatSession, + CommandAction, + CommandChannelBinding, + CommandProfile, + 
Message, + Person, + PersonIdentifier, + User, +) + + +@override_settings(BP_SUBCOMMANDS_V1=True) +class BPSubcommandTests(TransactionTestCase): + def setUp(self): + self.user = User.objects.create_user( + username="bp-sub-user", + email="bp-sub@example.com", + password="x", + ) + self.person = Person.objects.create(user=self.user, name="Sub Person") + self.identifier = PersonIdentifier.objects.create( + user=self.user, + person=self.person, + service="whatsapp", + identifier="120363402761690215", + ) + self.session = ChatSession.objects.create(user=self.user, identifier=self.identifier) + self.profile = CommandProfile.objects.create( + user=self.user, + slug="bp", + name="Business Plan", + enabled=True, + trigger_token="#bp#", + reply_required=True, + exact_match_only=True, + ) + CommandChannelBinding.objects.create( + profile=self.profile, + direction="ingress", + service="whatsapp", + channel_identifier="120363402761690215", + enabled=True, + ) + for action_type, position in (("extract_bp", 0), ("save_document", 1)): + CommandAction.objects.create( + profile=self.profile, + action_type=action_type, + enabled=True, + position=position, + ) + + def _ctx(self, trigger: Message, text: str) -> CommandContext: + return CommandContext( + service="whatsapp", + channel_identifier="120363402761690215", + message_id=str(trigger.id), + user_id=self.user.id, + message_text=text, + payload={}, + ) + + def test_parser_detects_set_and_remainder(self): + parsed = parse_bp_subcommand(" #BP set# addendum text ") + self.assertEqual("set", parsed.command) + self.assertEqual("addendum text", parsed.remainder_text) + + def test_parser_detects_set_range(self): + parsed = parse_bp_subcommand("#bp set range# now") + self.assertEqual("set_range", parsed.command) + + def test_set_standalone_uses_remainder_only(self): + trigger = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="#bp set# direct body", + ts=1000, + source_service="whatsapp", + 
source_chat_id="120363402761690215", + ) + with patch("core.commands.handlers.bp.ai_runner.run_prompt", new=AsyncMock()) as mocked_ai: + result = async_to_sync(BPCommandHandler().execute)(self._ctx(trigger, trigger.text)) + self.assertTrue(result.ok) + mocked_ai.assert_not_awaited() + doc = BusinessPlanDocument.objects.get(trigger_message=trigger) + self.assertEqual("direct body", doc.content_markdown) + self.assertEqual("Generated from 1 message.", doc.structured_payload.get("annotation")) + + def test_set_reply_only_uses_anchor(self): + anchor = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="anchor body", + ts=1000, + source_service="whatsapp", + source_chat_id="120363402761690215", + ) + trigger = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="#bp set#", + ts=2000, + source_service="whatsapp", + source_chat_id="120363402761690215", + reply_to=anchor, + ) + result = async_to_sync(BPCommandHandler().execute)(self._ctx(trigger, trigger.text)) + self.assertTrue(result.ok) + doc = BusinessPlanDocument.objects.get(trigger_message=trigger) + self.assertEqual("anchor body", doc.content_markdown) + self.assertEqual("Generated from 1 message.", doc.structured_payload.get("annotation")) + + def test_set_reply_plus_addendum_uses_divider(self): + anchor = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="base body", + ts=1000, + source_service="whatsapp", + source_chat_id="120363402761690215", + ) + trigger = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="#bp set# extra text", + ts=2000, + source_service="whatsapp", + source_chat_id="120363402761690215", + reply_to=anchor, + ) + result = async_to_sync(BPCommandHandler().execute)(self._ctx(trigger, trigger.text)) + self.assertTrue(result.ok) + doc = BusinessPlanDocument.objects.get(trigger_message=trigger) + self.assertIn("base body", 
doc.content_markdown) + self.assertIn("--- Addendum (newer message text) ---", doc.content_markdown) + self.assertIn("extra text", doc.content_markdown) + self.assertEqual( + "Generated from 1 message + 1 addendum.", + doc.structured_payload.get("annotation"), + ) + + def test_set_range_requires_reply(self): + trigger = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="#bp set range#", + ts=3000, + source_service="whatsapp", + source_chat_id="120363402761690215", + ) + result = async_to_sync(BPCommandHandler().execute)(self._ctx(trigger, trigger.text)) + self.assertFalse(result.ok) + self.assertEqual("failed", result.status) + self.assertEqual("bp_set_range_requires_reply_target", result.error) + + def test_set_range_exports_text_only_lines(self): + anchor = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="line 1", + ts=1000, + source_service="whatsapp", + source_chat_id="120363402761690215", + ) + Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="", + ts=1500, + source_service="whatsapp", + source_chat_id="120363402761690215", + ) + trigger = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="#bp set range#", + ts=2000, + source_service="whatsapp", + source_chat_id="120363402761690215", + reply_to=anchor, + ) + result = async_to_sync(BPCommandHandler().execute)(self._ctx(trigger, trigger.text)) + self.assertTrue(result.ok) + doc = BusinessPlanDocument.objects.get(trigger_message=trigger) + self.assertEqual("line 1\n(no text)\n#bp set range#", doc.content_markdown) + self.assertEqual("Generated from 3 messages.", doc.structured_payload.get("annotation")) diff --git a/core/tests/test_repeat_answer_and_tasks.py b/core/tests/test_repeat_answer_and_tasks.py new file mode 100644 index 0000000..8304496 --- /dev/null +++ b/core/tests/test_repeat_answer_and_tasks.py @@ -0,0 +1,138 @@ +from 
__future__ import annotations + +from asgiref.sync import async_to_sync +from django.test import TestCase, override_settings + +from core.assist.repeat_answer import find_repeat_answer, learn_from_message +from core.models import ( + AnswerSuggestionEvent, + ChatSession, + ChatTaskSource, + DerivedTask, + DerivedTaskEvent, + Person, + PersonIdentifier, + TaskCompletionPattern, + TaskProject, + User, + Message, +) +from core.tasks.engine import process_inbound_task_intelligence + + +class RepeatAnswerTests(TestCase): + def setUp(self): + self.user = User.objects.create_user("repeat-user", "repeat@example.com", "x") + self.person = Person.objects.create(user=self.user, name="Repeat Person") + self.identifier = PersonIdentifier.objects.create( + user=self.user, + person=self.person, + service="whatsapp", + identifier="120363402761690215@g.us", + ) + self.session = ChatSession.objects.create(user=self.user, identifier=self.identifier) + + def test_suggest_only_for_repeated_group_question(self): + q1 = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="What is the deploy command?", + ts=1000, + source_service="whatsapp", + source_chat_id="120363402761690215@g.us", + ) + a1 = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="me", + text="Use make deploy-prod.", + ts=1200, + source_service="whatsapp", + source_chat_id="120363402761690215@g.us", + reply_to=q1, + ) + async_to_sync(learn_from_message)(a1) + + q2 = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="What is the deploy command?", + ts=2000, + source_service="whatsapp", + source_chat_id="120363402761690215@g.us", + ) + suggestion = async_to_sync(find_repeat_answer)(self.user, q2) + self.assertIsNotNone(suggestion) + self.assertIn("deploy", suggestion.answer_text.lower()) + self.assertTrue( + AnswerSuggestionEvent.objects.filter(message=q2, status="suggested").exists() + ) + + 
+@override_settings(TASK_DERIVATION_USE_AI=False) +class TaskEngineTests(TestCase): + def setUp(self): + self.user = User.objects.create_user("task-user", "task@example.com", "x") + self.person = Person.objects.create(user=self.user, name="Task Person") + self.identifier = PersonIdentifier.objects.create( + user=self.user, + person=self.person, + service="whatsapp", + identifier="120363402761690215@g.us", + ) + self.session = ChatSession.objects.create(user=self.user, identifier=self.identifier) + self.project = TaskProject.objects.create(user=self.user, name="Ops") + ChatTaskSource.objects.create( + user=self.user, + service="whatsapp", + channel_identifier="120363402761690215@g.us", + project=self.project, + enabled=True, + ) + TaskCompletionPattern.objects.create(user=self.user, phrase="done", enabled=True) + + def test_creates_derived_task_on_task_like_message(self): + m = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="Task: rotate credentials tonight", + ts=1000, + source_service="whatsapp", + source_chat_id="120363402761690215@g.us", + ) + async_to_sync(process_inbound_task_intelligence)(m) + task = DerivedTask.objects.get(origin_message=m) + self.assertEqual("open", task.status_snapshot) + self.assertTrue(task.reference_code) + self.assertTrue(DerivedTaskEvent.objects.filter(task=task, event_type="created").exists()) + + def test_marks_completion_from_regex_marker(self): + seed = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text="task: patch kernel", + ts=1000, + source_service="whatsapp", + source_chat_id="120363402761690215@g.us", + ) + async_to_sync(process_inbound_task_intelligence)(seed) + task = DerivedTask.objects.get(origin_message=seed) + marker = Message.objects.create( + user=self.user, + session=self.session, + sender_uuid="peer", + text=f"done #{task.reference_code}", + ts=1100, + source_service="whatsapp", + source_chat_id="120363402761690215@g.us", + 
) + async_to_sync(process_inbound_task_intelligence)(marker) + task.refresh_from_db() + self.assertEqual("completed", task.status_snapshot) + self.assertTrue( + DerivedTaskEvent.objects.filter(task=task, event_type="completion_marked").exists() + ) diff --git a/core/views/compose.py b/core/views/compose.py index ac0fb3a..f92e8a9 100644 --- a/core/views/compose.py +++ b/core/views/compose.py @@ -2427,7 +2427,19 @@ def _panel_context( "compose_quick_insights_url": reverse("compose_quick_insights"), "compose_history_sync_url": reverse("compose_history_sync"), "compose_toggle_command_url": reverse("compose_toggle_command"), + "compose_answer_suggestion_send_url": reverse("compose_answer_suggestion_send"), "compose_ws_url": ws_url, + "tasks_hub_url": reverse("tasks_hub"), + "tasks_group_url": reverse( + "tasks_group", + kwargs={ + "service": base["service"], + "identifier": base["identifier"] or "_", + }, + ), + "tasks_settings_scoped_url": ( + f"{reverse('tasks_settings')}?{urlencode({'service': base['service'], 'identifier': base['identifier'] or ''})}" + ), "ai_workspace_url": ( f"{reverse('ai_workspace')}?person={base['person'].id}" if base["person"] diff --git a/core/views/tasks.py b/core/views/tasks.py new file mode 100644 index 0000000..4535a26 --- /dev/null +++ b/core/views/tasks.py @@ -0,0 +1,461 @@ +from __future__ import annotations + +from urllib.parse import urlencode + +from asgiref.sync import async_to_sync +from django.contrib import messages +from django.contrib.auth.mixins import LoginRequiredMixin +from django.db.models import Count +from django.http import JsonResponse +from django.shortcuts import get_object_or_404, redirect, render +from django.views import View + +from core.clients.transport import send_message_raw +from core.models import ( + AnswerSuggestionEvent, + ChatTaskSource, + DerivedTask, + DerivedTaskEvent, + ExternalSyncEvent, + TaskCompletionPattern, + TaskEpic, + TaskProject, + TaskProviderConfig, + PersonIdentifier, + 
PlatformChatLink, +) +from core.tasks.providers.mock import get_provider + + +def _to_bool(raw, default=False) -> bool: + if raw is None: + return bool(default) + value = str(raw).strip().lower() + if value in {"1", "true", "yes", "on", "y"}: + return True + if value in {"0", "false", "no", "off", "n"}: + return False + return bool(default) + + +def _parse_prefixes(value: str) -> list[str]: + text = str(value or "").strip() + if not text: + return ["task:", "todo:"] + rows = [] + for row in text.split(","): + item = str(row or "").strip().lower() + if item and item not in rows: + rows.append(item) + return rows or ["task:", "todo:"] + + +def _flags_from_post(request, prefix: str = "") -> dict: + key = lambda name: f"{prefix}{name}" if prefix else name + return { + "derive_enabled": _to_bool(request.POST.get(key("derive_enabled")), True), + "match_mode": str(request.POST.get(key("match_mode")) or "strict").strip().lower() or "strict", + "require_prefix": _to_bool(request.POST.get(key("require_prefix")), True), + "allowed_prefixes": _parse_prefixes(str(request.POST.get(key("allowed_prefixes")) or "")), + "completion_enabled": _to_bool(request.POST.get(key("completion_enabled")), True), + "ai_title_enabled": _to_bool(request.POST.get(key("ai_title_enabled")), True), + "announce_task_id": _to_bool(request.POST.get(key("announce_task_id")), True), + "min_chars": max(1, int(str(request.POST.get(key("min_chars")) or "3").strip() or "3")), + } + + +def _flags_with_defaults(raw: dict | None) -> dict: + row = dict(raw or {}) + return { + "derive_enabled": _to_bool(row.get("derive_enabled"), True), + "match_mode": str(row.get("match_mode") or "strict").strip().lower() or "strict", + "require_prefix": _to_bool(row.get("require_prefix"), True), + "allowed_prefixes": _parse_prefixes(",".join(list(row.get("allowed_prefixes") or []))), + "completion_enabled": _to_bool(row.get("completion_enabled"), True), + "ai_title_enabled": _to_bool(row.get("ai_title_enabled"), True), + 
"announce_task_id": _to_bool(row.get("announce_task_id"), True), + "min_chars": max(1, int(row.get("min_chars") or 3)), + } + + +def _settings_redirect(request): + service = str(request.POST.get("prefill_service") or request.GET.get("service") or "").strip() + identifier = str(request.POST.get("prefill_identifier") or request.GET.get("identifier") or "").strip() + if service and identifier: + return redirect(f"{request.path}?{urlencode({'service': service, 'identifier': identifier})}") + return redirect("tasks_settings") + + +def _service_label(service: str) -> str: + key = str(service or "").strip().lower() + labels = { + "signal": "Signal", + "whatsapp": "WhatsApp", + "instagram": "Instagram", + "xmpp": "XMPP", + "web": "Web", + } + return labels.get(key, key.title() if key else "Unknown") + + +def _resolve_channel_display(user, service: str, identifier: str) -> dict: + service_key = str(service or "").strip().lower() + raw_identifier = str(identifier or "").strip() + bare_identifier = raw_identifier.split("@", 1)[0].strip() + variants = [raw_identifier] + if bare_identifier and bare_identifier not in variants: + variants.append(bare_identifier) + if service_key == "whatsapp": + group_identifier = f"{bare_identifier}@g.us" if bare_identifier else "" + if group_identifier and group_identifier not in variants: + variants.append(group_identifier) + + group_link = None + if bare_identifier: + group_link = ( + PlatformChatLink.objects.filter( + user=user, + service=service_key, + chat_identifier=bare_identifier, + is_group=True, + ) + .order_by("-id") + .first() + ) + + person_identifier = ( + PersonIdentifier.objects.filter( + user=user, + service=service_key, + identifier__in=variants, + ) + .select_related("person") + .order_by("-id") + .first() + ) + + display_name = "" + if group_link and str(group_link.chat_name or "").strip(): + display_name = str(group_link.chat_name or "").strip() + elif person_identifier and person_identifier.person_id: + display_name = 
str(person_identifier.person.name or "").strip() + if not display_name: + display_name = raw_identifier or bare_identifier or "Unknown chat" + + display_identifier = raw_identifier + if group_link: + display_identifier = ( + str(group_link.chat_jid or "").strip() + or (f"{bare_identifier}@g.us" if bare_identifier else raw_identifier) + ) + elif service_key == "whatsapp" and bare_identifier and not raw_identifier.endswith("@g.us"): + display_identifier = f"{bare_identifier}@g.us" + + return { + "service_key": service_key, + "service_label": _service_label(service_key), + "display_name": display_name, + "display_identifier": display_identifier or raw_identifier, + "variants": [row for row in variants if row], + } + + +class TasksHub(LoginRequiredMixin, View): + template_name = "pages/tasks-hub.html" + + def get(self, request): + projects = TaskProject.objects.filter(user=request.user).annotate( + task_count=Count("derived_tasks") + ).order_by("name") + tasks = ( + DerivedTask.objects.filter(user=request.user) + .select_related("project", "epic") + .order_by("-created_at")[:200] + ) + return render( + request, + self.template_name, + { + "projects": projects, + "tasks": tasks, + }, + ) + + +class TaskProjectDetail(LoginRequiredMixin, View): + template_name = "pages/tasks-project.html" + + def get(self, request, project_id): + project = get_object_or_404(TaskProject, id=project_id, user=request.user) + tasks = ( + DerivedTask.objects.filter(user=request.user, project=project) + .select_related("epic") + .order_by("-created_at") + ) + epics = TaskEpic.objects.filter(project=project).order_by("name") + return render( + request, + self.template_name, + { + "project": project, + "tasks": tasks, + "epics": epics, + }, + ) + + +class TaskEpicDetail(LoginRequiredMixin, View): + template_name = "pages/tasks-epic.html" + + def get(self, request, epic_id): + epic = get_object_or_404(TaskEpic, id=epic_id, project__user=request.user) + tasks = ( + 
DerivedTask.objects.filter(user=request.user, epic=epic) + .select_related("project") + .order_by("-created_at") + ) + return render(request, self.template_name, {"epic": epic, "tasks": tasks}) + + +class TaskGroupDetail(LoginRequiredMixin, View): + template_name = "pages/tasks-group.html" + + def get(self, request, service, identifier): + channel = _resolve_channel_display(request.user, service, identifier) + variants = list(channel.get("variants") or [str(identifier or "").strip()]) + mappings = ChatTaskSource.objects.filter( + user=request.user, + service=channel["service_key"], + channel_identifier__in=variants, + ).select_related("project", "epic") + tasks = ( + DerivedTask.objects.filter( + user=request.user, + source_service=channel["service_key"], + source_channel__in=variants, + ) + .select_related("project", "epic") + .order_by("-created_at") + ) + return render( + request, + self.template_name, + { + "service": channel["service_key"], + "service_label": channel["service_label"], + "identifier": channel["display_identifier"], + "channel_display_name": channel["display_name"], + "mappings": mappings, + "tasks": tasks, + }, + ) + + +class TaskDetail(LoginRequiredMixin, View): + template_name = "pages/tasks-detail.html" + + def get(self, request, task_id): + task = get_object_or_404( + DerivedTask.objects.select_related("project", "epic"), + id=task_id, + user=request.user, + ) + events = task.events.select_related("source_message").order_by("-created_at") + sync_events = task.external_sync_events.order_by("-created_at") + return render( + request, + self.template_name, + { + "task": task, + "events": events, + "sync_events": sync_events, + }, + ) + + +class TaskSettings(LoginRequiredMixin, View): + template_name = "pages/tasks-settings.html" + + def _context(self, request): + prefill_service = str(request.GET.get("service") or "").strip().lower() + prefill_identifier = str(request.GET.get("identifier") or "").strip() + projects = 
list(TaskProject.objects.filter(user=request.user).order_by("name")) + for row in projects: + row.settings_effective = _flags_with_defaults(row.settings) + row.allowed_prefixes_csv = ",".join(row.settings_effective["allowed_prefixes"]) + sources = list( + ChatTaskSource.objects.filter(user=request.user) + .select_related("project", "epic") + .order_by("service", "channel_identifier") + ) + for row in sources: + row.settings_effective = _flags_with_defaults(row.settings) + row.allowed_prefixes_csv = ",".join(row.settings_effective["allowed_prefixes"]) + return { + "projects": projects, + "epics": TaskEpic.objects.filter(project__user=request.user).select_related("project").order_by("project__name", "name"), + "sources": sources, + "patterns": TaskCompletionPattern.objects.filter(user=request.user).order_by("position", "created_at"), + "provider_configs": TaskProviderConfig.objects.filter(user=request.user).order_by("provider"), + "sync_events": ExternalSyncEvent.objects.filter(user=request.user).order_by("-updated_at")[:100], + "prefill_service": prefill_service, + "prefill_identifier": prefill_identifier, + } + + def get(self, request): + return render(request, self.template_name, self._context(request)) + + def post(self, request): + action = str(request.POST.get("action") or "").strip() + + if action == "project_create": + TaskProject.objects.create( + user=request.user, + name=str(request.POST.get("name") or "Project").strip() or "Project", + external_key=str(request.POST.get("external_key") or "").strip(), + active=bool(request.POST.get("active") or "1"), + settings=_flags_from_post(request), + ) + return _settings_redirect(request) + + if action == "epic_create": + project = get_object_or_404(TaskProject, id=request.POST.get("project_id"), user=request.user) + TaskEpic.objects.create( + project=project, + name=str(request.POST.get("name") or "Epic").strip() or "Epic", + external_key=str(request.POST.get("external_key") or "").strip(), + 
active=bool(request.POST.get("active") or "1"), + ) + return _settings_redirect(request) + + if action == "source_create": + project = get_object_or_404(TaskProject, id=request.POST.get("project_id"), user=request.user) + epic = None + epic_id = str(request.POST.get("epic_id") or "").strip() + if epic_id: + epic = get_object_or_404(TaskEpic, id=epic_id, project__user=request.user) + ChatTaskSource.objects.create( + user=request.user, + service=str(request.POST.get("service") or "web").strip(), + channel_identifier=str(request.POST.get("channel_identifier") or "").strip(), + project=project, + epic=epic, + enabled=bool(request.POST.get("enabled") or "1"), + settings=_flags_from_post(request, prefix="source_"), + ) + return _settings_redirect(request) + + if action == "quick_setup": + service = str(request.POST.get("service") or "web").strip().lower() or "web" + channel_identifier = str(request.POST.get("channel_identifier") or "").strip() + project_name = str(request.POST.get("project_name") or "").strip() or "General" + epic_name = str(request.POST.get("epic_name") or "").strip() + project, _ = TaskProject.objects.get_or_create( + user=request.user, + name=project_name, + defaults={"settings": _flags_from_post(request)}, + ) + if not project.settings: + project.settings = _flags_from_post(request) + project.save(update_fields=["settings", "updated_at"]) + epic = None + if epic_name: + epic, _ = TaskEpic.objects.get_or_create(project=project, name=epic_name) + if channel_identifier: + source, created = ChatTaskSource.objects.get_or_create( + user=request.user, + service=service, + channel_identifier=channel_identifier, + project=project, + defaults={ + "epic": epic, + "enabled": True, + "settings": _flags_from_post(request, prefix="source_"), + }, + ) + if not created: + source.project = project + source.epic = epic + source.enabled = True + source.settings = _flags_from_post(request, prefix="source_") + source.save(update_fields=["project", "epic", "enabled", 
"settings", "updated_at"]) + return _settings_redirect(request) + + if action == "project_flags_update": + project = get_object_or_404(TaskProject, id=request.POST.get("project_id"), user=request.user) + project.settings = _flags_from_post(request) + project.save(update_fields=["settings", "updated_at"]) + return _settings_redirect(request) + + if action == "source_flags_update": + source = get_object_or_404(ChatTaskSource, id=request.POST.get("source_id"), user=request.user) + source.settings = _flags_from_post(request, prefix="source_") + source.save(update_fields=["settings", "updated_at"]) + return _settings_redirect(request) + + if action == "pattern_create": + phrase = str(request.POST.get("phrase") or "").strip() + if phrase: + TaskCompletionPattern.objects.get_or_create( + user=request.user, + phrase=phrase, + defaults={"enabled": True, "position": TaskCompletionPattern.objects.filter(user=request.user).count()}, + ) + return _settings_redirect(request) + + if action == "provider_update": + provider = str(request.POST.get("provider") or "mock").strip() or "mock" + row, _ = TaskProviderConfig.objects.get_or_create( + user=request.user, + provider=provider, + defaults={"enabled": False, "settings": {}}, + ) + row.enabled = bool(request.POST.get("enabled")) + row.save(update_fields=["enabled", "updated_at"]) + return _settings_redirect(request) + + if action == "sync_retry": + event = get_object_or_404(ExternalSyncEvent, id=request.POST.get("event_id"), user=request.user) + provider = get_provider(event.provider) + payload = dict(event.payload or {}) + result = provider.append_update({}, payload) + event.status = "ok" if result.ok else "failed" + event.error = str(result.error or "") + event.payload = dict(payload, retried=True) + event.save(update_fields=["status", "error", "payload", "updated_at"]) + return _settings_redirect(request) + + return _settings_redirect(request) + + +class AnswerSuggestionSend(LoginRequiredMixin, View): + def post(self, request): 
+ event = get_object_or_404( + AnswerSuggestionEvent.objects.select_related("candidate_answer", "message"), + id=request.POST.get("suggestion_id"), + user=request.user, + status="suggested", + ) + decision = str(request.POST.get("decision") or "accept").strip().lower() + if decision == "dismiss": + event.status = "dismissed" + event.save(update_fields=["status", "updated_at"]) + return JsonResponse({"ok": True, "status": "dismissed"}) + + text = str(getattr(event.candidate_answer, "answer_text", "") or "").strip() + msg = event.message + if not text: + return JsonResponse({"ok": False, "error": "empty_candidate_answer"}, status=400) + ok = async_to_sync(send_message_raw)( + msg.source_service or "web", + msg.source_chat_id or "", + text=text, + attachments=[], + metadata={"origin": "repeat_answer_suggestion"}, + ) + event.status = "accepted" if ok else "suggested" + event.save(update_fields=["status", "updated_at"]) + if not ok: + messages.error(request, "Failed to send suggestion message.") + return JsonResponse({"ok": False, "error": "send_failed"}, status=502) + return JsonResponse({"ok": True, "status": "accepted"})