Files
GIA/core/mcp/tools.py
2026-03-08 22:08:55 +00:00

1336 lines
47 KiB
Python

from __future__ import annotations
import datetime
import json
import time
from pathlib import Path
from typing import Any
from asgiref.sync import async_to_sync
from django.conf import settings
from django.db.models import Q
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from django.utils.text import slugify
from core.memory.pipeline import (
create_memory_change_request,
review_memory_change_request,
suggest_memories_from_recent_messages,
)
from core.memory.retrieval import retrieve_memories_for_prompt
from core.memory.search_backend import backend_status, get_memory_search_backend
from core.models import (
DerivedTask,
DerivedTaskEvent,
KnowledgeArticle,
KnowledgeRevision,
MCPToolAuditLog,
MemoryChangeRequest,
MemoryItem,
TaskArtifactLink,
TaskEpic,
TaskProject,
User,
WorkspaceConversation,
)
from core.tasks.engine import create_task_record_and_sync, mark_task_completed_and_sync
from core.util import logs
log = logs.get_logger("mcp-tools")
def _safe_limit(value: Any, default: int, low: int, high: int) -> int:
try:
parsed = int(value)
except (TypeError, ValueError):
parsed = default
return max(low, min(high, parsed))
def _coerce_statuses(
value: Any,
default: tuple[str, ...] = ("active",),
) -> tuple[str, ...]:
if isinstance(value, (list, tuple, set)):
statuses = [str(item or "").strip().lower() for item in value]
else:
statuses = [item.strip().lower() for item in str(value or "").split(",")]
cleaned = tuple(item for item in statuses if item)
return cleaned or default
def _coerce_tags(value: Any) -> list[str]:
if isinstance(value, (list, tuple, set)):
tags = [str(item or "").strip() for item in value]
else:
tags = [item.strip() for item in str(value or "").split(",")]
seen = set()
ordered: list[str] = []
for tag in tags:
if not tag:
continue
key = tag.lower()
if key in seen:
continue
seen.add(key)
ordered.append(tag)
return ordered
def _as_iso(value: Any) -> str:
return value.isoformat() if value else ""
def _task_payload(task: DerivedTask) -> dict[str, Any]:
    """Serialize a DerivedTask into a JSON-safe dict (missing fields become "")."""
    return dict(
        id=str(task.id),
        title=str(task.title or ""),
        status_snapshot=str(task.status_snapshot or ""),
        reference_code=str(task.reference_code or ""),
        external_key=str(task.external_key or ""),
        source_service=str(task.source_service or ""),
        source_channel=str(task.source_channel or ""),
        project_id=str(task.project_id or ""),
        # getattr tolerates a null project/epic relation.
        project_name=str(getattr(task.project, "name", "") or ""),
        epic_id=str(task.epic_id or ""),
        epic_name=str(getattr(task.epic, "name", "") or ""),
        created_at=_as_iso(task.created_at),
        immutable_payload=task.immutable_payload or {},
    )
def _event_payload(event: DerivedTaskEvent) -> dict[str, Any]:
    """Serialize a DerivedTaskEvent into a JSON-safe dict."""
    return dict(
        id=str(event.id),
        task_id=str(event.task_id),
        event_type=str(event.event_type or ""),
        actor_identifier=str(event.actor_identifier or ""),
        source_message_id=str(event.source_message_id or ""),
        payload=event.payload or {},
        created_at=_as_iso(event.created_at),
    )
def _artifact_payload(link: TaskArtifactLink) -> dict[str, Any]:
    """Serialize a TaskArtifactLink into a JSON-safe dict."""
    return dict(
        id=str(link.id),
        task_id=str(link.task_id),
        kind=str(link.kind or ""),
        uri=str(link.uri or ""),
        path=str(link.path or ""),
        summary=str(link.summary or ""),
        created_by_identifier=str(link.created_by_identifier or ""),
        created_at=_as_iso(link.created_at),
    )
def _article_payload(article: KnowledgeArticle) -> dict[str, Any]:
    """Serialize a KnowledgeArticle into a JSON-safe dict."""
    return dict(
        id=str(article.id),
        user_id=int(article.user_id),
        related_task_id=str(article.related_task_id or ""),
        title=str(article.title or ""),
        slug=str(article.slug or ""),
        markdown=str(article.markdown or ""),
        tags=list(article.tags or []),
        status=str(article.status or ""),
        owner_identifier=str(article.owner_identifier or ""),
        created_at=_as_iso(article.created_at),
        updated_at=_as_iso(article.updated_at),
    )
def _revision_payload(revision: KnowledgeRevision) -> dict[str, Any]:
    """Serialize a KnowledgeRevision into a JSON-safe dict."""
    return dict(
        id=str(revision.id),
        article_id=str(revision.article_id),
        revision=int(revision.revision),
        author_tool=str(revision.author_tool or ""),
        author_identifier=str(revision.author_identifier or ""),
        summary=str(revision.summary or ""),
        markdown=str(revision.markdown or ""),
        created_at=_as_iso(revision.created_at),
    )
def _memory_change_payload(req: MemoryChangeRequest) -> dict[str, Any]:
    """Serialize a MemoryChangeRequest into a JSON-safe dict."""
    score = req.proposed_confidence_score
    return dict(
        id=str(req.id),
        user_id=int(req.user_id),
        memory_id=str(req.memory_id or ""),
        conversation_id=str(req.conversation_id or ""),
        person_id=str(req.person_id or ""),
        action=str(req.action or ""),
        status=str(req.status or ""),
        proposed_memory_kind=str(req.proposed_memory_kind or ""),
        proposed_content=req.proposed_content or {},
        # Preserve None; only coerce real values to float.
        proposed_confidence_score=None if score is None else float(score),
        proposed_expires_at=_as_iso(req.proposed_expires_at),
        reason=str(req.reason or ""),
        requested_by_identifier=str(req.requested_by_identifier or ""),
        reviewed_by_identifier=str(req.reviewed_by_identifier or ""),
        reviewed_at=_as_iso(req.reviewed_at),
        created_at=_as_iso(req.created_at),
        updated_at=_as_iso(req.updated_at),
    )
def _parse_iso_datetime(value: Any) -> str:
    """Validate an ISO datetime string and return a normalized, tz-aware ISO string.

    Blank input yields "". Raises ValueError when the text cannot be parsed.
    Naive datetimes are localized to the current Django timezone.
    """
    text = str(value or "").strip()
    if not text:
        return ""
    moment = parse_datetime(text)
    if moment is None:
        raise ValueError("invalid ISO datetime")
    if moment.tzinfo is None:
        moment = timezone.make_aware(moment, timezone.get_current_timezone())
    return moment.isoformat()
def _resolve_task(arguments: dict[str, Any]) -> DerivedTask:
    """Fetch the task named by arguments["task_id"], optionally scoped to user_id.

    Raises ValueError when task_id is missing; the queryset's DoesNotExist
    propagates when no row matches.
    """
    task_id = str(arguments.get("task_id") or "").strip()
    if not task_id:
        raise ValueError("task_id is required")
    qs = DerivedTask.objects.select_related("project", "epic")
    raw_user = str(arguments.get("user_id") or "").strip()
    if raw_user:
        qs = qs.filter(user_id=int(raw_user))
    return qs.get(id=task_id)
def _get_article_for_user(arguments: dict[str, Any]) -> KnowledgeArticle:
    """Look up a user's KnowledgeArticle by article_id (preferred) or slug.

    Raises ValueError when neither identifier is supplied.
    """
    owner_qs = KnowledgeArticle.objects.filter(user_id=int(arguments.get("user_id")))
    article_id = str(arguments.get("article_id") or "").strip()
    if article_id:
        return owner_qs.get(id=article_id)
    slug = str(arguments.get("slug") or "").strip()
    if slug:
        return owner_qs.get(slug=slug)
    raise ValueError("article_id or slug is required")
def _next_unique_slug(*, user_id: int, requested_slug: str) -> str:
    """Slugify *requested_slug* and append -2, -3, ... until unique for this user.

    The candidate (including suffix) is kept within the 255-char column limit.
    Raises ValueError when the slugified base is empty.
    """
    base = slugify(requested_slug)[:255].strip("-")
    if not base:
        raise ValueError("slug cannot be empty")

    def _taken(slug: str) -> bool:
        # One existence query per candidate.
        return KnowledgeArticle.objects.filter(user_id=int(user_id), slug=slug).exists()

    candidate = base
    counter = 2
    while _taken(candidate):
        suffix = f"-{counter}"
        candidate = base[: max(1, 255 - len(suffix))] + suffix
        counter += 1
    return candidate
def _create_revision(
    *,
    article: KnowledgeArticle,
    markdown: str,
    author_tool: str,
    author_identifier: str,
    summary: str,
) -> KnowledgeRevision:
    """Append the next sequentially-numbered KnowledgeRevision to *article*."""
    latest = article.revisions.order_by("-revision").first()
    next_number = 1 if latest is None else int(latest.revision) + 1
    return KnowledgeRevision.objects.create(
        article=article,
        revision=next_number,
        author_tool=str(author_tool or "mcp").strip(),
        author_identifier=str(author_identifier or "").strip(),
        summary=str(summary or "").strip(),
        markdown=str(markdown or ""),
    )
def _preview_meta(payload: Any) -> dict[str, Any]:
if isinstance(payload, dict):
keys = list(payload.keys())[:24]
meta = {"keys": keys}
if "count" in payload:
meta["count"] = payload.get("count")
if "id" in payload:
meta["id"] = payload.get("id")
return meta
return {"preview": str(payload)[:500]}
def _audit_user_from_args(arguments: dict[str, Any]) -> User | None:
    """Best-effort lookup of the acting User from arguments["user_id"].

    Returns None for a missing, non-numeric, or unknown user id.
    """
    raw = str(arguments.get("user_id") or "").strip()
    try:
        uid = int(raw)  # int("") also raises, covering the blank case
    except ValueError:
        return None
    return User.objects.filter(id=uid).first()
def tool_manticore_status(arguments: dict[str, Any]) -> dict[str, Any]:
    """Report the configured memory backend status plus a millisecond timestamp."""
    del arguments  # tool takes no inputs
    payload = backend_status()
    payload["ts_ms"] = int(time.time() * 1000)
    return payload
def tool_manticore_query(arguments: dict[str, Any]) -> dict[str, Any]:
    """Run a free-text memory search through the configured backend.

    Raises ValueError when the query text is empty.
    """
    owner_id = int(arguments.get("user_id"))
    search_text = str(arguments.get("query") or "").strip()
    conversation = str(arguments.get("conversation_id") or "").strip()
    cap = _safe_limit(arguments.get("limit"), default=20, low=1, high=100)
    wanted = _coerce_statuses(arguments.get("statuses"), default=("active",))
    if not search_text:
        raise ValueError("query is required")
    backend = get_memory_search_backend()
    found = backend.search(
        user_id=owner_id,
        query=search_text,
        conversation_id=conversation,
        limit=cap,
        include_statuses=wanted,
    )
    serialized_hits = []
    for hit in found:
        serialized_hits.append(
            {
                "memory_id": hit.memory_id,
                "score": hit.score,
                "summary": hit.summary,
                "payload": hit.payload,
            }
        )
    return {
        "backend": getattr(backend, "name", "unknown"),
        "query": search_text,
        "user_id": owner_id,
        "conversation_id": conversation,
        "statuses": list(wanted),
        "count": len(serialized_hits),
        "hits": serialized_hits,
    }
def tool_manticore_reindex(arguments: dict[str, Any]) -> dict[str, Any]:
    """Rebuild the memory search index, optionally scoped to one user."""
    raw_user = str(arguments.get("user_id") or "").strip()
    scoped_user = int(raw_user) if raw_user else None
    cap = _safe_limit(arguments.get("limit"), default=2000, low=1, high=20000)
    wanted = _coerce_statuses(arguments.get("statuses"), default=("active",))
    backend = get_memory_search_backend()
    outcome = backend.reindex(user_id=scoped_user, include_statuses=wanted, limit=cap)
    return {
        "backend": getattr(backend, "name", "unknown"),
        "user_id": scoped_user,
        "statuses": list(wanted),
        "limit": cap,
        "result": outcome,
    }
def tool_memory_list(arguments: dict[str, Any]) -> dict[str, Any]:
    """List approved memories eligible for prompt usage, with optional filters."""
    owner_id = int(arguments.get("user_id"))
    search_text = str(arguments.get("query") or "").strip()
    person = str(arguments.get("person_id") or "").strip()
    conversation = str(arguments.get("conversation_id") or "").strip()
    wanted = _coerce_statuses(arguments.get("statuses"), default=("active",))
    cap = _safe_limit(arguments.get("limit"), default=30, low=1, high=200)
    rows = retrieve_memories_for_prompt(
        user_id=owner_id,
        query=search_text,
        person_id=person,
        conversation_id=conversation,
        statuses=wanted,
        limit=cap,
    )
    return {
        "user_id": owner_id,
        "query": search_text,
        "person_id": person,
        "conversation_id": conversation,
        "statuses": list(wanted),
        "count": len(rows),
        "items": rows,
    }
def tool_memory_propose(arguments: dict[str, Any]) -> dict[str, Any]:
    """Create a proposed MemoryItem plus a pending MemoryChangeRequest.

    Requires user_id, an existing conversation_id, and non-empty content
    (either a dict or a plain string). Raises ValueError when content is
    missing/blank or the conversation does not exist for the user.
    """
    user_id = int(arguments.get("user_id"))
    conversation_id = str(arguments.get("conversation_id") or "").strip()
    person_id = str(arguments.get("person_id") or "").strip()
    memory_kind = str(arguments.get("memory_kind") or "fact").strip().lower()
    content_raw = arguments.get("content")
    if isinstance(content_raw, dict):
        content = dict(content_raw)
    else:
        # Wrap plain-text content. A blank/whitespace string must NOT become
        # the truthy {"text": ""}, which previously slipped past validation.
        text = str(content_raw or "").strip()
        content = {"text": text} if text else {}
    if not content:
        raise ValueError("content is required")
    confidence_score = float(arguments.get("confidence_score") or 0.5)
    expires_at = _parse_iso_datetime(arguments.get("expires_at"))
    reason = str(arguments.get("reason") or "").strip()
    requested_by = str(arguments.get("requested_by_identifier") or "").strip()
    conversation = WorkspaceConversation.objects.filter(
        user_id=user_id,
        id=conversation_id,
    ).first()
    if conversation is None:
        raise ValueError("conversation_id is required and must exist")
    # Stored in "proposed" state; it only becomes active after review.
    item = MemoryItem.objects.create(
        user_id=user_id,
        conversation=conversation,
        person_id=person_id or None,
        memory_kind=memory_kind,
        status="proposed",
        content=content,
        provenance={"source": "mcp.memory.propose"},
        confidence_score=confidence_score,
        expires_at=parse_datetime(expires_at) if expires_at else None,
    )
    req = create_memory_change_request(
        user_id=user_id,
        action="create",
        conversation_id=conversation_id,
        person_id=person_id,
        memory_id=str(item.id),
        memory_kind=memory_kind,
        content=content,
        confidence_score=confidence_score,
        expires_at=expires_at,
        reason=reason,
        requested_by_identifier=requested_by,
    )
    return {
        "ok": True,
        "memory_id": str(item.id),
        "request": _memory_change_payload(req),
    }
def tool_memory_pending(arguments: dict[str, Any]) -> dict[str, Any]:
    """List a user's pending memory change requests, oldest first."""
    owner_id = int(arguments.get("user_id"))
    cap = _safe_limit(arguments.get("limit"), default=50, low=1, high=500)
    pending = (
        MemoryChangeRequest.objects.filter(user_id=owner_id, status="pending")
        .select_related("memory")
        .order_by("created_at")[:cap]
    )
    serialized = [_memory_change_payload(row) for row in pending]
    return {"count": len(serialized), "items": serialized}
def tool_memory_review(arguments: dict[str, Any]) -> dict[str, Any]:
    """Approve or reject one pending memory change request.

    Raises ValueError when request_id is missing; the pipeline handles the
    actual decision semantics and any state transition.
    """
    owner_id = int(arguments.get("user_id"))
    request_id = str(arguments.get("request_id") or "").strip()
    decision = str(arguments.get("decision") or "").strip().lower()
    reviewer = str(arguments.get("reviewer_identifier") or "").strip()
    note_text = str(arguments.get("note") or "").strip()
    if not request_id:
        raise ValueError("request_id is required")
    req = review_memory_change_request(
        user_id=owner_id,
        request_id=request_id,
        decision=decision,
        reviewer_identifier=reviewer,
        note=note_text,
    )
    memory = req.memory
    memory_summary = None
    if memory is not None:
        memory_summary = {
            "id": str(memory.id),
            "status": str(memory.status),
            "memory_kind": str(memory.memory_kind),
            "content": memory.content or {},
            "updated_at": _as_iso(memory.updated_at),
        }
    return {"request": _memory_change_payload(req), "memory": memory_summary}
def tool_memory_suggest(arguments: dict[str, Any]) -> dict[str, Any]:
    """Scan recent inbound messages and extract memory proposals."""
    owner_id = int(arguments.get("user_id"))
    message_cap = _safe_limit(
        arguments.get("limit_messages"),
        default=300,
        low=1,
        high=2000,
    )
    item_cap = _safe_limit(arguments.get("max_items"), default=30, low=1, high=500)
    outcome = suggest_memories_from_recent_messages(
        user_id=owner_id,
        limit_messages=message_cap,
        max_items=item_cap,
    )
    return {
        "user_id": owner_id,
        "limit_messages": message_cap,
        "max_items": item_cap,
        "result": outcome,
    }
def tool_tasks_list(arguments: dict[str, Any]) -> dict[str, Any]:
    """List a user's derived tasks, newest first, with optional filters."""
    owner_id = int(arguments.get("user_id"))
    status_filter = str(arguments.get("status") or "").strip().lower()
    project_filter = str(arguments.get("project_id") or "").strip()
    text_filter = str(arguments.get("query") or "").strip()
    cap = _safe_limit(arguments.get("limit"), default=30, low=1, high=200)
    qs = (
        DerivedTask.objects.filter(user_id=owner_id)
        .select_related("project", "epic")
        .order_by("-created_at")
    )
    if status_filter:
        qs = qs.filter(status_snapshot__iexact=status_filter)
    if project_filter:
        qs = qs.filter(project_id=project_filter)
    if text_filter:
        # Free text matches title, reference code, or external key.
        text_match = (
            Q(title__icontains=text_filter)
            | Q(reference_code__icontains=text_filter)
            | Q(external_key__icontains=text_filter)
        )
        qs = qs.filter(text_match)
    serialized = [_task_payload(task) for task in qs[:cap]]
    return {"count": len(serialized), "items": serialized}
def tool_tasks_search(arguments: dict[str, Any]) -> dict[str, Any]:
    """Free-text task search: require a non-empty query, then defer to tasks.list."""
    if not str(arguments.get("query") or "").strip():
        raise ValueError("query is required")
    return tool_tasks_list(arguments)
def tool_tasks_create(arguments: dict[str, Any]) -> dict[str, Any]:
    """Create a canonical task (and its creation event) inside GIA.

    Requires user_id, project_id and title; validates the optional epic_id
    against the project and due_date as YYYY-MM-DD before delegating to the
    task engine. Raises ValueError on any invalid input.
    """
    owner_id = int(arguments.get("user_id"))
    project_key = str(arguments.get("project_id") or "").strip()
    task_title = str(arguments.get("title") or "").strip()
    if not project_key:
        raise ValueError("project_id is required")
    if not task_title:
        raise ValueError("title is required")
    project = TaskProject.objects.filter(user_id=owner_id, id=project_key).first()
    if project is None:
        raise ValueError("project_id not found")
    epic_key = str(arguments.get("epic_id") or "").strip()
    epic = None
    if epic_key:
        # An epic must belong to the chosen project.
        epic = TaskEpic.objects.filter(project=project, id=epic_key).first()
        if epic is None:
            raise ValueError("epic_id not found for project")
    due_raw = str(arguments.get("due_date") or "").strip()
    due_date = None
    if due_raw:
        try:
            due_date = datetime.date.fromisoformat(due_raw)
        except Exception as exc:
            raise ValueError("due_date must be YYYY-MM-DD") from exc
    actor = str(arguments.get("actor_identifier") or "").strip()
    service = str(arguments.get("source_service") or "web").strip().lower() or "web"
    task, event = async_to_sync(create_task_record_and_sync)(
        user=project.user,
        project=project,
        epic=epic,
        title=task_title,
        source_service=service,
        source_channel=str(arguments.get("source_channel") or "").strip(),
        actor_identifier=actor,
        due_date=due_date,
        assignee_identifier=str(arguments.get("assignee_identifier") or "").strip(),
        immutable_payload={"source": "mcp.tasks.create", "requested_by": actor},
        event_payload={"source": "mcp.tasks.create", "via": "mcp"},
    )
    return {"task": _task_payload(task), "event": _event_payload(event)}
def tool_tasks_get(arguments: dict[str, Any]) -> dict[str, Any]:
    """Fetch one task plus up to 40 artifact links and related wiki articles."""
    task = _resolve_task(arguments)
    result = _task_payload(task)
    recent_links = task.artifact_links.order_by("-created_at")[:40]
    result["artifact_links"] = [_artifact_payload(link) for link in recent_links]
    recent_articles = task.knowledge_articles.order_by("-updated_at")[:40]
    result["knowledge_articles"] = [
        {
            "id": str(article.id),
            "slug": str(article.slug or ""),
            "title": str(article.title or ""),
            "status": str(article.status or ""),
            "updated_at": _as_iso(article.updated_at),
        }
        for article in recent_articles
    ]
    return result
def tool_tasks_events(arguments: dict[str, Any]) -> dict[str, Any]:
    """List the most recent events for one task, newest first."""
    task = _resolve_task(arguments)
    cap = _safe_limit(arguments.get("limit"), default=50, low=1, high=200)
    events = (
        DerivedTaskEvent.objects.filter(task=task)
        .select_related("task")
        .order_by("-created_at")[:cap]
    )
    serialized = [_event_payload(event) for event in events]
    return {"count": len(serialized), "items": serialized}
def tool_tasks_create_note(arguments: dict[str, Any]) -> dict[str, Any]:
    """Record a progress note against a task as a "progress" event.

    Raises ValueError when the note text is empty.
    """
    task = _resolve_task(arguments)
    note_text = str(arguments.get("note") or "").strip()
    if not note_text:
        raise ValueError("note is required")
    actor = str(arguments.get("actor_identifier") or "").strip()
    event = DerivedTaskEvent.objects.create(
        task=task,
        event_type="progress",
        actor_identifier=actor,
        payload={"note": note_text, "source": "mcp.tasks.create_note"},
    )
    return {"task": _task_payload(task), "event": _event_payload(event)}
def tool_tasks_complete(arguments: dict[str, Any]) -> dict[str, Any]:
    """Mark a task completed via the task engine and return its fresh state."""
    task = _resolve_task(arguments)
    actor = str(arguments.get("actor_identifier") or "").strip()
    event = async_to_sync(mark_task_completed_and_sync)(
        task=task,
        actor_identifier=actor,
        payload={"source": "mcp.tasks.complete", "via": "mcp"},
    )
    # The engine mutates the row; reload so the payload reflects the new state.
    task.refresh_from_db()
    return {"task": _task_payload(task), "event": _event_payload(event)}
def tool_tasks_link_artifact(arguments: dict[str, Any]) -> dict[str, Any]:
    """Attach an artifact reference (URI and/or filesystem path) to a task.

    Raises ValueError when neither uri nor path is supplied.
    """
    task = _resolve_task(arguments)
    uri = str(arguments.get("uri") or "").strip()
    path = str(arguments.get("path") or "").strip()
    if not uri and not path:
        raise ValueError("uri or path is required")
    link = TaskArtifactLink.objects.create(
        task=task,
        kind=str(arguments.get("kind") or "").strip() or "note",
        uri=uri,
        path=path,
        summary=str(arguments.get("summary") or "").strip(),
        created_by_identifier=str(arguments.get("created_by_identifier") or "").strip(),
    )
    return {"task_id": str(task.id), "artifact": _artifact_payload(link)}
def tool_wiki_create_article(arguments: dict[str, Any]) -> dict[str, Any]:
    """Create a wiki article plus its initial revision.

    Validates title and status, generates a unique per-user slug (seeded from
    the explicit slug argument or the title), and optionally links the article
    to an existing task owned by the same user.
    """
    owner_id = int(arguments.get("user_id"))
    title = str(arguments.get("title") or "").strip()
    body_markdown = str(arguments.get("markdown") or "")
    related_task_id = str(arguments.get("related_task_id") or "").strip()
    status = str(arguments.get("status") or "draft").strip().lower()
    tags = _coerce_tags(arguments.get("tags"))
    owner_identifier = str(arguments.get("owner_identifier") or "").strip()
    author_identifier = str(arguments.get("author_identifier") or "").strip()
    summary = str(arguments.get("summary") or "Initial revision.").strip()
    if not title:
        raise ValueError("title is required")
    if status not in {"draft", "published", "archived"}:
        raise ValueError("status must be draft/published/archived")
    slug_seed = str(arguments.get("slug") or "").strip() or title
    slug = _next_unique_slug(user_id=owner_id, requested_slug=slug_seed)
    related_task = None
    if related_task_id:
        related_task = DerivedTask.objects.filter(
            user_id=owner_id,
            id=related_task_id,
        ).first()
        if related_task is None:
            raise ValueError("related_task_id not found")
    article = KnowledgeArticle.objects.create(
        user_id=owner_id,
        related_task=related_task,
        title=title,
        slug=slug,
        markdown=body_markdown,
        tags=tags,
        status=status,
        owner_identifier=owner_identifier,
    )
    revision = _create_revision(
        article=article,
        markdown=body_markdown,
        author_tool="mcp",
        author_identifier=author_identifier,
        summary=summary,
    )
    return {
        "article": _article_payload(article),
        "revision": _revision_payload(revision),
    }
def tool_wiki_update_article(arguments: dict[str, Any]) -> dict[str, Any]:
    """Update fields on an existing wiki article and record a new revision.

    Destructive changes are gated: replacing non-empty markdown requires
    approve_overwrite=true, and archiving requires approve_archive=true.
    A KnowledgeRevision snapshot is created after every save.
    """
    article = _get_article_for_user(arguments)
    title = str(arguments.get("title") or "").strip()
    # "key in arguments" markers distinguish "field omitted" from
    # "field explicitly set to an empty value".
    markdown_marker = "markdown" in arguments
    markdown = str(arguments.get("markdown") or "")
    tags_marker = "tags" in arguments
    status_marker = "status" in arguments
    status = str(arguments.get("status") or "").strip().lower()
    related_task_id = str(arguments.get("related_task_id") or "").strip()
    summary = str(arguments.get("summary") or "Updated via MCP").strip()
    author_identifier = str(arguments.get("author_identifier") or "").strip()
    approve_overwrite = bool(arguments.get("approve_overwrite"))
    approve_archive = bool(arguments.get("approve_archive"))
    # Validate both destructive gates before mutating anything.
    if markdown_marker and article.markdown and article.markdown != markdown:
        if not approve_overwrite:
            raise ValueError(
                "approve_overwrite=true is required to overwrite existing markdown"
            )
    if status_marker and status == "archived" and article.status != "archived":
        if not approve_archive:
            raise ValueError("approve_archive=true is required to archive an article")
    if title:
        article.title = title
    if markdown_marker:
        article.markdown = markdown
    if tags_marker:
        article.tags = _coerce_tags(arguments.get("tags"))
    if status_marker:
        if status not in {"draft", "published", "archived"}:
            raise ValueError("status must be draft/published/archived")
        article.status = status
    if related_task_id:
        # The related task must belong to the same user as the article.
        task = DerivedTask.objects.filter(
            user_id=article.user_id,
            id=related_task_id,
        ).first()
        if task is None:
            raise ValueError("related_task_id not found")
        article.related_task = task
    article.save()
    # Snapshot the (possibly updated) markdown as the next revision.
    revision = _create_revision(
        article=article,
        markdown=article.markdown,
        author_tool="mcp",
        author_identifier=author_identifier,
        summary=summary,
    )
    return {
        "article": _article_payload(article),
        "revision": _revision_payload(revision),
    }
def tool_wiki_list(arguments: dict[str, Any]) -> dict[str, Any]:
    """List a user's wiki articles with optional status/tag/task/text filters."""
    owner_id = int(arguments.get("user_id"))
    status_filter = str(arguments.get("status") or "").strip().lower()
    tag_filter = str(arguments.get("tag") or "").strip().lower()
    task_filter = str(arguments.get("related_task_id") or "").strip()
    text_filter = str(arguments.get("query") or "").strip()
    cap = _safe_limit(arguments.get("limit"), default=50, low=1, high=500)
    qs = KnowledgeArticle.objects.filter(user_id=owner_id).order_by("-updated_at")
    if status_filter:
        qs = qs.filter(status__iexact=status_filter)
    if task_filter:
        qs = qs.filter(related_task_id=task_filter)
    if tag_filter:
        qs = qs.filter(tags__icontains=tag_filter)
    if text_filter:
        qs = qs.filter(Q(title__icontains=text_filter) | Q(slug__icontains=text_filter))
    serialized = [_article_payload(article) for article in qs[:cap]]
    return {"count": len(serialized), "items": serialized}
def tool_wiki_get(arguments: dict[str, Any]) -> dict[str, Any]:
    """Fetch one wiki article, optionally with its newest revisions."""
    article = _get_article_for_user(arguments)
    result: dict[str, Any] = {"article": _article_payload(article)}
    if bool(arguments.get("include_revisions")):
        cap = _safe_limit(arguments.get("revision_limit"), default=20, low=1, high=200)
        recent = article.revisions.order_by("-revision")[:cap]
        result["revisions"] = [_revision_payload(rev) for rev in recent]
    return result
def tool_project_get_guidelines(arguments: dict[str, Any]) -> dict[str, Any]:
    """Return the contents of top-level guideline docs, capped at max_chars total.

    Missing files are skipped. ``truncated`` is True only when content was
    actually cut off — either a file was clipped mid-way or an existing file
    was skipped because the character budget ran out. (Previously the flag
    was a false positive whenever the content exactly filled the budget.)
    """
    max_chars = _safe_limit(
        arguments.get("max_chars"), default=16000, low=500, high=50000
    )
    base = Path(settings.BASE_DIR).resolve()
    file_names = ["AGENTS.md", "LLM_CODING_STANDARDS.md", "INSTALL.md", "README.md"]
    payload = []
    total = 0
    truncated = False
    for name in file_names:
        path = (base / name).resolve()
        if not path.exists():
            continue
        # Check the budget BEFORE reading so an exhausted budget skips the I/O.
        remaining = max_chars - total
        if remaining <= 0:
            truncated = True  # an existing file remains unread
            break
        text = path.read_text(encoding="utf-8")
        selected = text[:remaining]
        if len(selected) < len(text):
            truncated = True  # this file was clipped
        total += len(selected)
        payload.append({"path": str(path), "content": selected})
    return {"files": payload, "truncated": truncated}
def tool_project_get_layout(arguments: dict[str, Any]) -> dict[str, Any]:
    """List project-tree entries (relative depth <= 4) under known roots.

    Output is capped at max_entries; directories carry a trailing slash.
    """
    max_entries = _safe_limit(
        arguments.get("max_entries"), default=300, low=50, high=4000
    )
    base = Path(settings.BASE_DIR).resolve()
    entries: list[str] = []
    for root_name in ("app", "core", "scripts", "utilities", "artifacts"):
        root_dir = (base / root_name).resolve()
        if not root_dir.exists():
            continue
        entries.append(f"{root_name}/")
        for child in sorted(root_dir.rglob("*")):
            if len(entries) >= max_entries:
                return {"base_dir": str(base), "items": entries, "truncated": True}
            rel = child.relative_to(base)
            # Skip anything nested deeper than four path components.
            if len(rel.parts) > 4:
                continue
            if child.is_dir():
                entries.append(f"{rel.as_posix()}/")
            else:
                entries.append(rel.as_posix())
    return {"base_dir": str(base), "items": entries, "truncated": False}
def tool_project_get_runbook(arguments: dict[str, Any]) -> dict[str, Any]:
    """Return operational runbook documents, capped at max_chars total.

    Missing files are skipped. ``truncated`` is True only when content was
    actually cut off — either a file was clipped mid-way or an existing file
    was skipped because the character budget ran out. (Previously the flag
    was a false positive whenever the content exactly filled the budget.)
    """
    max_chars = _safe_limit(
        arguments.get("max_chars"), default=16000, low=500, high=50000
    )
    base = Path(settings.BASE_DIR).resolve()
    file_names = [
        "INSTALL.md",
        "README.md",
        "artifacts/mcp/manticore-mcp-server.md",
        "artifacts/plans/11-personal-ai-memory.md",
        "artifacts/plans/12-mcp-server-for-tasks-and-knowledge.md",
    ]
    payload = []
    total = 0
    truncated = False
    for name in file_names:
        path = (base / name).resolve()
        if not path.exists():
            continue
        # Check the budget BEFORE reading so an exhausted budget skips the I/O.
        remaining = max_chars - total
        if remaining <= 0:
            truncated = True  # an existing file remains unread
            break
        text = path.read_text(encoding="utf-8")
        selected = text[:remaining]
        if len(selected) < len(text):
            truncated = True  # this file was clipped
        total += len(selected)
        payload.append({"path": str(path), "content": selected})
    return {"files": payload, "truncated": truncated}
def tool_docs_append_run_note(arguments: dict[str, Any]) -> dict[str, Any]:
    """Append a timestamped markdown run note to a file under BASE_DIR or /tmp.

    Defaults to /tmp/gia-mcp-run-notes.md when no path is given; relative
    paths are resolved against the project root. Raises ValueError for empty
    content or for a path outside the allowed roots.
    """
    content = str(arguments.get("content") or "").strip()
    title = str(arguments.get("title") or "").strip() or "MCP Run Note"
    task_id = str(arguments.get("task_id") or "").strip()
    raw_path = str(arguments.get("path") or "").strip()
    if not content:
        raise ValueError("content is required")
    base = Path(settings.BASE_DIR).resolve()
    if not raw_path:
        # Resolve so the containment check below also passes where /tmp is a
        # symlink (e.g. macOS /tmp -> /private/tmp).
        path = Path("/tmp/gia-mcp-run-notes.md").resolve()
    else:
        candidate = Path(raw_path)
        path = (
            candidate.resolve()
            if candidate.is_absolute()
            else (base / candidate).resolve()
        )
    # Containment check via Path.is_relative_to: the previous string-prefix
    # test (str(path).startswith(str(root))) wrongly accepted sibling paths
    # such as /tmpfoo or <BASE_DIR>-extra.
    allowed_roots = [base, Path("/tmp").resolve()]
    if not any(path.is_relative_to(root) for root in allowed_roots):
        raise ValueError("path must be within project root or /tmp")
    path.parent.mkdir(parents=True, exist_ok=True)
    ts = time.strftime("%Y-%m-%d %H:%M:%S UTC", time.gmtime())
    lines = [f"## {title}", "", f"- Timestamp: {ts}"]
    if task_id:
        lines.append(f"- Task ID: `{task_id}`")
    lines.extend(["", content, "", "---", ""])
    text = "\n".join(lines)
    with path.open("a", encoding="utf-8") as handle:
        handle.write(text)
    return {"ok": True, "path": str(path), "bytes_written": len(text)}
TOOL_DEFS: dict[str, dict[str, Any]] = {
"manticore.status": {
"description": "Report configured memory backend status (django or manticore).",
"inputSchema": {
"type": "object",
"properties": {},
"additionalProperties": False,
},
"handler": tool_manticore_status,
},
"manticore.query": {
"description": "Query memory index via configured backend.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"query": {"type": "string"},
"conversation_id": {"type": "string"},
"limit": {"type": "integer"},
"statuses": {
"anyOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}},
]
},
},
"required": ["user_id", "query"],
"additionalProperties": False,
},
"handler": tool_manticore_query,
},
"manticore.reindex": {
"description": "Reindex memory rows into configured backend.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"limit": {"type": "integer"},
"statuses": {
"anyOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}},
]
},
},
"additionalProperties": False,
},
"handler": tool_manticore_reindex,
},
"memory.list": {
"description": "List approved memories for prompt usage.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"query": {"type": "string"},
"person_id": {"type": "string"},
"conversation_id": {"type": "string"},
"statuses": {
"anyOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}},
]
},
"limit": {"type": "integer"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_memory_list,
},
"memory.propose": {
"description": "Create a pending memory proposal requiring review.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"conversation_id": {"type": "string"},
"person_id": {"type": "string"},
"memory_kind": {"type": "string"},
"content": {"anyOf": [{"type": "object"}, {"type": "string"}]},
"confidence_score": {"type": "number"},
"expires_at": {"type": "string"},
"reason": {"type": "string"},
"requested_by_identifier": {"type": "string"},
},
"required": ["user_id", "conversation_id", "content"],
"additionalProperties": False,
},
"handler": tool_memory_propose,
},
"memory.pending": {
"description": "List pending memory change requests.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"limit": {"type": "integer"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_memory_pending,
},
"memory.review": {
"description": "Approve or reject a pending memory request.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"request_id": {"type": "string"},
"decision": {"type": "string"},
"reviewer_identifier": {"type": "string"},
"note": {"type": "string"},
},
"required": ["user_id", "request_id", "decision"],
"additionalProperties": False,
},
"handler": tool_memory_review,
},
"memory.suggest_from_messages": {
"description": "Extract memory proposals from recent inbound messages.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"limit_messages": {"type": "integer"},
"max_items": {"type": "integer"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_memory_suggest,
},
"tasks.list": {
"description": "List derived tasks for a user.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"status": {"type": "string"},
"project_id": {"type": "string"},
"query": {"type": "string"},
"limit": {"type": "integer"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_tasks_list,
},
"tasks.search": {
"description": "Search derived tasks by free text for a user.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"query": {"type": "string"},
"status": {"type": "string"},
"project_id": {"type": "string"},
"limit": {"type": "integer"},
},
"required": ["user_id", "query"],
"additionalProperties": False,
},
"handler": tool_tasks_search,
},
"tasks.create": {
"description": "Create a canonical task inside GIA.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"project_id": {"type": "string"},
"epic_id": {"type": "string"},
"title": {"type": "string"},
"due_date": {"type": "string"},
"assignee_identifier": {"type": "string"},
"actor_identifier": {"type": "string"},
"source_service": {"type": "string"},
"source_channel": {"type": "string"},
},
"required": ["user_id", "project_id", "title"],
"additionalProperties": False,
},
"handler": tool_tasks_create,
},
"tasks.get": {
"description": "Get one derived task by ID, including links.",
"inputSchema": {
"type": "object",
"properties": {
"task_id": {"type": "string"},
"user_id": {"type": "integer"},
},
"required": ["task_id"],
"additionalProperties": False,
},
"handler": tool_tasks_get,
},
"tasks.events": {
"description": "List events for one derived task.",
"inputSchema": {
"type": "object",
"properties": {
"task_id": {"type": "string"},
"user_id": {"type": "integer"},
"limit": {"type": "integer"},
},
"required": ["task_id"],
"additionalProperties": False,
},
"handler": tool_tasks_events,
},
"tasks.create_note": {
"description": "Append an implementation/progress note to a task.",
"inputSchema": {
"type": "object",
"properties": {
"task_id": {"type": "string"},
"user_id": {"type": "integer"},
"note": {"type": "string"},
"actor_identifier": {"type": "string"},
},
"required": ["task_id", "note"],
"additionalProperties": False,
},
"handler": tool_tasks_create_note,
},
"tasks.complete": {
"description": "Mark a task completed and append a completion event.",
"inputSchema": {
"type": "object",
"properties": {
"task_id": {"type": "string"},
"user_id": {"type": "integer"},
"actor_identifier": {"type": "string"},
},
"required": ["task_id"],
"additionalProperties": False,
},
"handler": tool_tasks_complete,
},
"tasks.link_artifact": {
"description": "Link an artifact (URI/path) to a task.",
"inputSchema": {
"type": "object",
"properties": {
"task_id": {"type": "string"},
"user_id": {"type": "integer"},
"kind": {"type": "string"},
"uri": {"type": "string"},
"path": {"type": "string"},
"summary": {"type": "string"},
"created_by_identifier": {"type": "string"},
},
"required": ["task_id"],
"additionalProperties": False,
},
"handler": tool_tasks_link_artifact,
},
"wiki.create_article": {
"description": "Create a wiki article with initial revision.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"title": {"type": "string"},
"slug": {"type": "string"},
"markdown": {"type": "string"},
"tags": {
"anyOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}},
]
},
"status": {"type": "string"},
"related_task_id": {"type": "string"},
"owner_identifier": {"type": "string"},
"author_identifier": {"type": "string"},
"summary": {"type": "string"},
},
"required": ["user_id", "title"],
"additionalProperties": False,
},
"handler": tool_wiki_create_article,
},
"wiki.update_article": {
"description": "Update wiki article and append a revision entry.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"article_id": {"type": "string"},
"slug": {"type": "string"},
"title": {"type": "string"},
"markdown": {"type": "string"},
"tags": {
"anyOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}},
]
},
"status": {"type": "string"},
"related_task_id": {"type": "string"},
"summary": {"type": "string"},
"author_identifier": {"type": "string"},
"approve_overwrite": {"type": "boolean"},
"approve_archive": {"type": "boolean"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_wiki_update_article,
},
"wiki.list": {
"description": "List wiki articles for a user with filters.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"status": {"type": "string"},
"tag": {"type": "string"},
"related_task_id": {"type": "string"},
"query": {"type": "string"},
"limit": {"type": "integer"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_wiki_list,
},
"wiki.get": {
"description": "Get one wiki article by id/slug.",
"inputSchema": {
"type": "object",
"properties": {
"user_id": {"type": "integer"},
"article_id": {"type": "string"},
"slug": {"type": "string"},
"include_revisions": {"type": "boolean"},
"revision_limit": {"type": "integer"},
},
"required": ["user_id"],
"additionalProperties": False,
},
"handler": tool_wiki_get,
},
"project.get_guidelines": {
"description": "Load key project guideline documents.",
"inputSchema": {
"type": "object",
"properties": {"max_chars": {"type": "integer"}},
"additionalProperties": False,
},
"handler": tool_project_get_guidelines,
},
"project.get_layout": {
"description": "List major project files/directories for orientation.",
"inputSchema": {
"type": "object",
"properties": {"max_entries": {"type": "integer"}},
"additionalProperties": False,
},
"handler": tool_project_get_layout,
},
"project.get_runbook": {
"description": "Load operational runbook docs for agent continuity.",
"inputSchema": {
"type": "object",
"properties": {"max_chars": {"type": "integer"}},
"additionalProperties": False,
},
"handler": tool_project_get_runbook,
},
"docs.append_run_note": {
"description": "Append an implementation note markdown entry.",
"inputSchema": {
"type": "object",
"properties": {
"title": {"type": "string"},
"content": {"type": "string"},
"task_id": {"type": "string"},
"path": {"type": "string"},
},
"required": ["content"],
"additionalProperties": False,
},
"handler": tool_docs_append_run_note,
},
}
def tool_specs() -> list[dict[str, Any]]:
    """Return the public tool catalogue: name, description, and input schema.

    This is the listing surface for MCP `tools/list`; handlers are
    deliberately excluded from the returned entries.
    """
    specs: list[dict[str, Any]] = []
    for tool_name, definition in TOOL_DEFS.items():
        specs.append(
            {
                "name": tool_name,
                "description": definition["description"],
                "inputSchema": definition["inputSchema"],
            }
        )
    return specs
def _write_audit_entry(
    tool_name: str,
    audit_user: Any,
    request_args: dict[str, Any],
    *,
    ok: bool,
    duration_ms: int,
    meta_factory: Any = None,
    error: str = "",
) -> None:
    """Best-effort persistence of one MCP tool audit row; never raises.

    ``meta_factory`` is called lazily inside the try block so that a failure
    while building the response preview only produces a warning, matching the
    original inline behavior.
    """
    try:
        MCPToolAuditLog.objects.create(
            tool_name=tool_name,
            user=audit_user,
            request_args=request_args,
            response_meta=meta_factory() if meta_factory is not None else {},
            ok=ok,
            error=error,
            duration_ms=max(0, duration_ms),
        )
    except Exception as exc:
        # Auditing must never break tool execution — log and move on.
        label = "success" if ok else "error"
        log.warning("failed writing MCP %s audit log: %s", label, exc)


def execute_tool(name: str, arguments: dict[str, Any] | None = None) -> dict[str, Any]:
    """Resolve *name* in TOOL_DEFS, run its handler, and audit the call.

    Returns the handler's payload on success. Raises ``ValueError`` for an
    unknown tool name; handler exceptions are re-raised after a failure audit
    row is recorded. Audit-log write failures are logged, never propagated.
    """
    entry = TOOL_DEFS.get(str(name or "").strip())
    if not entry:
        raise ValueError(f"Unknown tool: {name}")
    args = arguments or {}
    handler = entry["handler"]
    started = time.time()
    audit_user = _audit_user_from_args(args)
    try:
        payload = handler(args)
    except Exception as exc:
        _write_audit_entry(
            str(name),
            audit_user,
            args,
            ok=False,
            error=str(exc),
            duration_ms=int((time.time() - started) * 1000),
        )
        raise
    _write_audit_entry(
        str(name),
        audit_user,
        args,
        ok=True,
        # Lazy so a _preview_meta failure is absorbed by the helper's guard.
        meta_factory=lambda: _preview_meta(payload),
        duration_ms=int((time.time() - started) * 1000),
    )
    return payload
def format_tool_content(payload: dict[str, Any]) -> dict[str, Any]:
    """Wrap a tool result payload in the MCP text-content envelope."""
    rendered = json.dumps(payload, indent=2)
    return {"content": [{"type": "text", "text": rendered}]}