Increase platform abstraction cohesion
This commit is contained in:
11
core/management/commands/mcp_manticore_server.py
Normal file
11
core/management/commands/mcp_manticore_server.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from core.mcp.server import run_stdio_server
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Expose the GIA MCP stdio server as a Django management command.

    Delegates entirely to ``core.mcp.server.run_stdio_server``; no options
    are consumed by this command itself.
    """

    help = "Run GIA MCP stdio server with manticore/task/documentation tools."

    def handle(self, *args, **options):
        # Positional args and parsed options are deliberately ignored —
        # the stdio server reads its protocol from stdin/stdout.
        del args, options
        run_stdio_server()
|
||||
40
core/management/commands/memory_hygiene.py
Normal file
40
core/management/commands/memory_hygiene.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from core.memory.pipeline import run_memory_hygiene
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Run memory hygiene (stale decay + contradiction queueing) via the CLI.

    Options:
        --user-id   Restrict the run to a single user (integer id); empty
                    means all users.
        --dry-run   Report what would change without persisting anything.
        --json      Emit the full result payload as pretty-printed JSON
                    instead of the one-line summary.

    Raises:
        CommandError: if ``--user-id`` is supplied but is not an integer.
    """

    help = "Run memory hygiene checks (stale decay + contradiction queueing)."

    def add_arguments(self, parser):
        parser.add_argument("--user-id", default="")
        parser.add_argument("--dry-run", action="store_true", default=False)
        parser.add_argument("--json", action="store_true", default=False)

    def handle(self, *args, **options):
        user_id_raw = str(options.get("user_id") or "").strip()
        dry_run = bool(options.get("dry_run"))
        as_json = bool(options.get("json"))

        # Validate up front: a non-numeric --user-id previously escaped as a
        # raw ValueError traceback; surface it as a clean CommandError.
        if user_id_raw:
            try:
                user_id = int(user_id_raw)
            except ValueError as err:
                raise CommandError(
                    f"--user-id must be an integer, got {user_id_raw!r}"
                ) from err
        else:
            user_id = None

        result = run_memory_hygiene(user_id=user_id, dry_run=dry_run)
        payload = {
            "user_id": user_id,
            "dry_run": dry_run,
            "result": result,
        }
        if as_json:
            self.stdout.write(json.dumps(payload, indent=2, sort_keys=True))
            return
        # Compact single-line summary for interactive / cron log usage.
        self.stdout.write(
            "memory-hygiene "
            f"user={user_id if user_id is not None else '-'} "
            f"dry_run={'yes' if dry_run else 'no'} "
            f"expired={int(result.get('expired') or 0)} "
            f"contradictions={int(result.get('contradictions') or 0)} "
            f"queued={int(result.get('queued_requests') or 0)}"
        )
|
||||
46
core/management/commands/memory_suggest_from_messages.py
Normal file
46
core/management/commands/memory_suggest_from_messages.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from core.memory.pipeline import suggest_memories_from_recent_messages
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Queue proposed MemoryItem rows derived from recent inbound messages.

    Options:
        --user-id          Required integer id of the user to scan.
        --limit-messages   Max recent messages to scan (clamped to >= 1).
        --max-items        Max memory items to propose (clamped to >= 1).
        --json             Emit the result payload as pretty-printed JSON
                           instead of the one-line summary.

    Raises:
        CommandError: if ``--user-id`` is missing or not an integer.
    """

    help = "Suggest proposed MemoryItem rows from recent inbound message text."

    def add_arguments(self, parser):
        parser.add_argument("--user-id", required=True)
        parser.add_argument("--limit-messages", type=int, default=300)
        parser.add_argument("--max-items", type=int, default=30)
        parser.add_argument("--json", action="store_true", default=False)

    def handle(self, *args, **options):
        user_id_raw = str(options.get("user_id") or "").strip()
        if not user_id_raw:
            raise CommandError("--user-id is required")
        # Convert once, with a clean error — previously a non-numeric value
        # escaped as a raw ValueError traceback, inconsistent with the
        # CommandError used for the missing-arg case.
        try:
            user_id = int(user_id_raw)
        except ValueError as err:
            raise CommandError(
                f"--user-id must be an integer, got {user_id_raw!r}"
            ) from err

        limit_messages = max(1, int(options.get("limit_messages") or 300))
        max_items = max(1, int(options.get("max_items") or 30))
        as_json = bool(options.get("json"))

        result = suggest_memories_from_recent_messages(
            user_id=user_id,
            limit_messages=limit_messages,
            max_items=max_items,
        )
        payload = {
            "user_id": user_id,
            "limit_messages": limit_messages,
            "max_items": max_items,
            "result": result,
        }
        if as_json:
            self.stdout.write(json.dumps(payload, indent=2, sort_keys=True))
            return
        # Compact single-line summary for interactive / cron log usage.
        self.stdout.write(
            "memory-suggest-from-messages "
            f"user={payload['user_id']} "
            f"scanned={int(result.get('scanned') or 0)} "
            f"queued={int(result.get('queued') or 0)}"
        )
|
||||
Reference in New Issue
Block a user