Increase platform abstraction cohesion
@@ -137,6 +137,8 @@ class DjangoMemorySearchBackend(BaseMemorySearchBackend):


class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
    name = "manticore"
    _table_ready_cache: dict[str, float] = {}
    _table_ready_ttl_seconds = 30.0

    def __init__(self):
        self.base_url = str(
@@ -146,6 +148,7 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
            getattr(settings, "MANTICORE_MEMORY_TABLE", "gia_memory_items")
        ).strip() or "gia_memory_items"
        self.timeout_seconds = int(getattr(settings, "MANTICORE_HTTP_TIMEOUT", 5) or 5)
        self._table_cache_key = f"{self.base_url}|{self.table}"

    def _sql(self, query: str) -> dict[str, Any]:
        response = requests.post(
@@ -160,6 +163,9 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
        return dict(payload or {})
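The hunk elides the body of `_sql`. For orientation, a minimal sketch of the kind of call it wraps, assuming Manticore's HTTP /sql endpoint in raw mode; the payload shape and error handling here are assumptions, not this commit's code:

        # Hypothetical sketch, not part of this commit: Manticore's HTTP API
        # accepts raw SQL as a form-encoded POST to /sql with mode=raw.
        response = requests.post(
            f"{self.base_url}/sql",
            data={"mode": "raw", "query": query},
            timeout=self.timeout_seconds,
        )
        response.raise_for_status()
        # The surrounding method normalizes whatever comes back into a dict.
        payload = response.json()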

    def ensure_table(self) -> None:
        last_ready = float(self._table_ready_cache.get(self._table_cache_key, 0.0) or 0.0)
        if (time.time() - last_ready) <= float(self._table_ready_ttl_seconds):
            return
        self._sql(
            (
                f"CREATE TABLE IF NOT EXISTS {self.table} ("
@@ -175,6 +181,7 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
                ")"
            )
        )
        self._table_ready_cache[self._table_cache_key] = time.time()
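The hunk at @@ -175,6 +181,7 @@ elides the column definitions inside the CREATE TABLE. Inferring from the REPLACE INTO column list later in this diff, the elided DDL plausibly covers the following fields; the names are taken from that statement, the types are guesses:

                # Hypothetical reconstruction of the elided column list.
                # f"memory_uuid string, user_id bigint, conversation_id string, "
                # f"memory_kind string, status string, updated_ts bigint, "
                # f"summary text, body text"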

    def _doc_id(self, memory_id: str) -> int:
        digest = hashlib.blake2b(
@@ -206,11 +213,66 @@ class ManticoreMemorySearchBackend(BaseMemorySearchBackend):
        )
        self._sql(query)
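The body of `_doc_id` is cut off mid-expression by the hunk. A minimal sketch of one common way to finish a blake2b-to-integer document id; the digest size, byte order, and sign mask are assumptions:

        # Hypothetical completion, not this commit's code.
        digest = hashlib.blake2b(memory_id.encode("utf-8"), digest_size=8).digest()
        # Mask to 63 bits so the result stays a positive signed-64 document id.
        return int.from_bytes(digest, "big") & 0x7FFF_FFFF_FFFF_FFFF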

    def _build_upsert_values_clause(self, item: MemoryItem) -> str:
        memory_id = str(item.id)
        doc_id = self._doc_id(memory_id)
        # Flatten once; the summary is the first 280 characters of the body.
        body = _flatten_to_text(item.content)
        summary = body[:280]
        updated_ts = int(item.updated_at.timestamp() * 1000)
        return (
            f"({doc_id},'{self._escape(memory_id)}',{int(item.user_id)},"
            f"'{self._escape(item.conversation_id)}','{self._escape(item.memory_kind)}',"
            f"'{self._escape(item.status)}',{updated_ts},"
            f"'{self._escape(summary)}','{self._escape(body)}')"
        )

    def delete(self, memory_id: str) -> None:
        self.ensure_table()
        doc_id = self._doc_id(memory_id)
        self._sql(f"DELETE FROM {self.table} WHERE id={doc_id}")

    def reindex(
        self,
        *,
        user_id: int | None = None,
        include_statuses: tuple[str, ...] = ("active",),
        limit: int = 2000,
    ) -> dict[str, int]:
        self.ensure_table()
        queryset = MemoryItem.objects.all().order_by("-updated_at")
        if user_id is not None:
            queryset = queryset.filter(user_id=int(user_id))
        if include_statuses:
            queryset = queryset.filter(status__in=list(include_statuses))

        scanned = 0
        indexed = 0
        batch_size = 100
        values: list[str] = []
        for item in queryset[: max(1, int(limit))]:
            scanned += 1
            try:
                values.append(self._build_upsert_values_clause(item))
            except Exception as exc:
                log.warning("memory-search upsert build failed id=%s err=%s", item.id, exc)
                continue
            if len(values) >= batch_size:
                self._sql(
                    f"REPLACE INTO {self.table} "
                    "(id,memory_uuid,user_id,conversation_id,memory_kind,status,updated_ts,summary,body) "
                    f"VALUES {','.join(values)}"
                )
                indexed += len(values)
                values = []
        if values:
            self._sql(
                f"REPLACE INTO {self.table} "
                "(id,memory_uuid,user_id,conversation_id,memory_kind,status,updated_ts,summary,body) "
                f"VALUES {','.join(values)}"
            )
            indexed += len(values)
        return {"scanned": scanned, "indexed": indexed}

    def search(
        self,
        *,