Implement AI workspace and mitigation workflow
This commit is contained in:
@@ -1,26 +1,26 @@
|
||||
# Deferred processing library
|
||||
from core.util import logs
|
||||
from pydantic import BaseModel
|
||||
import asyncio
|
||||
from typing import Annotated, Optional
|
||||
from uuid import UUID
|
||||
from pydantic import ValidationError
|
||||
from core.models import QueuedMessage, Message, PersonIdentifier, User
|
||||
from core.clients import signal
|
||||
from core.lib.prompts.functions import delete_messages
|
||||
|
||||
from asgiref.sync import sync_to_async
|
||||
from django.conf import settings
|
||||
from core.clients import signalapi
|
||||
import asyncio
|
||||
|
||||
from pydantic import BaseModel, ValidationError
|
||||
|
||||
from core.clients import signal, signalapi
|
||||
from core.lib.prompts.functions import delete_messages
|
||||
from core.models import Message, PersonIdentifier, QueuedMessage, User
|
||||
from core.util import logs
|
||||
|
||||
log = logs.get_logger("deferred")  # module-level logger for the deferred-processing worker
|
||||
|
||||
|
||||
class DeferredDetail(BaseModel):
    """Optional per-message detail flags carried on a DeferredRequest."""

    # NOTE(review): presumably whether a reply to the sender's own messages is allowed — confirm with callers
    reply_to_self: bool
    # NOTE(review): presumably whether replies to other participants are allowed — confirm with callers
    reply_to_others: bool
    # NOTE(review): looks like it marks the originating message as outgoing — confirm with callers
    is_outgoing_message: bool
|
||||
|
||||
|
||||
class DeferredRequest(BaseModel):
|
||||
type: str
|
||||
method: str
|
||||
@@ -32,33 +32,35 @@ class DeferredRequest(BaseModel):
|
||||
detail: Optional[DeferredDetail] = None
|
||||
attachments: Optional[list] = None
|
||||
|
||||
|
||||
async def send_message(db_obj):
    """Send a queued message over Signal with simulated typing, then record it.

    ``natural.natural_send_message`` sends the text (possibly in chunks) using
    the provided send/typing callbacks and returns one timestamp per chunk it
    actually delivered. The queued row ``db_obj`` is always deleted once the
    send attempt completes. If at least one chunk was delivered, a ``Message``
    row authored by "BOT" is stored using one of the Signal timestamps so the
    local history lines up with what the recipient saw.

    NOTE(review): the original block contained this entire body twice —
    apparent diff/merge residue — which would re-send the text and delete the
    already-deleted row a second time; deduplicated here.
    NOTE(review): ``natural`` is not among the imports visible in this chunk —
    confirm it is imported elsewhere in the file.
    """
    recipient_uuid = db_obj.session.identifier.identifier
    text = db_obj.text

    # Callbacks handed to the natural-typing sender (defs instead of
    # lambda assignments, per PEP 8 / E731).
    def send(chunk):
        return signalapi.send_message_raw(recipient_uuid, chunk)  # returns ts

    def start_t():
        return signalapi.start_typing(recipient_uuid)

    def stop_t():
        return signalapi.stop_typing(recipient_uuid)

    tss = await natural.natural_send_message(
        text,
        send,
        start_t,
        stop_t,
    )  # list of ts; falsy entries mean that chunk was not sent

    # The queued row is consumed regardless of delivery outcome.
    await sync_to_async(db_obj.delete)()

    result = [x for x in tss if x]  # keep only truthy timestamps
    if result:  # at least one chunk was actually sent
        ts1 = result.pop()  # use the last delivered chunk's timestamp
        log.info(f"signal message create {text}")
        await sync_to_async(Message.objects.create)(
            user=db_obj.session.user,
            session=db_obj.session,
            custom_author="BOT",
            text=text,
            ts=ts1,  # use that time in db
        )
|
||||
|
||||
async def process_deferred(data: dict, **kwargs):
|
||||
try:
|
||||
@@ -68,12 +70,11 @@ async def process_deferred(data: dict, **kwargs):
|
||||
except ValidationError as e:
|
||||
log.info(f"Validation Error: {e}")
|
||||
return
|
||||
|
||||
|
||||
method = validated_data.method
|
||||
user_id = validated_data.user_id
|
||||
message_id = validated_data.message_id
|
||||
|
||||
|
||||
if method == "accept_message":
|
||||
try:
|
||||
message = await sync_to_async(QueuedMessage.objects.get)(
|
||||
@@ -91,7 +92,7 @@ async def process_deferred(data: dict, **kwargs):
|
||||
else:
|
||||
log.warning(f"Protocol not supported: {message.session.identifier.service}")
|
||||
return
|
||||
elif method == "xmpp": # send xmpp message
|
||||
elif method == "xmpp": # send xmpp message
|
||||
xmpp = kwargs.get("xmpp")
|
||||
service = validated_data.service
|
||||
msg = validated_data.msg
|
||||
@@ -115,18 +116,28 @@ async def process_deferred(data: dict, **kwargs):
|
||||
continue
|
||||
|
||||
# Attach fetched file to XMPP
|
||||
xmpp_attachments.append({
|
||||
"content": fetched["content"],
|
||||
"content_type": fetched["content_type"],
|
||||
"filename": fetched["filename"],
|
||||
"size": fetched["size"],
|
||||
})
|
||||
xmpp_attachments.append(
|
||||
{
|
||||
"content": fetched["content"],
|
||||
"content_type": fetched["content_type"],
|
||||
"filename": fetched["filename"],
|
||||
"size": fetched["size"],
|
||||
}
|
||||
)
|
||||
for identifier in identifiers:
|
||||
#recipient_jid = f"{identifier.user.username}@{settings.XMPP_ADDRESS}"
|
||||
# recipient_jid = f"{identifier.user.username}@{settings.XMPP_ADDRESS}"
|
||||
user = identifier.user
|
||||
|
||||
log.info(f"Sending {len(xmpp_attachments)} attachments from Signal to XMPP.")
|
||||
await xmpp.send_from_external(user, identifier, msg, validated_data.detail, attachments=xmpp_attachments)
|
||||
log.info(
|
||||
f"Sending {len(xmpp_attachments)} attachments from Signal to XMPP."
|
||||
)
|
||||
await xmpp.send_from_external(
|
||||
user,
|
||||
identifier,
|
||||
msg,
|
||||
validated_data.detail,
|
||||
attachments=xmpp_attachments,
|
||||
)
|
||||
else:
|
||||
log.warning(f"Method not yet supported: {method}")
|
||||
return
|
||||
return
|
||||
|
||||
@@ -339,4 +339,4 @@ To make comments about being messaged late, keep in mind THEIR time zone.
|
||||
Contact: hi (their time zone is latvia and my current time is 22:30)
|
||||
Me: hi, it’s late there. what’s up?
|
||||
|
||||
"""
|
||||
"""
|
||||
|
||||
@@ -1,147 +1,12 @@
|
||||
from core.lib.prompts import bases
|
||||
from openai import AsyncOpenAI
|
||||
"""
|
||||
Prompt utility helpers.
|
||||
|
||||
Legacy summarization-based history compaction was intentionally removed.
|
||||
History is now preserved in storage and bounded only at prompt-build time.
|
||||
"""
|
||||
|
||||
from asgiref.sync import sync_to_async
|
||||
from core.models import Message, ChatSession, AI, Person, Manipulation
|
||||
from core.util import logs
|
||||
import json
|
||||
from django.utils import timezone
|
||||
from core.messaging import ai
|
||||
from core.messaging.utils import messages_to_string
|
||||
|
||||
SUMMARIZE_WHEN_EXCEEDING = 10  # start summarizing once this many non-summary messages exist
SUMMARIZE_BY = 5  # number of oldest messages folded into each new summary

MAX_SUMMARIES = 3  # keep at most this many summary messages per session

log = logs.get_logger("prompts")  # module-level logger for prompt helpers
|
||||
|
||||
|
||||
async def delete_messages(queryset):
    """Delete every row in *queryset* without blocking the event loop.

    The Django ORM ``delete()`` call is synchronous, so it is wrapped with
    ``sync_to_async`` (``thread_sensitive=True`` keeps it on Django's main
    sync executor).
    """
    async_delete = sync_to_async(queryset.delete, thread_sensitive=True)
    await async_delete()
|
||||
|
||||
async def truncate_and_summarize(
    chat_session: ChatSession,
    ai_obj: AI,
):
    """Bound a session's stored history by folding old messages into summaries.

    Summarizes messages in chunks to prevent unchecked growth:
    - Summarizes only non-summary messages (``custom_author != "SUM"``).
    - When too many "SUM" rows accumulate, consolidates them into
      ``chat_session.summary`` and deletes the oldest ones.
    - Ensures only messages belonging to ``chat_session.user`` are modified.

    NOTE(review): ordering matters here — consolidation of old summaries runs
    BEFORE the new chunk is summarized, and the new summary row reuses the
    last summarized message's ``ts`` so chronological ordering is preserved.
    """
    user = chat_session.user  # Store the user for ownership checks

    # Get non-summary messages owned by the session's user, oldest first.
    messages = await sync_to_async(list)(
        Message.objects.filter(session=chat_session, user=user)
        .exclude(custom_author="SUM")
        .order_by("ts")
    )

    num_messages = len(messages)

    if num_messages >= SUMMARIZE_WHEN_EXCEEDING:
        log.info(f"Summarizing {SUMMARIZE_BY} messages for session {chat_session.id}")

        # Take the oldest `SUMMARIZE_BY` non-summary messages.
        chunk_to_summarize = messages[:SUMMARIZE_BY]

        if not chunk_to_summarize:
            log.warning("No messages available to summarize (only summaries exist). Skipping summarization.")
            return

        last_ts = chunk_to_summarize[-1].ts  # Preserve timestamp for the new summary row

        # Get past summaries (owned by the session user), oldest first.
        summary_messages = await sync_to_async(list)(
            Message.objects.filter(session=chat_session, user=user, custom_author="SUM")
            .order_by("ts")
        )

        # Too many summary rows: merge them all into the session-level summary
        # field, then drop the oldest rows beyond MAX_SUMMARIES.
        if len(summary_messages) >= MAX_SUMMARIES:
            summary_text = await summarize_conversation(chat_session, summary_messages, ai_obj, is_summary=True)

            chat_session.summary = summary_text
            await sync_to_async(chat_session.save)()
            log.info(f"Updated ChatSession summary with {len(summary_messages)} consolidated summaries.")

            num_to_delete = len(summary_messages) - MAX_SUMMARIES
            await delete_messages(
                Message.objects.filter(
                    session=chat_session,
                    user=user,
                    id__in=[msg.id for msg in summary_messages[:num_to_delete]]
                )
            )
            log.info(f"Deleted {num_to_delete} old summaries.")

        # Summarize the chunk of raw messages.
        summary_text = await summarize_conversation(chat_session, chunk_to_summarize, ai_obj)

        # Replace the summarized raw messages with the summary.
        await delete_messages(Message.objects.filter(session=chat_session, user=user, id__in=[msg.id for msg in chunk_to_summarize]))
        log.info(f"Deleted {len(chunk_to_summarize)} messages, replacing with summary.")

        # Store the new summary message (ensuring session/user consistency),
        # timestamped at the last summarized message so ordering is preserved.
        await sync_to_async(Message.objects.create)(
            user=user,
            session=chat_session,
            custom_author="SUM",
            text=summary_text,
            ts=last_ts,  # Preserve timestamp
        )
|
||||
|
||||
|
||||
|
||||
|
||||
async def summarize_conversation(
    chat_session: ChatSession,
    messages: list[Message],
    ai_obj,
    is_summary=False,
):
    """Collapse *messages* into a single ``"Summary: ..."`` string via the AI.

    When ``is_summary`` is true, the inputs are themselves earlier summaries
    and are merged while keeping their detail; otherwise raw chat messages are
    condensed into a concise summary.
    """
    log.info(f"Summarizing messages for session {chat_session.id}")

    # Render the messages into the plain-text transcript the model consumes.
    transcript = messages_to_string(messages)

    # Pick the system instruction matching the input kind.
    if is_summary:
        instruction = "Merge and refine these past summaries, keeping critical details and structure intact."
    else:
        instruction = "Summarize this conversation concisely, maintaining important details and tone."

    prompt = [
        {"role": "system", "content": instruction},
        {"role": "user", "content": f"Conversation:\n{transcript}\n\nProvide a clear and structured summary:"},
    ]

    # Generate the AI-based summary and prefix it for storage.
    generated = await ai.run_prompt(prompt, ai_obj)
    return f"Summary: {generated}"
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user