import json
from typing import Any
from uuid import UUID

from banortegpt.database.mongo_memory import crud
from langfuse.decorators import langfuse_context, observe
from pydantic import BaseModel

from api import context as ctx
from api.agent import MayaRiesgos


class Response(BaseModel):
    content: str
    urls: list[str]


@observe(capture_input=False, capture_output=False)
async def generate(
    agent: MayaRiesgos,
    prompt: str,
    conversation_id: UUID,
) -> Response:
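    """Run one turn of the agent.

    Loads the stored conversation, appends the user prompt, calls the LLM,
    executes a requested tool call if there is one, and returns the final
    answer together with any shareable URLs.
    """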
    conversation = await crud.get_conversation(conversation_id)

    if conversation is None:
        raise ValueError(f"Conversation with id {conversation_id} not found")

    conversation.add(role="user", content=prompt)

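    # First model pass over the trimmed message history; the model may answer
    # directly or request a tool call.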
    response = await agent.generate(conversation.to_openai_format(agent.message_limit))

    reference_urls, image_urls = [], []

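    # Stash the requested tool call (id, name, raw arguments) in ctx, or buffer
    # the plain answer when no tool was requested.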
    if call := response.tool_calls:
        if call_id := call[0].id:
            ctx.tool_id.set(call_id)
        if name := call[0].function.name:
            ctx.tool_name.set(name)
        ctx.tool_buffer.set(call[0].function.arguments)
    else:
        ctx.buffer.set(response.content)

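    # Read back whatever the branch above stored in ctx.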
    buffer = ctx.buffer.get()
    tool_buffer = ctx.tool_buffer.get()
    tool_id = ctx.tool_id.get()
    tool_name = ctx.tool_name.get()

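    # A tool was requested: decode its arguments, run it, and ask the model
    # again with the tool result added to the conversation.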
    if tool_id is not None:
        # If tool_buffer is a JSON string, convert it to a dictionary.
        if isinstance(tool_buffer, str):
            try:
                tool_args = json.loads(tool_buffer)
            except json.JSONDecodeError:
                tool_args = {"question": tool_buffer}
        else:
            tool_args = tool_buffer

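        # Run the tool registered under tool_name; it returns the result for
        # the model and the payloads used below to build shareable URLs.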
        response, payloads = await agent.tool_map[tool_name](**tool_args)  # type: ignore

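        # Build the kwargs that record the assistant's tool call and the
        # matching tool response in the conversation.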
        tool_call: dict[str, Any] = agent.llm.build_tool_call(
            tool_id, tool_name, tool_buffer
        )
        tool_call_id: dict[str, Any] = agent.llm.build_tool_call_id(tool_id)

        conversation.add("assistant", **tool_call)
        conversation.add("tool", content=response, **tool_call_id)

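        # Second model pass, this time with {"tools": None}, so the model
        # writes the final answer from the tool output.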
        response = await agent.generate(
            conversation.to_openai_format(agent.message_limit), {"tools": None}
        )
        ctx.buffer.set(response.content)

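        # Resolve the tool payloads into shareable reference and image URLs.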
        reference_urls, image_urls = await agent.get_shareable_urls(payloads)  # type: ignore

    buffer = ctx.buffer.get()
    if buffer is None:
        raise ValueError("No buffer found")

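    # Record the assistant's final reply in the conversation.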
    conversation.add(role="assistant", content=buffer)

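    # Tag the current Langfuse trace with the agent name, session, input and output.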
    langfuse_context.update_current_trace(
        name=agent.__class__.__name__,
        session_id=str(conversation_id),
        input=prompt,
        output=buffer,
    )

    return Response(content=buffer, urls=reference_urls + image_urls)