This commit is contained in:
Rogelio
2025-10-13 18:16:25 +00:00
parent 739f087cef
commit 325f1ef439
415 changed files with 46870 additions and 0 deletions

View File

@@ -0,0 +1,6 @@
from banortegpt.database.mongo_memory.crud import create_conversation
from .generate_response import generate
from .stream_response import stream
__all__ = ["stream", "generate", "create_conversation"]

View File

@@ -0,0 +1,89 @@
import json
from typing import Any
from uuid import UUID
from banortegpt.database.mongo_memory import crud
from langfuse.decorators import langfuse_context, observe
from pydantic import BaseModel
from api import context as ctx
from api.agent import MayaRiesgos
class Response(BaseModel):
    # Final result of `generate`: the assistant's answer text plus any
    # shareable reference/image URLs gathered from tool payloads.
    content: str
    urls: list[str]
@observe(capture_input=False, capture_output=False)
async def generate(
    agent: MayaRiesgos,
    prompt: str,
    conversation_id: UUID,
) -> Response:
    """Produce a single (non-streaming) assistant response for a conversation.

    Loads the stored conversation, appends the user prompt and runs the agent.
    If the model requested a tool call, the tool is executed and the model is
    queried a second time (with tools disabled) to produce the final answer.

    Args:
        agent: Agent used for completions and tool execution.
        prompt: The user's message.
        conversation_id: Id of an existing conversation in Mongo.

    Returns:
        Response carrying the assistant text and any shareable URLs.

    Raises:
        ValueError: If the conversation does not exist, or if no output
            buffer was produced by the agent.
    """
    conversation = await crud.get_conversation(conversation_id)
    if conversation is None:
        raise ValueError(f"Conversation with id {conversation_id} not found")
    conversation.add(role="user", content=prompt)
    response = await agent.generate(conversation.to_openai_format(agent.message_limit))
    reference_urls, image_urls = [], []
    if call := response.tool_calls:
        # Stash the (first) tool call in context vars for the follow-up pass.
        if id := call[0].id:
            ctx.tool_id.set(id)
        if name := call[0].function.name:
            ctx.tool_name.set(name)
        ctx.tool_buffer.set(call[0].function.arguments)
    else:
        ctx.buffer.set(response.content)
    buffer = ctx.buffer.get()
    tool_buffer = ctx.tool_buffer.get()
    tool_id = ctx.tool_id.get()
    tool_name = ctx.tool_name.get()
    if tool_id is not None:
        # Tool arguments may arrive as a JSON string; if it is not valid
        # JSON, fall back to treating the raw string as the question.
        if isinstance(tool_buffer, str):
            try:
                tool_args = json.loads(tool_buffer)
            except json.JSONDecodeError:
                tool_args = {"question": tool_buffer}
        else:
            tool_args = tool_buffer
        response, payloads = await agent.tool_map[tool_name](**tool_args)  # type: ignore
        tool_call: dict[str, Any] = agent.llm.build_tool_call(
            tool_id, tool_name, tool_buffer
        )
        tool_call_id: dict[str, Any] = agent.llm.build_tool_call_id(tool_id)
        conversation.add("assistant", **tool_call)
        conversation.add("tool", content=response, **tool_call_id)
        # Second pass: answer using the tool output, with tools disabled so
        # the model cannot request another call.
        response = await agent.generate(
            conversation.to_openai_format(agent.message_limit), {"tools": None}
        )
        ctx.buffer.set(response.content)
        reference_urls, image_urls = await agent.get_shareable_urls(payloads)  # type: ignore
    buffer = ctx.buffer.get()
    if buffer is None:
        raise ValueError("No buffer found")
    conversation.add(role="assistant", content=buffer)
    # Persist the updated conversation — keeps this path in sync with the
    # streaming variant, which saves before tracing.
    await conversation.save()
    langfuse_context.update_current_trace(
        name=agent.__class__.__name__,
        session_id=str(conversation_id),
        input=prompt,
        output=buffer,
    )
    return Response(content=buffer, urls=reference_urls + image_urls)

View File

@@ -0,0 +1,102 @@
import json
from enum import StrEnum
from typing import TypeAlias
from uuid import UUID
from banortegpt.database.mongo_memory import crud
from langfuse.decorators import langfuse_context, observe
from pydantic import BaseModel
from api import context as ctx
from api.agent import MayaRiesgos
class ChunkType(StrEnum):
START = "start"
TEXT = "text"
REFERENCE = "reference"
IMAGE = "image"
TOOL = "tool"
END = "end"
ERROR = "error"
# Scalar payload a chunk may carry (text tokens, ids, etc.).
ContentType: TypeAlias = str | int


class ResponseChunk(BaseModel):
    # One server-streamed chunk: `type` tags the payload kind; `content`
    # holds the token text, a list of URLs, the trace id, or None (used
    # for the TOOL marker chunk).
    type: ChunkType
    content: ContentType | list[ContentType] | None
@observe(capture_input=False, capture_output=False)
async def stream(agent: MayaRiesgos, prompt: str, conversation_id: UUID):
    """Stream an assistant response as a sequence of `ResponseChunk`s.

    Emits START, then TEXT chunks as tokens arrive. If the model requested a
    tool call (detected through the context vars set during streaming), emits
    a TOOL marker, runs the tool, streams the follow-up answer (tools
    disabled) and emits REFERENCE/IMAGE chunks for shareable URLs. Finally
    persists the conversation, updates the Langfuse trace and emits END
    carrying the trace id.

    Args:
        agent: Agent used for streaming completions and tool execution.
        prompt: The user's message.
        conversation_id: Id of an existing conversation in Mongo.

    Raises:
        ValueError: If the conversation does not exist, or if no output
            buffer was produced by the agent.
    """
    yield ResponseChunk(type=ChunkType.START, content="")
    conversation = await crud.get_conversation(conversation_id)
    if conversation is None:
        raise ValueError(f"Conversation with id {conversation_id} not found")
    conversation.add(role="user", content=prompt)
    history = conversation.to_openai_format(agent.message_limit, langchain_compat=True)
    async for content in agent.stream(history):
        yield ResponseChunk(type=ChunkType.TEXT, content=content)
    if (tool_id := ctx.tool_id.get()) is not None:
        tool_buffer = ctx.tool_buffer.get()
        assert tool_buffer is not None
        tool_name = ctx.tool_name.get()
        assert tool_name is not None
        yield ResponseChunk(type=ChunkType.TOOL, content=None)
        buffer_dict = json.loads(tool_buffer)
        response, payloads = await agent.tool_map[tool_name](**buffer_dict)
        # Record the tool request and its result in OpenAI tool-call format.
        conversation.add(
            role="assistant",
            tool_calls=[
                {
                    "id": tool_id,
                    "function": {
                        "name": tool_name,
                        "arguments": tool_buffer,
                    },
                    "type": "function",
                }
            ],
        )
        conversation.add(role="tool", content=response, tool_call_id=tool_id)
        history = conversation.to_openai_format(agent.message_limit, langchain_compat=True)
        # Second pass: answer from the tool output with tools disabled.
        async for content in agent.stream(history, {"tools": None}):
            yield ResponseChunk(type=ChunkType.TEXT, content=content)
        ref_urls, image_urls = await agent.get_shareable_urls(payloads)  # type: ignore
        if len(ref_urls) > 0:
            yield ResponseChunk(type=ChunkType.REFERENCE, content=ref_urls)
        if len(image_urls) > 0:
            yield ResponseChunk(type=ChunkType.IMAGE, content=image_urls)
    buffer = ctx.buffer.get()
    if buffer is None:
        # Kept consistent with `generate`: a missing buffer means the agent
        # produced no final text, and storing None would corrupt the history.
        raise ValueError("No buffer found")
    conversation.add(role="assistant", content=buffer)
    await conversation.save()
    langfuse_context.update_current_trace(
        name=agent.__class__.__name__,
        session_id=str(conversation_id),
        input=prompt,
        output=buffer,
    )
    yield ResponseChunk(
        type=ChunkType.END, content=langfuse_context.get_current_trace_id()
    )