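"""FastAPI routes for the MayaRiesgos agent: open a conversation, send messages
(optionally streamed as Server-Sent Events), and record feedback as Langfuse scores."""
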
import uuid
from typing import Literal

from fastapi import APIRouter
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

from api import config, services
from api.agent import MayaRiesgos

router = APIRouter(prefix="/api/v1")
agent = MayaRiesgos()


class Message(BaseModel):
    conversation_id: uuid.UUID
    prompt: str


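# Open a new conversation seeded with the agent's system prompt and return the
# generated id, which clients send back as Message.conversation_id.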
@router.post("/conversation")
async def create_conversation():
    conversation_id = uuid.uuid4()
    await services.create_conversation(conversation_id, agent.system_prompt)
    return {"conversation_id": conversation_id}


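# Answer a prompt within an existing conversation. With ?stream=true the reply
# is sent incrementally as Server-Sent Events; otherwise the full response is
# returned once generation finishes.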
@router.post("/message")
async def send(message: Message, stream: bool = False):
    if stream:
        # Wrap the streaming service so every chunk is serialized to JSON and
        # framed as a Server-Sent Events "data:" message.
        def b64_sse(func):
            async def wrapper(*args, **kwargs):
                async for chunk in func(*args, **kwargs):
                    content = chunk.model_dump_json()
                    data = f"data: {content}\n\n"
                    yield data

            return wrapper

        sse_stream = b64_sse(services.stream)
        generator = sse_stream(agent, message.prompt, message.conversation_id)
        return StreamingResponse(generator, media_type="text/event-stream")
    else:
        response = await services.generate(
            agent, message.prompt, message.conversation_id
        )
        return response


class Feedback(BaseModel):
    key: str
    rating: Literal["Good", "Bad", "None"]


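# Store the user's rating in Langfuse as a score attached to the trace
# identified by `key`.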
@router.post("/feedback")
async def register_feedback(data: Feedback):
    # All Literal values are non-empty strings, so a bare truthiness check would
    # always pass; only record a score when an actual rating was given.
    if data.rating != "None":
        langfuse = config.get_langfuse()
        langfuse.score(
            id=data.key + "-rating",
            trace_id=data.key,
            name="Rating",
            value=data.rating,
        )