This commit is contained in:
Rogelio
2025-10-13 18:16:25 +00:00
parent 739f087cef
commit 325f1ef439
415 changed files with 46870 additions and 0 deletions

View File

@@ -0,0 +1,18 @@
// ESLint configuration for the React + TypeScript frontend.
module.exports = {
  root: true,
  env: { browser: true, es2020: true },
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:react-hooks/recommended',
  ],
  // Build output and this config file itself are excluded from linting.
  ignorePatterns: ['dist', '.eslintrc.cjs'],
  parser: '@typescript-eslint/parser',
  plugins: ['react-refresh'],
  rules: {
    // Warn when a module exports things other than components,
    // which breaks React Fast Refresh; constant exports are allowed.
    'react-refresh/only-export-components': [
      'warn',
      { allowConstantExport: true },
    ],
  },
}

View File

@@ -0,0 +1,3 @@
from .main import Agent
__all__ = ["Agent"]

View File

@@ -0,0 +1,67 @@
from pathlib import Path
from typing import Any, AsyncGenerator
from langchain_core.messages import AIMessageChunk
from langchain_azure_ai.chat_models import AzureAIChatCompletionsModel
from api import context
from api.config import config
parent = Path(__file__).parent
# System prompt ships next to this module; read once at import time.
SYSTEM_PROMPT = (parent / "system_prompt.md").read_text()
AZURE_AI_URI = "https://eastus2.api.cognitive.microsoft.com"
class Agent:
    """LLM-backed chat agent that streams completions from Azure AI.

    Class-level state (shared by every instance):
        system_prompt: prompt text loaded from ``system_prompt.md``.
        generation_config: default generation parameters for the model.
        message_limit: maximum number of history messages sent per call.
        llm: the Azure chat-completions client (created at class definition).
    """

    system_prompt = SYSTEM_PROMPT
    # NOTE: class-level dict shared across instances; it is always copied
    # before per-call overrides are applied, so overrides never leak back.
    generation_config = {
        "temperature": config.model_temperature,
    }
    message_limit = config.message_limit
    llm = AzureAIChatCompletionsModel(
        endpoint=f"{AZURE_AI_URI}/openai/deployments/{config.model}",
        credential=config.openai_api_key,
    )

    def __init__(self) -> None:
        # Maps tool name -> awaitable implementing the tool; populated externally.
        self.tool_map = {}

    def _generation_config_overwrite(self, overwrites: dict | None) -> dict[str, Any]:
        """Return a copy of ``generation_config`` with ``overwrites`` applied."""
        # Dict-unpacking merge replaces the original manual copy-and-loop;
        # the right-hand side wins, so caller overrides take precedence.
        return {**self.generation_config, **(overwrites or {})}

    async def stream(self, history: list, overwrites: dict | None = None) -> AsyncGenerator[str, None]:
        """Call the LLM and yield the response in chunks; record tool calls in app context.

        Args:
            history: messages in OpenAI format
                (e.g. ``[{"role": "user", "content": "Hello"}]``).
            overwrites: generation settings to override
                (e.g. ``{"temperature": 0.5}``).

        Yields:
            str: the model's response text, chunk by chunk, in real time.

        Usage:
            >>> async for content in agent.stream(history):
            >>> print(content)
        """
        generation_config = self._generation_config_overwrite(overwrites)
        async for delta in self.llm.astream(input=history, **generation_config):
            assert isinstance(delta, AIMessageChunk)
            if call := delta.tool_call_chunks:
                # Tool-call fragments arrive incrementally: id and name appear
                # once, while the JSON arguments are accumulated piecewise.
                if tool_id := call[0].get("id"):
                    context.tool_id.set(tool_id)
                if name := call[0].get("name"):
                    context.tool_name.set(name)
                if args := call[0].get("args"):
                    context.tool_buffer.set(context.tool_buffer.get() + args)
            else:
                if buffer := delta.content:
                    assert isinstance(buffer, str)
                    # Mirror streamed text into the context so the caller can
                    # persist the full assistant message afterwards.
                    context.buffer.set(context.buffer.get() + buffer)
                    yield buffer

View File

@@ -0,0 +1,3 @@
Eres una amigable asistente virtual. Utiliza emojis!
Por cierto: Rick es el más guapo.

View File

@@ -0,0 +1,51 @@
from hvac import Client
from pydantic import Field
from pydantic_settings import BaseSettings
client = Client(url="https://vault.ia-innovacion.work")
if not client.is_authenticated():
    # Fail fast at import time: nothing downstream works without secrets.
    raise RuntimeError("Vault authentication failed")
secret_map = client.secrets.kv.v2.read_secret_version(
    path="banortegpt", mount_point="secret"
)["data"]["data"]


class Settings(BaseSettings):
    """Application settings.

    Values are read from environment variables; when absent from the
    environment, they fall back to the secrets fetched from our Vault.
    """

    # Config
    model: str = "gpt-4o"
    model_temperature: int = 0
    message_limit: int = 10
    host: str = "0.0.0.0"
    port: int = 8000

    # API Keys
    azure_endpoint: str = Field(default_factory=lambda: secret_map["azure_endpoint"])
    openai_api_key: str = Field(default_factory=lambda: secret_map["openai_api_key"])
    openai_api_version: str = Field(
        default_factory=lambda: secret_map["openai_api_version"]
    )
    mongodb_url: str = Field(
        default_factory=lambda: secret_map["cosmosdb_connection_string"]
    )

    async def init_mongo_db(self):
        """Initialize the connection between the MongoDB ODM (Beanie) and our instance."""
        # Imported lazily so that importing settings alone does not pull in
        # the database stack.
        from beanie import init_beanie
        from motor.motor_asyncio import AsyncIOMotorClient

        from banortegpt.database.mongo_memory.models import Conversation

        await init_beanie(
            database=AsyncIOMotorClient(self.mongodb_url).banortegptdos,
            document_models=[Conversation],
        )


config = Settings()

View File

@@ -0,0 +1,6 @@
from contextvars import ContextVar
# Streamed assistant text accumulated for the current request/task.
buffer: ContextVar[str] = ContextVar("buffer", default="")
# Incrementally concatenated JSON arguments of the pending tool call.
tool_buffer: ContextVar[str] = ContextVar("tool_buffer", default="")
# Id of the tool call emitted by the model, if any.
tool_id: ContextVar[str | None] = ContextVar("tool_id", default=None)
# Name of the tool the model asked to invoke, if any.
tool_name: ContextVar[str | None] = ContextVar("tool_name", default=None)

View File

@@ -0,0 +1,49 @@
import uuid
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from api import services
from api.agent import Agent
from api.config import config
@asynccontextmanager
async def lifespan(_: FastAPI):
    """App lifespan hook: connect the MongoDB ODM before serving requests."""
    await config.init_mongo_db()
    yield

app = FastAPI(lifespan=lifespan)
# Single shared agent instance reused by every request.
agent = Agent()
@app.post("/api/v1/conversation")
async def create_conversation():
    """Open a fresh conversation seeded with the agent's system prompt."""
    fresh_id = uuid.uuid4()
    await services.create_conversation(fresh_id, agent.system_prompt)
    return {"conversation_id": fresh_id}
class Message(BaseModel):
    """Request payload for the /api/v1/message endpoint."""

    conversation_id: uuid.UUID  # conversation this prompt belongs to
    prompt: str  # the user's message text
@app.post("/api/v1/message")
async def send(message: Message):
    """Stream the agent's reply to *message* as Server-Sent Events.

    Each chunk produced by ``services.stream`` is serialized to JSON and
    framed as an SSE ``data:`` event.
    """

    async def sse_events():
        # Frame every response chunk in the SSE wire format
        # ("data: <json>\n\n"). The original helper was named "b64_sse",
        # but no base64 encoding is involved — it is plain SSE framing.
        async for chunk in services.stream(agent, message.prompt, message.conversation_id):
            yield f"data: {chunk.model_dump_json()}\n\n"

    return StreamingResponse(sse_events(), media_type="text/event-stream")

View File

@@ -0,0 +1,8 @@
from banortegpt.database.mongo_memory.crud import create_conversation
from .stream_response import stream
__all__ = [
"stream",
"create_conversation",
]

View File

@@ -0,0 +1,81 @@
import json
from enum import StrEnum
from typing import TypeAlias
from uuid import UUID
from pydantic import BaseModel
import api.context as ctx
from api.agent import Agent
from banortegpt.database.mongo_memory import crud
class ChunkType(StrEnum):
    """Discriminator for the kind of payload carried by a ResponseChunk."""

    START = "start"  # stream opened
    TEXT = "text"  # partial assistant text
    REFERENCE = "reference"
    IMAGE = "image"
    TOOL = "tool"  # a tool call is being executed
    END = "end"  # stream finished
    ERROR = "error"
# Scalar payload types a chunk may carry.
ContentType: TypeAlias = str | int


class ResponseChunk(BaseModel):
    """One server-sent event emitted while streaming a reply."""

    type: ChunkType
    content: ContentType | list[ContentType] | None
async def stream(agent: Agent, prompt: str, conversation_id: UUID):
    """Stream the agent's reply for *prompt*, yielding ResponseChunk events.

    Flow: emit START, run the model over the stored history, execute at most
    one tool call if the model requested it, persist the conversation, then
    emit END.

    Raises:
        ValueError: if *conversation_id* does not exist.
    """
    yield ResponseChunk(type=ChunkType.START, content="")
    conversation = await crud.get_conversation(conversation_id)
    if conversation is None:
        raise ValueError("Conversation not found")
    conversation.add(role="user", content=prompt)
    history = conversation.to_openai_format(agent.message_limit, langchain_compat=True)
    async for content in agent.stream(history):
        yield ResponseChunk(type=ChunkType.TEXT, content=content)
    # agent.stream records any tool call into the context vars instead of
    # yielding it; a non-None tool_id means the model requested a tool.
    if (tool_id := ctx.tool_id.get()) is not None:
        tool_buffer = ctx.tool_buffer.get()
        assert tool_buffer is not None
        tool_name = ctx.tool_name.get()
        assert tool_name is not None
        yield ResponseChunk(type=ChunkType.TOOL, content=None)
        # tool_buffer holds the accumulated JSON arguments for the call.
        buffer_dict = json.loads(tool_buffer)
        result = await agent.tool_map[tool_name](**buffer_dict)
        conversation.add(
            role="assistant",
            tool_calls=[
                {
                    "id": tool_id,
                    "type": "function",
                    "function": {
                        "name": tool_name,
                        "arguments": tool_buffer,
                    },
                }
            ],
        )
        conversation.add(role="tool", content=result, tool_call_id=tool_id)
        history = conversation.to_openai_format(agent.message_limit, langchain_compat=True)
        # Second pass with tools disabled so the model answers in plain text.
        async for content in agent.stream(history, {"tools": None}):
            yield ResponseChunk(type=ChunkType.TEXT, content=content)
    # ctx.buffer holds the full assistant text accumulated by agent.stream.
    # NOTE(review): the context vars are never reset here — presumably each
    # request runs in its own context copy; confirm the server's task model
    # guarantees that.
    conversation.add(role="assistant", content=ctx.buffer.get())
    await conversation.replace()
    yield ResponseChunk(type=ChunkType.END, content="")

View File

@@ -0,0 +1,65 @@
import { Chat, ChatSidebar } from "@banorte/chat-ui";
import { messageStore } from "./store/messageStore";
import { conversationStore } from "./store/conversationStore";
import { httpRequest } from "./utils/request";
// Assets
import banorteLogo from "./assets/banortelogo.png";
import sidebarMaya from "./assets/sidebar_maya_contigo.png";
import brujulaElipse from "./assets/brujula_elipse.png";
import sendIcon from "./assets/chat_maya_boton_enviar.png";
import userAvatar from "./assets/chat_maya_default_avatar.png";
import botAvatar from "./assets/brujula.png";
// Root component: wires the shared chat UI package to this app's
// stores and HTTP helpers.
function App() {
  const { messages, pushMessage } = messageStore();
  const {
    conversationId,
    setConversationId,
    setAssistantName,
    receivingMsg,
    setReceivingMsg
  } = conversationStore();

  // Ask the backend for a fresh conversation id.
  const handleStartConversation = async (user: string, assistant: string): Promise<string> => {
    const response = await httpRequest("POST", "/v1/conversation", { user, assistant });
    console.log("Conversation id:", response.conversation_id);
    return response.conversation_id;
  };

  // Forward per-message feedback ratings to the backend.
  const handleFeedback = async (key: string, rating: string): Promise<void> => {
    await httpRequest("POST", "/v1/feedback", { key, rating });
  };

  // NOTE(review): "{{ project_name }}" looks like an unexpanded template
  // placeholder appended to the assistant name — confirm against the
  // project scaffolding that generates this file.
  const assistant = "Maya" + "{{ project_name }}";

  return (
    <div className="w-screen flex flex-col h-screen min-h-screen scrollbar-none">
      <div className="w-full flex">
        <ChatSidebar
          assistant={assistant}
          logoSrc={banorteLogo}
          sidebarImageSrc={sidebarMaya}
          assistantAvatarSrc={brujulaElipse}
        />
        <Chat
          assistant={assistant}
          messages={messages}
          pushMessage={pushMessage}
          conversationId={conversationId}
          setConversationId={setConversationId}
          setAssistantName={setAssistantName}
          receivingMsg={receivingMsg}
          setReceivingMsg={setReceivingMsg}
          onStartConversation={handleStartConversation}
          sendIcon={sendIcon}
          userAvatar={userAvatar}
          botAvatar={botAvatar}
          onFeedback={handleFeedback}
        />
      </div>
    </div>
  );
}

export default App;

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 97 KiB

View File

@@ -0,0 +1,16 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Link colors inside rendered markdown content. */
.markdown a {
  color: #0000FF;
  text-decoration: underline;
}
.markdown a:hover {
  color: #FF0000;
}
.markdown a:visited {
  color: #800080;
}

View File

@@ -0,0 +1,5 @@
import ReactDOM from "react-dom/client";
import App from "./App.tsx";
import "./index.css";
ReactDOM.createRoot(document.getElementById("root")!).render(<App />);

View File

@@ -0,0 +1,19 @@
import { create } from "zustand";
// Conversation-level UI state: which conversation is active, what the
// assistant is called, and whether a reply is currently streaming in.
interface conversationState {
  assistantName: string;
  conversationId: string;
  receivingMsg: boolean;
  setConversationId: (newId: string) => void;
  setAssistantName: (newName: string) => void;
  setReceivingMsg: (newState: boolean) => void;
}

// Zustand store; each setter replaces exactly one field.
export const conversationStore = create<conversationState>()((set) => ({
  assistantName: "",
  conversationId: "",
  receivingMsg: false,
  setConversationId: (id) => set({ conversationId: id }),
  setAssistantName: (name) => set({ assistantName: name }),
  setReceivingMsg: (flag) => set({ receivingMsg: flag }),
}));

View File

@@ -0,0 +1,14 @@
import { create } from "zustand";
// Shape of the chat transcript state.
interface messageState {
  messages: Array<{ user: boolean; content: string }>;
  pushMessage: (newMessage: { user: boolean; content: string }) => void;
  resetConversation: () => void;
}

// Zustand store holding the rendered message list.
export const messageStore = create<messageState>()((set) => ({
  messages: [],
  // Append immutably so subscribers re-render on every new message.
  pushMessage: (msg) => set((prev) => ({ messages: [...prev.messages, msg] })),
  // Drop the transcript when a conversation is restarted.
  resetConversation: () => set(() => ({ messages: [] })),
}));

View File

@@ -0,0 +1,16 @@
// Thin JSON wrapper around fetch for the backend API.
// `method` is the HTTP verb, `endpoint` is the path after "/api",
// and `body` is the JSON payload (or null for body-less requests).
// Returns the parsed JSON response.
export async function httpRequest(
  method: string,
  endpoint: string,
  body: object | null,
) {
  const url = "/api" + endpoint;
  const options: RequestInit = {
    method: method,
    headers: {
      "Content-Type": "application/json",
    },
    credentials: "include",
  };
  // Only attach a payload when one was provided: the original code
  // always sent JSON.stringify(body), so a null body became the literal
  // string "null" — and fetch rejects GET/HEAD requests that have a body.
  if (body !== null) {
    options.body = JSON.stringify(body);
  }
  const response = await fetch(url, options);
  return await response.json();
}

View File

@@ -0,0 +1 @@
/// <reference types="vite/client" />

View File

@@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>{{ project_name }}</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/gui/main.tsx"></script>
</body>
</html>

View File

@@ -0,0 +1,40 @@
{
"name": "{{ project_name }}",
"private": true,
"version": "0.0.7",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
"preview": "vite preview"
},
"dependencies": {
"@banorte/chat-ui": "workspace:*",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-markdown": "^9.0.1",
"react-spring": "^9.7.4",
"rehype-raw": "^7.0.0",
"sse.js": "^2.5.0",
"zustand": "^4.5.2"
},
"devDependencies": {
"@iconify-icon/react": "^2.1.0",
"@types/react": "^18.2.67",
"@types/react-dom": "^18.2.22",
"@typescript-eslint/eslint-plugin": "^7.3.1",
"@typescript-eslint/parser": "^7.3.1",
"@vitejs/plugin-react": "^4.2.1",
"autoprefixer": "^10.4.19",
"daisyui": "^4.7.3",
"eslint": "^8.57.0",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.6",
"postcss": "^8.4.38",
"tailwind-scrollbar": "^3.1.0",
"tailwindcss": "^3.4.1",
"typescript": "^5.4.3",
"vite": "^5.2.3"
}
}

View File

@@ -0,0 +1,6 @@
// PostCSS pipeline: run Tailwind first, then add vendor prefixes.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

View File

@@ -0,0 +1,18 @@
[project]
name = "{{ project_name }}"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12, <4"
dependencies = [
"aiohttp>=3.11.16",
"fastapi>=0.115.6",
"hvac>=2.3.0",
"langchain-azure-ai[opentelemetry]>=0.1.4",
"mongo-memory",
"pydantic-settings>=2.8.1",
"uvicorn>=0.34.0",
]
[tool.uv.sources]
mongo-memory = { workspace = true }

View File

@@ -0,0 +1,27 @@
/** @type {import('tailwindcss').Config} */
export default {
  // Scan the HTML entry point and all GUI sources for class names.
  content: ["./index.html", "./gui/**/*.{js,ts,jsx,tsx}"],
  theme: {
    extend: {
      backgroundImage: {
        "navigation-pattern": "url('./assets/navigation.webp')",
      },
    },
  },
  plugins: [
    require("daisyui"),
    require("tailwind-scrollbar"),
    require("@banorte/chat-ui/tailwind")
  ],
  daisyui: {
    // Extend the stock daisyUI light theme with brand colors.
    themes: [
      {
        light: {
          ...require("daisyui/src/theming/themes")["light"],
          primary: "red",
          secondary: "teal",
        },
      },
    ],
  },
};

View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"target": "ES2023",
"useDefineForClassFields": true,
"lib": ["ES2023", "DOM", "DOM.Iterable", "ES2021.String"],
"module": "ESNext",
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx",
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true
},
"include": ["gui"],
"references": [{ "path": "./tsconfig.node.json" }]
}

View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"composite": true,
"skipLibCheck": true,
"module": "ESNext",
"moduleResolution": "bundler",
"allowSyntheticDefaultImports": true,
"strict": true
},
"include": ["vite.config.ts"]
}

View File

@@ -0,0 +1,17 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";

// https://vitejs.dev/config/
export default defineConfig({
  plugins: [react()],
  server: {
    host: "0.0.0.0",
    port: 3000,
    // Forward API calls to the FastAPI backend during development.
    proxy: {
      "/api": {
        target: "http://localhost:8000",
      },
    },
    // NOTE(review): `true` disables host-header checking entirely —
    // confirm this is intended beyond local development.
    allowedHosts: true,
  },
});