This commit is contained in:
Rogelio
2025-10-13 18:16:25 +00:00
parent 739f087cef
commit 325f1ef439
415 changed files with 46870 additions and 0 deletions

18
apps/ocp/.eslintrc.cjs Normal file
View File

@@ -0,0 +1,18 @@
// ESLint configuration for the OCP frontend (Vite + React + TypeScript).
module.exports = {
  // Treat this directory as the lint root; do not search parent directories.
  root: true,
  env: { browser: true, es2020: true },
  extends: [
    'eslint:recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:react-hooks/recommended',
  ],
  // Skip build output and this (CommonJS) config file itself.
  ignorePatterns: ['dist', '.eslintrc.cjs'],
  parser: '@typescript-eslint/parser',
  plugins: ['react-refresh'],
  rules: {
    // Warn when a module exports things other than components, which breaks
    // React Fast Refresh; constant exports are allowed.
    'react-refresh/only-export-components': [
      'warn',
      { allowConstantExport: true },
    ],
  },
}

6
apps/ocp/README.md Normal file
View File

@@ -0,0 +1,6 @@
Eres MayaBursatil, una muy amigable y simpática asistente virtual del departamento de contraloría bursátil de Banorte.
Tu objetivo es responder preguntas de usuarios de manera informativa y empática.
Para cada pregunta, utiliza la herramienta 'get_information' para obtener información de nuestro FAQ.
Utiliza la información para responder la pregunta del usuario.
Utiliza emojis.
Si no puedes responder la pregunta basado en la información del FAQ, responde con el contenido en el FALLBACK.

302
apps/ocp/api.svg Normal file
View File

@@ -0,0 +1,302 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 12.2.1 (20241206.2353)
-->
<!-- Title: G Pages: 1 -->
<svg width="771pt" height="666pt"
viewBox="0.00 0.00 771.08 666.44" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 662.44)">
<title>G</title><style>.edge>path:hover{stroke-width:8}</style>
<polygon fill="white" stroke="none" points="-4,4 -4,-662.44 767.08,-662.44 767.08,4 -4,4"/>
<!-- api_agent -->
<g id="node1" class="node">
<title>api_agent</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#d92626" stroke="black" cx="392.35" cy="-336.18" rx="32.99" ry="18"/>
<text text-anchor="middle" x="392.35" y="-332.3" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.agent</text>
</g>
<!-- api_services_create_conversation -->
<g id="node7" class="node">
<title>api_services_create_conversation</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#b83d3d" stroke="black" cx="377.35" cy="-173.52" rx="73.36" ry="29.52"/>
<text text-anchor="middle" x="377.35" y="-180.9" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.</text>
<text text-anchor="middle" x="377.35" y="-169.65" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">services.</text>
<text text-anchor="middle" x="377.35" y="-158.4" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">create_conversation</text>
</g>
<!-- api_agent&#45;&gt;api_services_create_conversation -->
<g id="edge1" class="edge">
<title>api_agent&#45;&gt;api_services_create_conversation</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M316.35,-259.61C300.08,-244.24 313.57,-223.97 331.97,-207.07"/>
<polygon fill="#d92626" stroke="black" points="334.23,-209.73 339.52,-200.56 329.66,-204.43 334.23,-209.73"/>
</g>
<!-- api_services_generate_response -->
<g id="node8" class="node">
<title>api_services_generate_response</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#993333" stroke="black" cx="216.35" cy="-173.52" rx="69.65" ry="29.52"/>
<text text-anchor="middle" x="216.35" y="-180.9" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.</text>
<text text-anchor="middle" x="216.35" y="-169.65" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">services.</text>
<text text-anchor="middle" x="216.35" y="-158.4" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">generate_response</text>
</g>
<!-- api_agent&#45;&gt;api_services_generate_response -->
<g id="edge2" class="edge">
<title>api_agent&#45;&gt;api_services_generate_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M316.35,-259.61C297.41,-241.72 275.45,-222.84 256.94,-207.42"/>
<polygon fill="#d92626" stroke="black" points="259.26,-204.8 249.32,-201.12 254.79,-210.19 259.26,-204.8"/>
</g>
<!-- api_services_stream_response -->
<g id="node9" class="node">
<title>api_services_stream_response</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#993333" stroke="black" cx="64.35" cy="-173.52" rx="64.35" ry="29.52"/>
<text text-anchor="middle" x="64.35" y="-180.9" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.</text>
<text text-anchor="middle" x="64.35" y="-169.65" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">services.</text>
<text text-anchor="middle" x="64.35" y="-158.4" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">stream_response</text>
</g>
<!-- api_agent&#45;&gt;api_services_stream_response -->
<g id="edge3" class="edge">
<title>api_agent&#45;&gt;api_services_stream_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M316.35,-259.61C255.7,-202.31 216.02,-230.81 137.35,-203.04 132.6,-201.37 127.7,-199.58 122.79,-197.75"/>
<polygon fill="#d92626" stroke="black" points="124.09,-194.5 113.5,-194.23 121.61,-201.05 124.09,-194.5"/>
<path fill="none" stroke="black" d="M376.73,-319.85C361.44,-304.89 337.44,-281.54 316.35,-261.61"/>
</g>
<!-- api_agent_main -->
<g id="node2" class="node">
<title>api_agent_main</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#b03a3a" stroke="black" cx="392.35" cy="-411.74" rx="47.19" ry="18"/>
<text text-anchor="middle" x="392.35" y="-407.87" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.agent.main</text>
</g>
<!-- api_agent_main&#45;&gt;api_agent -->
<g id="edge4" class="edge">
<title>api_agent_main&#45;&gt;api_agent</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M392.35,-393.31C392.35,-385.06 392.35,-375 392.35,-365.69"/>
<polygon fill="#b03a3a" stroke="black" points="395.85,-366 392.35,-356 388.85,-366 395.85,-366"/>
</g>
<!-- api_config -->
<g id="node3" class="node">
<title>api_config</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#ac2b2b" stroke="black" cx="478.35" cy="-524.31" rx="34.32" ry="18"/>
<text text-anchor="middle" x="478.35" y="-520.44" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.config</text>
</g>
<!-- api_config&#45;&gt;api_agent_main -->
<g id="edge5" class="edge">
<title>api_config&#45;&gt;api_agent_main</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M481.03,-506.17C482.03,-495.58 482.15,-481.84 478.35,-470.31"/>
<path fill="none" stroke="black" d="M478.35,-469.31C472.17,-450.58 455.34,-437.49 438.19,-428.66"/>
<polygon fill="#ac2b2b" stroke="black" points="440.04,-425.66 429.5,-424.58 437.06,-432 440.04,-425.66"/>
</g>
<!-- api_server -->
<g id="node5" class="node">
<title>api_server</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#8f3d3d" stroke="black" cx="374.35" cy="-18" rx="34.76" ry="18"/>
<text text-anchor="middle" x="374.35" y="-14.12" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.server</text>
</g>
<!-- api_config&#45;&gt;api_server -->
<g id="edge6" class="edge">
<title>api_config&#45;&gt;api_server</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M478.35,-469.31C459.95,-413.54 448.83,-387.94 478.35,-337.18"/>
</g>
<!-- api_context -->
<g id="node4" class="node">
<title>api_context</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#db2f2f" stroke="black" cx="237.35" cy="-524.31" rx="37.87" ry="18"/>
<text text-anchor="middle" x="237.35" y="-520.44" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.context</text>
</g>
<!-- api_context&#45;&gt;api_agent_main -->
<g id="edge7" class="edge">
<title>api_context&#45;&gt;api_agent_main</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M237.35,-469.31C228.57,-423.32 292.37,-449.38 336.35,-433.31 340.27,-431.88 344.36,-430.38 348.45,-428.88"/>
<polygon fill="#db2f2f" stroke="black" points="349.58,-432.19 357.77,-425.46 347.17,-425.62 349.58,-432.19"/>
</g>
<!-- api_context&#45;&gt;api_services_generate_response -->
<g id="edge8" class="edge">
<title>api_context&#45;&gt;api_services_generate_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M237.35,-469.31C230.57,-433.78 233.79,-422.89 218.35,-390.18 205.75,-363.49 169.42,-365.3 178.35,-337.18"/>
</g>
<!-- api_context&#45;&gt;api_services_stream_response -->
<g id="edge9" class="edge">
<title>api_context&#45;&gt;api_services_stream_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M237.35,-469.31C217.31,-379 91.83,-504.15 32.35,-433.31 20.02,-418.63 23.23,-407.04 32.35,-390.18 52.81,-352.33 102.34,-378.77 113.35,-337.18"/>
<path fill="none" stroke="black" d="M238.92,-505.98C239.48,-495.56 239.51,-482.07 237.35,-470.31"/>
<path fill="none" stroke="black" d="M132.35,-259.61C122.06,-242.23 108.51,-224.53 96.22,-209.82"/>
<polygon fill="#db2f2f" stroke="black" points="98.9,-207.57 89.75,-202.22 93.57,-212.1 98.9,-207.57"/>
<path fill="none" stroke="black" d="M113.35,-335.18C120.97,-302.28 149.55,-290.67 132.35,-261.61"/>
</g>
<!-- api_services -->
<g id="node6" class="node">
<title>api_services</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#b03a3a" stroke="black" cx="216.35" cy="-90" rx="40.53" ry="18"/>
<text text-anchor="middle" x="216.35" y="-86.12" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">api.services</text>
</g>
<!-- api_services&#45;&gt;api_server -->
<g id="edge10" class="edge">
<title>api_services&#45;&gt;api_server</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M244.23,-76.65C270.39,-65.06 309.65,-47.66 338.2,-35.01"/>
<polygon fill="#b03a3a" stroke="black" points="339.46,-38.28 347.19,-31.03 336.63,-31.88 339.46,-38.28"/>
</g>
<!-- api_services_create_conversation&#45;&gt;api_services -->
<g id="edge11" class="edge">
<title>api_services_create_conversation&#45;&gt;api_services</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M332.5,-149.81C307.21,-137.01 276.04,-121.23 252.36,-109.24"/>
<polygon fill="#b83d3d" stroke="black" points="254.23,-106.26 243.73,-104.87 251.07,-112.51 254.23,-106.26"/>
</g>
<!-- api_services_generate_response&#45;&gt;api_services -->
<g id="edge12" class="edge">
<title>api_services_generate_response&#45;&gt;api_services</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M216.35,-143.55C216.35,-135.86 216.35,-127.56 216.35,-119.88"/>
<polygon fill="#993333" stroke="black" points="219.85,-120 216.35,-110 212.85,-120 219.85,-120"/>
</g>
<!-- api_services_stream_response&#45;&gt;api_services -->
<g id="edge13" class="edge">
<title>api_services_stream_response&#45;&gt;api_services</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M105.48,-150.46C129.27,-137.7 158.87,-121.82 181.51,-109.69"/>
<polygon fill="#993333" stroke="black" points="182.88,-112.92 190.04,-105.11 179.57,-106.75 182.88,-112.92"/>
</g>
<!-- beanie -->
<g id="node10" class="node">
<title>beanie</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#c0a040" stroke="black" cx="334.35" cy="-636.88" rx="27" ry="18"/>
<text text-anchor="middle" x="334.35" y="-633" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">beanie</text>
</g>
<!-- beanie&#45;&gt;api_config -->
<g id="edge14" class="edge">
<title>beanie&#45;&gt;api_config</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M353.79,-623.84C359.05,-620.84 364.83,-617.78 370.35,-615.31 416.54,-594.66 487.83,-629.01 478.35,-579.31"/>
<path fill="none" stroke="black" d="M478.35,-578.31C476.86,-570.51 476.37,-561.94 476.38,-554.01"/>
<polygon fill="#c0a040" stroke="black" points="479.88,-554.26 476.67,-544.16 472.88,-554.06 479.88,-554.26"/>
</g>
<!-- fastapi -->
<g id="node11" class="node">
<title>fastapi</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#80b34c" stroke="black" cx="533.35" cy="-173.52" rx="27" ry="18"/>
<text text-anchor="middle" x="533.35" y="-169.65" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fastapi</text>
</g>
<!-- fastapi&#45;&gt;api_server -->
<g id="edge15" class="edge">
<title>fastapi&#45;&gt;api_server</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M538.37,-155.41C542.47,-137.8 545.77,-110.27 533.35,-91"/>
</g>
<!-- fastapi_responses -->
<g id="node12" class="node">
<title>fastapi_responses</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#85c247" stroke="black" cx="568.35" cy="-260.61" rx="43.13" ry="21.57"/>
<text text-anchor="middle" x="568.35" y="-262.36" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fastapi.</text>
<text text-anchor="middle" x="568.35" y="-251.11" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">responses</text>
</g>
<!-- fastapi_responses&#45;&gt;api_server -->
<g id="edge16" class="edge">
<title>fastapi_responses&#45;&gt;api_server</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M572.2,-239.02C575.77,-215.48 579.2,-176.04 569.35,-144 560.98,-116.78 548.78,-114.93 533.35,-91"/>
</g>
<!-- fastapi_responses&#45;&gt;fastapi -->
<g id="edge17" class="edge">
<title>fastapi_responses&#45;&gt;fastapi</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M559.87,-239.01C555.26,-227.79 549.49,-213.76 544.51,-201.66"/>
<polygon fill="#85c247" stroke="black" points="547.83,-200.54 540.79,-192.62 541.36,-203.2 547.83,-200.54"/>
</g>
<!-- hvac -->
<g id="node13" class="node">
<title>hvac</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#40c060" stroke="black" cx="406.35" cy="-636.88" rx="27" ry="18"/>
<text text-anchor="middle" x="406.35" y="-633" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">hvac</text>
</g>
<!-- hvac&#45;&gt;api_config -->
<g id="edge18" class="edge">
<title>hvac&#45;&gt;api_config</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M425.31,-623.52C447.07,-608.98 479.62,-585.99 478.35,-579.31"/>
</g>
<!-- langfuse -->
<g id="node14" class="node">
<title>langfuse</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#47c2c2" stroke="black" cx="178.35" cy="-411.74" rx="31.21" ry="18"/>
<text text-anchor="middle" x="178.35" y="-407.87" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">langfuse</text>
</g>
<!-- langfuse&#45;&gt;api_services_generate_response -->
<g id="edge19" class="edge">
<title>langfuse&#45;&gt;api_services_generate_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M175.44,-393.35C173.57,-378.15 172.47,-355.68 178.35,-337.18"/>
</g>
<!-- langfuse&#45;&gt;api_services_stream_response -->
<g id="edge20" class="edge">
<title>langfuse&#45;&gt;api_services_stream_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M155.63,-399.07C134.19,-386.24 106.28,-363.87 113.35,-337.18"/>
</g>
<!-- langfuse_decorators -->
<g id="node15" class="node">
<title>langfuse_decorators</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#47c2c2" stroke="black" cx="85.35" cy="-411.74" rx="43.66" ry="21.57"/>
<text text-anchor="middle" x="85.35" y="-413.49" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">langfuse.</text>
<text text-anchor="middle" x="85.35" y="-402.24" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">decorators</text>
</g>
<!-- langfuse_decorators&#45;&gt;api_services_generate_response -->
<g id="edge21" class="edge">
<title>langfuse_decorators&#45;&gt;api_services_generate_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M178.35,-335.18C189.48,-300.1 211.83,-298.13 216.35,-261.61"/>
<path fill="none" stroke="black" d="M216.35,-259.61C218.15,-245.06 218.55,-229.01 218.37,-214.81"/>
<polygon fill="#47c2c2" stroke="black" points="221.87,-214.81 218.14,-204.89 214.88,-214.97 221.87,-214.81"/>
<path fill="none" stroke="black" d="M118.02,-397.05C141.07,-385.18 169.38,-365.43 178.35,-337.18"/>
</g>
<!-- langfuse_decorators&#45;&gt;api_services_stream_response -->
<g id="edge22" class="edge">
<title>langfuse_decorators&#45;&gt;api_services_stream_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M94.5,-390.2C100.65,-375.65 108.49,-355.54 113.35,-337.18"/>
</g>
<!-- motor -->
<g id="node16" class="node">
<title>motor</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#4766c2" stroke="black" cx="478.35" cy="-636.88" rx="27" ry="18"/>
<text text-anchor="middle" x="478.35" y="-633" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">motor</text>
</g>
<!-- motor&#45;&gt;api_config -->
<g id="edge23" class="edge">
<title>motor&#45;&gt;api_config</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M480.01,-618.64C480.7,-607.35 480.83,-592.33 478.35,-579.31"/>
</g>
<!-- motor_motor_asyncio -->
<g id="node17" class="node">
<title>motor_motor_asyncio</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#4c66b3" stroke="black" cx="579.35" cy="-636.88" rx="56.39" ry="21.57"/>
<text text-anchor="middle" x="579.35" y="-638.63" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">motor.</text>
<text text-anchor="middle" x="579.35" y="-627.38" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">motor_asyncio</text>
</g>
<!-- motor_motor_asyncio&#45;&gt;api_config -->
<g id="edge24" class="edge">
<title>motor_motor_asyncio&#45;&gt;api_config</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M539.82,-621.03C512.68,-609.65 481.06,-593.52 478.35,-579.31"/>
</g>
<!-- pydantic -->
<g id="node18" class="node">
<title>pydantic</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#7f26d9" stroke="black" cx="296.35" cy="-411.74" rx="31.21" ry="18"/>
<text text-anchor="middle" x="296.35" y="-407.87" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">pydantic</text>
</g>
<!-- pydantic&#45;&gt;api_server -->
<g id="edge25" class="edge">
<title>pydantic&#45;&gt;api_server</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M533.35,-89C509.61,-49.53 457.62,-32.24 419.53,-24.71"/>
<polygon fill="#7f26d9" stroke="black" points="420.15,-21.27 409.69,-22.95 418.91,-28.16 420.15,-21.27"/>
<path fill="none" stroke="black" d="M318.54,-398.72C324.27,-395.78 330.48,-392.75 336.35,-390.18 398.03,-363.11 449.72,-398.16 478.35,-337.18"/>
<path fill="none" stroke="black" d="M478.35,-335.18C514.63,-257.88 468.24,-224.27 497.35,-144 507.05,-117.23 548.02,-115.4 533.35,-91"/>
</g>
<!-- pydantic&#45;&gt;api_services_generate_response -->
<g id="edge26" class="edge">
<title>pydantic&#45;&gt;api_services_generate_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M216.35,-335.18C196.28,-309.36 212.33,-294.06 216.35,-261.61"/>
</g>
<!-- pydantic&#45;&gt;api_services_stream_response -->
<g id="edge27" class="edge">
<title>pydantic&#45;&gt;api_services_stream_response</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M216.35,-335.18C185.9,-295.99 157.63,-304.31 132.35,-261.61"/>
<path fill="none" stroke="black" d="M277.86,-396.74C260.49,-382.94 234.52,-360.57 216.35,-337.18"/>
</g>
<!-- pydantic_settings -->
<g id="node19" class="node">
<title>pydantic_settings</title><style>.edge>path:hover{stroke-width:8}</style>
<ellipse fill="#b6539d" stroke="black" cx="708.35" cy="-636.88" rx="54.73" ry="18"/>
<text text-anchor="middle" x="708.35" y="-633" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#ffffff">pydantic_settings</text>
</g>
<!-- pydantic_settings&#45;&gt;api_config -->
<g id="edge28" class="edge">
<title>pydantic_settings&#45;&gt;api_config</title><style>.edge>path:hover{stroke-width:8}</style>
<path fill="none" stroke="black" d="M671.3,-623.3C662.51,-620.5 653.14,-617.67 644.35,-615.31 607.89,-605.52 485.42,-616.39 478.35,-579.31"/>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 19 KiB

13
apps/ocp/api/__init__.py Normal file
View File

@@ -0,0 +1,13 @@
import os
import logging
import logfire

# Route all stdlib logging through Logfire; keep records local (no upload).
logfire.configure(service_name="ChatOCP", send_to_logfire=False)
logging.basicConfig(handlers=[logfire.LogfireLoggingHandler()])

# Resolve the package log level from the environment (default: WARNING).
log_level = os.environ.get("LOG_LEVEL", "WARNING")
numeric_level = getattr(logging, log_level.upper(), None)
if not isinstance(numeric_level, int):
    # Reject unknown level names early instead of silently mis-logging.
    raise ValueError(f"Invalid log level: {log_level}")
logging.getLogger("api").setLevel(numeric_level)

View File

@@ -0,0 +1,5 @@
# Expose a single module-level agent instance so the rest of the API can do
# `from api.agent import agent` without constructing MayaOCP repeatedly.
from .main import MayaOCP

agent = MayaOCP()

__all__ = ["agent"]

130
apps/ocp/api/agent/main.py Normal file
View File

@@ -0,0 +1,130 @@
import logging
from pathlib import Path
from typing import Any
from langchain_core.messages.ai import AIMessageChunk
from pydantic import BaseModel, Field
from banortegpt.storage.azure_storage import AzureStorage
from banortegpt.vector.qdrant import AsyncQdrant
from langchain_azure_ai.chat_models import AzureAIChatCompletionsModel
from langchain_azure_ai.embeddings import AzureAIEmbeddingsModel
import api.context as ctx
from api.config import config
logger = logging.getLogger(__name__)

# Load the agent's system prompt from the markdown file shipped next to this
# module, once at import time.
parent = Path(__file__).parent
SYSTEM_PROMPT = (parent / "system_prompt.md").read_text()
# NOTE: the lowercase class name is intentional — it doubles as the tool name
# the LLM sees via `bind_tools` and in MayaOCP.tool_map.
class get_information(BaseModel):
    """Search a private repository for information."""

    question: str = Field(..., description="The user question")


# Base endpoint for the Azure OpenAI deployments used by both the chat model
# and the embedding model.
AZURE_AI_URI = "https://eastus2.api.cognitive.microsoft.com"
class MayaOCP:
    """RAG agent for the OCP assistant.

    The LLM, embedder, vector-search and blob-storage clients are class
    attributes: built once at import time from `config` and shared by all
    instances. Per-request streaming state lives in `api.context` context
    vars, not on the instance.
    """

    system_prompt = SYSTEM_PROMPT
    generation_config = {
        "temperature": config.model_temperature,
    }
    message_limit = config.message_limit
    index = config.vector_index
    limit = config.search_limit
    bucket = config.storage_bucket
    search = AsyncQdrant.from_config(config)
    llm = AzureAIChatCompletionsModel(
        endpoint=f"{AZURE_AI_URI}/openai/deployments/{config.model}",
        credential=config.openai_api_key,
    ).bind_tools([get_information])
    embedder = AzureAIEmbeddingsModel(
        endpoint=f"{AZURE_AI_URI}/openai/deployments/{config.embedding_model}",
        credential=config.openai_api_key,
    )
    storage = AzureStorage.from_config(config)

    def __init__(self) -> None:
        # Map tool names (as emitted by the LLM) to their bound implementations.
        self.tool_map = {"get_information": self.get_information}

    def build_response(self, payloads):
        """Format retrieved payloads into a numbered reference block for the LLM.

        References are numbered from 1 so they match the citation format the
        preface asks for ("texto[1]").
        """
        preface = ["Recuerda citar las referencias en el formato: texto[1]."]
        template = "------ REFERENCIA {index} ----- \n\n{content}"
        filled_templates = [
            template.format(index=idx, content=payload.get("content", ""))
            # start=1: citations are 1-based; the default enumerate() start
            # would label the first reference 0 and break [1]-style citations.
            for idx, payload in enumerate(payloads, start=1)
        ]
        return "\n".join(preface + filled_templates)

    async def get_information(self, question: str):
        """Tool implementation: embed `question` and search the vector index.

        Returns:
            tuple: (formatted reference text for the LLM, raw result payloads).
        """
        logger.info(
            f"Embedding question: {question} with model {self.embedder.model_name}"
        )
        embedding = await self.embedder.aembed_query(question)
        results = await self.search.semantic_search(
            embedding=embedding, collection=self.index, limit=self.limit
        )
        tool_response = self.build_response(results)
        return tool_response, results

    async def get_shareable_urls(self, metadatas: list):
        """Build short-lived (20 min) signed URLs for files and images.

        Looks for optional "file" and "image" keys in each metadata dict and
        returns (reference_urls, image_urls).
        """
        reference_urls = []
        image_urls = []
        for metadata in metadatas:
            if file := metadata.get("file"):
                reference_url = await self.storage.get_file_url(
                    filename=file,
                    bucket=self.bucket,
                    minute_duration=20,
                    image=False,
                )
                reference_urls.append(reference_url)
            if image_file := metadata.get("image"):
                image_url = await self.storage.get_file_url(
                    filename=image_file,
                    bucket=self.bucket,
                    minute_duration=20,
                    image=True,
                )
                image_urls.append(image_url)
        return reference_urls, image_urls

    def _generation_config_overwrite(self, overwrites: dict | None) -> dict[str, Any]:
        """Return a copy of the default generation config with `overwrites` applied."""
        # Dict merge copies the defaults, so the shared class attribute is
        # never mutated; overwrite keys win.
        return {**self.generation_config, **(overwrites or {})}

    async def stream(self, history, overwrites: dict | None = None):
        """Stream the LLM response for `history`, yielding text chunks.

        Tool-call fragments are not yielded; they are accumulated into the
        `ctx.tool_id` / `ctx.tool_name` / `ctx.tool_buffer` context vars for
        the caller to act on once the stream ends. Plain text is also
        accumulated into `ctx.buffer`.
        """
        generation_config = self._generation_config_overwrite(overwrites)
        async for chunk in self.llm.astream(input=history, **generation_config):
            assert isinstance(chunk, AIMessageChunk)
            if call := chunk.tool_call_chunks:
                # Tool calls arrive incrementally: id and name once, the JSON
                # arguments spread across many chunks.
                if tool_id := call[0].get("id"):
                    ctx.tool_id.set(tool_id)
                if name := call[0].get("name"):
                    ctx.tool_name.set(name)
                if args := call[0].get("args"):
                    ctx.tool_buffer.set(ctx.tool_buffer.get() + args)
            else:
                if buffer := chunk.content:
                    assert isinstance(buffer, str)
                    ctx.buffer.set(ctx.buffer.get() + buffer)
                    yield buffer

    async def generate(self, history, overwrites: dict | None = None):
        """Single non-streaming LLM call over `history`."""
        generation_config = self._generation_config_overwrite(overwrites)
        return await self.llm.ainvoke(input=history, **generation_config)

View File

@@ -0,0 +1,4 @@
Eres ChatOCP, un amigable y profesional asistente virtual de la Oficina Corporativa de Proyectos (OCP) de Banorte.
Tu objetivo es responder preguntas de usuarios de manera informativa y detallada.
Para responder TODAS las preguntas, utiliza la herramienta 'get_information' para obtener referencias relevantes a la pregunta de nuestro repositorio interno de documentos.
Utiliza las referencias para responder la pregunta del usuario, y cita tu respuesta con el número de referencia. Ejemplo: este es un texto[1], este es otro texto[2].

View File

@@ -0,0 +1,19 @@
[
{
"type": "function",
"function": {
"name": "get_information",
"description": "Search a private repository for information.",
"parameters": {
"type": "object",
"properties": {
"question": {
"type": "string",
"description": "The user question"
}
},
"required": ["question"]
}
}
}
]

66
apps/ocp/api/config.py Normal file
View File

@@ -0,0 +1,66 @@
from hvac import Client
from pydantic import Field
from pydantic_settings import BaseSettings

# Pull shared secrets from Vault once at import time; every `Field` below
# with a default_factory reads from this mapping.
client = Client(url="https://vault.ia-innovacion.work")
if not client.is_authenticated():
    # Fail fast with a specific error type: without Vault there are no API
    # keys and the service cannot run. RuntimeError (a subclass of Exception)
    # keeps existing `except Exception` callers working.
    raise RuntimeError("Vault authentication failed")
secret_map = client.secrets.kv.v2.read_secret_version(
    path="banortegpt", mount_point="secret"
)["data"]["data"]
class Settings(BaseSettings):
    """Application settings.

    Plain defaults cover tunable behaviour; the `Field(default_factory=...)`
    entries are credentials resolved from the Vault-backed `secret_map`
    loaded at module import.
    """

    # Config
    log_level: str = "warning"
    service_name: str = "MayaOCP"
    model: str = "gpt-4o"
    # NOTE(review): typed int, so only whole-number temperatures validate —
    # confirm fractional temperatures are never needed.
    model_temperature: int = 0
    embedding_model: str = "text-embedding-3-large"
    # Maximum number of conversation messages sent to the LLM per request.
    message_limit: int = 10
    storage_bucket: str = "ocpreferences"
    vector_index: str = "MayaOCP"
    # Number of semantic-search hits retrieved per question.
    search_limit: int = 3
    host: str = "0.0.0.0"
    port: int = 8000

    # API Keys
    azure_endpoint: str = Field(default_factory=lambda: secret_map["azure_endpoint"])
    openai_api_key: str = Field(default_factory=lambda: secret_map["openai_api_key"])
    openai_api_version: str = Field(
        default_factory=lambda: secret_map["openai_api_version"]
    )
    azure_blob_connection_string: str = Field(
        default_factory=lambda: secret_map["azure_blob_connection_string"]
    )
    qdrant_url: str = Field(default_factory=lambda: secret_map["qdrant_api_url"])
    qdrant_api_key: str | None = Field(
        default_factory=lambda: secret_map["qdrant_api_key"]
    )
    # Cosmos DB connection string exposed under a Mongo-compatible name.
    mongodb_url: str = Field(
        default_factory=lambda: secret_map["cosmosdb_connection_string"]
    )
    otel_exporter_otlp_endpoint: str | None = Field(
        default_factory=lambda: secret_map["otel_exporter_otlp_endpoint"]
    )
    otel_exporter_otlp_headers: str | None = Field(
        default_factory=lambda: secret_map["otel_exporter_otlp_headers"]
    )

    async def init_mongo_db(self):
        """Initialise the Beanie ODM over the configured MongoDB database.

        Imports are local so that merely importing this module does not pull
        in the database stack.
        """
        from banortegpt.database.mongo_memory.models import Conversation
        from beanie import init_beanie
        from motor.motor_asyncio import AsyncIOMotorClient

        # NOTE(review): this local `client` shadows the module-level Vault
        # client — harmless here, but consider renaming.
        client = AsyncIOMotorClient(self.mongodb_url)
        await init_beanie(
            database=client.banortegptdos,
            document_models=[Conversation],
        )


# Singleton settings instance used across the API.
config = Settings()  # type: ignore

6
apps/ocp/api/context.py Normal file
View File

@@ -0,0 +1,6 @@
from contextvars import ContextVar

# Per-request accumulation state for LLM responses. ContextVars keep
# concurrent requests isolated without threading explicit state through the
# agent's streaming code.
buffer: ContextVar[str] = ContextVar("buffer", default="")  # assistant text accumulated so far
tool_buffer: ContextVar[str] = ContextVar("tool_buffer", default="")  # raw argument string of the pending tool call
tool_id: ContextVar[str | None] = ContextVar("tool_id", default=None)  # id of the pending tool call, if any
tool_name: ContextVar[str | None] = ContextVar("tool_name", default=None)  # name of the pending tool call, if any

57
apps/ocp/api/server.py Normal file
View File

@@ -0,0 +1,57 @@
import logging
import uuid
from contextlib import asynccontextmanager
import logfire
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from . import services
from .config import config
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(_: FastAPI):
    """FastAPI lifespan hook: initialise the MongoDB/Beanie layer on startup."""
    await config.init_mongo_db()
    yield


app = FastAPI(lifespan=lifespan)
# Instrument all routes with Logfire tracing.
logfire.instrument_fastapi(app)
class Message(BaseModel):
    """Request body for POST /api/v1/message."""

    # Conversation id previously returned by POST /api/v1/conversation.
    conversation_id: uuid.UUID
    # The user's message text.
    prompt: str
@app.post("/api/v1/conversation")
async def create_conversation():
    """Start a new conversation and return its freshly generated id."""
    new_conversation_id = uuid.uuid4()
    await services.create_conversation(new_conversation_id)
    return {"conversation_id": new_conversation_id}
@app.post("/api/v1/message")
async def send(message: Message, stream: bool = False):
    """Answer a user message.

    With ``stream=true`` the reply is sent as Server-Sent Events, one
    ``data:`` line per response chunk; otherwise the complete response is
    returned as JSON.
    """
    if stream:

        async def sse_events():
            # Wrap each chunk from the service layer as an SSE `data:` event.
            async for chunk in services.stream(message.prompt, message.conversation_id):
                content = chunk.model_dump_json()
                data = f"data: {content}\n\n"
                logger.info(f"Yielding Event: {data}")
                yield data

        return StreamingResponse(sse_events(), media_type="text/event-stream")
    else:
        response = await services.generate(message.prompt, message.conversation_id)
        return response

View File

@@ -0,0 +1,9 @@
# Public service-layer API: re-export each module's entry point so callers
# can simply `from . import services` and use `services.stream(...)` etc.
from .create_conversation import create_conversation
from .generate_response import generate
from .stream_response import stream

__all__ = [
    "stream",
    "generate",
    "create_conversation",
]

View File

@@ -0,0 +1,9 @@
from uuid import UUID

from banortegpt.database.mongo_memory import crud
from api.agent import agent


async def create_conversation(user_id: UUID) -> None:
    """Persist a new conversation seeded with the agent's system prompt.

    NOTE(review): the parameter is named `user_id`, but the caller
    (api.server.create_conversation) passes a freshly generated conversation
    id — presumably this IS the conversation id; confirm and consider
    renaming.
    """
    await crud.create_conversation(user_id, agent.system_prompt)

View File

@@ -0,0 +1,92 @@
import json
from typing import Any
from uuid import UUID
from banortegpt.database.mongo_memory import crud
from langfuse.decorators import langfuse_context, observe
from pydantic import BaseModel
import api.context as ctx
from api.agent import agent
class Response(BaseModel):
    """Final answer returned by `generate`."""

    # Assistant reply text.
    content: str
    # Signed reference and image URLs backing the citations (may be empty).
    urls: list[str]
@observe(capture_input=False, capture_output=False)
async def generate(
    prompt: str,
    conversation_id: UUID,
) -> Response:
    """Produce a complete (non-streaming) answer for `prompt`.

    Flow: append the user message, run the LLM once; if it requested a tool
    call, execute the tool, feed its result back into the conversation, and
    run the LLM a second time with tools disabled. The exchange is traced to
    Langfuse and both sides are persisted on the conversation.

    Raises:
        ValueError: if the conversation does not exist, or no answer text
            was produced.
    """
    conversation = await crud.get_conversation(conversation_id)
    if conversation is None:
        raise ValueError(f"Conversation with ID {conversation_id} not found")
    conversation.add(role="user", content=prompt)
    response = await agent.generate(conversation.to_openai_format(agent.message_limit))
    reference_urls, image_urls = [], []
    # Stash either the (first) tool call or the plain answer into context
    # vars, mirroring how the streaming path accumulates state.
    if call := response.tool_calls:
        # NOTE(review): attribute access (`.id`, `.function.name`) assumes
        # OpenAI-style tool-call objects; confirm this matches whatever
        # `agent.generate` actually returns (langchain messages expose dicts).
        if id := call[0].id:
            ctx.tool_id.set(id)
        if name := call[0].function.name:
            ctx.tool_name.set(name)
        ctx.tool_buffer.set(call[0].function.arguments)
    else:
        assert response.content is not None
        ctx.buffer.set(response.content)
    buffer = ctx.buffer.get()
    tool_buffer = ctx.tool_buffer.get()
    tool_id = ctx.tool_id.get()
    tool_name = ctx.tool_name.get()
    if tool_id is not None:
        # If tool_buffer is a JSON string, decode it into a kwargs dict;
        # on malformed JSON fall back to treating the raw text as the
        # question itself.
        if isinstance(tool_buffer, str):
            try:
                tool_args = json.loads(tool_buffer)
            except json.JSONDecodeError:
                tool_args = {"question": tool_buffer}
        else:
            tool_args = tool_buffer
        response, payloads = await agent.tool_map[tool_name](**tool_args)  # type: ignore
        assert tool_name is not None
        tool_call: dict[str, Any] = agent.llm.build_tool_call(
            tool_id, tool_name, tool_buffer
        )
        tool_call_id: dict[str, Any] = agent.llm.build_tool_call_id(tool_id)
        conversation.add("assistant", **tool_call)
        conversation.add("tool", content=response, **tool_call_id)
        # Second pass with tools disabled so the model must answer from the
        # tool output it just received.
        response = await agent.generate(
            conversation.to_openai_format(agent.message_limit), {"tools": None}
        )
        assert response.content is not None
        ctx.buffer.set(response.content)
        reference_urls, image_urls = await agent.get_shareable_urls(payloads)  # type: ignore
    buffer = ctx.buffer.get()
    if buffer is None:
        raise ValueError("No buffer found")
    conversation.add(role="assistant", content=buffer)
    langfuse_context.update_current_trace(
        name=agent.__class__.__name__,
        session_id=str(conversation_id),
        input=prompt,
        output=buffer,
    )
    return Response(content=buffer, urls=reference_urls + image_urls)

View File

@@ -0,0 +1,110 @@
import json
import logging
from enum import StrEnum
from typing import TypeAlias
from uuid import UUID
from banortegpt.database.mongo_memory import crud
from langfuse.decorators import langfuse_context, observe
from pydantic import BaseModel
import api.context as ctx
from api.agent import agent
logger = logging.getLogger(__name__)
class ChunkType(StrEnum):
    """Discriminator for the events emitted by the streaming endpoint."""

    START = "start"  # stream opened
    TEXT = "text"  # incremental assistant text
    REFERENCE = "reference"  # list of reference (document) URLs
    IMAGE = "image"  # list of image URLs
    TOOL = "tool"  # a tool call is being executed
    END = "end"  # stream finished normally
    ERROR = "error"  # declared for error signaling; not emitted by stream() in this file
# Scalar payload types a chunk may carry (lists of these are used for URL chunks).
ContentType: TypeAlias = str | int


class ResponseChunk(BaseModel):
    """Single server-sent event in the streaming response."""

    # `content` is a scalar (TEXT), a list (REFERENCE/IMAGE), or None (TOOL).
    type: ChunkType
    content: ContentType | list[ContentType] | None
@observe(capture_input=False, capture_output=False)
async def stream(prompt: str, conversation_id: UUID):
    """Stream an assistant reply for `conversation_id` as ResponseChunk events.

    Event order: START, then TEXT chunks; if the model requests a tool call,
    a TOOL marker, more TEXT chunks, and optional REFERENCE/IMAGE chunks;
    finally END. The full exchange is persisted back to the conversation store.

    Args:
        prompt: The user's message.
        conversation_id: Identifier of an existing stored conversation.

    Raises:
        ValueError: If the conversation does not exist.
    """
    logger.info("Starting stream")
    yield ResponseChunk(type=ChunkType.START, content="")
    logger.info(f"Fetching conversation {conversation_id}")
    conversation = await crud.get_conversation(conversation_id)
    # Raise explicitly: the previous `assert conversation is not None` made
    # this branch unreachable dead code, and asserts vanish under `python -O`.
    if conversation is None:
        raise ValueError("Conversation not found")
    # NOTE(review): this logs the full message history at INFO level — confirm
    # that is acceptable for production log retention/privacy.
    logger.info(f"Conversation messages: {conversation.messages}")
    conversation.add(role="user", content=prompt)
    history = conversation.to_openai_format(agent.message_limit, langchain_compat=True)
    async for content in agent.stream(history):
        yield ResponseChunk(type=ChunkType.TEXT, content=content)
    if (tool_id := ctx.tool_id.get()) is not None:
        # The agent stashed a tool call in context vars while streaming.
        tool_buffer = ctx.tool_buffer.get()
        assert tool_buffer is not None
        tool_name = ctx.tool_name.get()
        assert tool_name is not None
        yield ResponseChunk(type=ChunkType.TOOL, content=None)
        buffer_dict = json.loads(tool_buffer)
        response, payloads = await agent.tool_map[tool_name](**buffer_dict)
        # Record the tool round-trip so the follow-up completion sees it.
        conversation.add(
            role="assistant",
            tool_calls=[
                {
                    "id": tool_id,
                    "function": {
                        "name": tool_name,
                        "arguments": tool_buffer,
                    },
                    "type": "function",
                }
            ],
        )
        conversation.add(role="tool", content=response, tool_call_id=tool_id)
        history = conversation.to_openai_format(
            agent.message_limit, langchain_compat=True
        )
        # Second pass with tools disabled so the model answers in plain text.
        async for content in agent.stream(history, {"tools": None}):
            yield ResponseChunk(type=ChunkType.TEXT, content=content)
        ref_urls, image_urls = await agent.get_shareable_urls(payloads)  # type: ignore
        if len(ref_urls) > 0:
            yield ResponseChunk(type=ChunkType.REFERENCE, content=ref_urls)
        if len(image_urls) > 0:
            yield ResponseChunk(type=ChunkType.IMAGE, content=image_urls)
    # NOTE(review): buffer may be None if the model produced no text — confirm
    # conversation.add accepts None content (the sibling `generate` raises).
    buffer = ctx.buffer.get()
    conversation.add(role="assistant", content=buffer)
    await conversation.replace()
    yield ResponseChunk(type=ChunkType.END, content="")
    langfuse_context.update_current_trace(
        name=agent.__class__.__name__,
        session_id=str(conversation_id),
        input=prompt,
        output=buffer,
    )

64
apps/ocp/gui/App.tsx Normal file
View File

@@ -0,0 +1,64 @@
import { Chat, ChatSidebar } from "@banorte/chat-ui";
import { messageStore } from "./store/messageStore";
import { conversationStore } from "./store/conversationStore";
import { httpRequest } from "./utils/request";
// Assets
import banorteLogo from "./assets/banortelogo.png";
import sidebarMaya from "./assets/sidebar_maya_contigo.png";
import brujulaElipse from "./assets/brujula_elipse.png";
import sendIcon from "./assets/chat_maya_boton_enviar.png";
import userAvatar from "./assets/chat_maya_default_avatar.png";
import botAvatar from "./assets/brujula.png";
function App() {
const { messages, pushMessage } = messageStore();
const {
conversationId,
setConversationId,
setAssistantName,
receivingMsg,
setReceivingMsg
} = conversationStore();
const handleStartConversation = async (user: string, assistant: string): Promise<string> => {
const response = await httpRequest("POST", "/v1/conversation", { user, assistant });
return response.conversation_id;
};
const handleFeedback = async (key: string, rating: string): Promise<void> => {
await httpRequest("POST", "/v1/feedback", { key, rating });
};
const assistant = "MayaOCP";
return (
<div className="w-screen flex flex-col h-screen min-h-screen scrollbar-none">
<div className="w-full flex">
<ChatSidebar
assistant={assistant}
logoSrc={banorteLogo}
sidebarImageSrc={sidebarMaya}
assistantAvatarSrc={brujulaElipse}
/>
<Chat
assistant={assistant}
messages={messages}
pushMessage={pushMessage}
conversationId={conversationId}
setConversationId={setConversationId}
setAssistantName={setAssistantName}
receivingMsg={receivingMsg}
setReceivingMsg={setReceivingMsg}
onStartConversation={handleStartConversation}
sendIcon={sendIcon}
userAvatar={userAvatar}
botAvatar={botAvatar}
onFeedback={handleFeedback}
/>
</div>
</div>
);
}
export default App;

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 97 KiB

16
apps/ocp/gui/index.css Normal file
View File

@@ -0,0 +1,16 @@
/* Tailwind layer directives — expanded at build time by the Tailwind plugin. */
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Anchor styling inside rendered markdown chat messages (.markdown wrapper). */
.markdown a {
    color: #0000FF;
    text-decoration: underline;
}
.markdown a:hover {
    color: #FF0000;
}
.markdown a:visited {
    color: #800080;
}

5
apps/ocp/gui/main.tsx Normal file
View File

@@ -0,0 +1,5 @@
import ReactDOM from "react-dom/client";
import App from "./App.tsx";
import "./index.css";

// Mount the React app; index.html provides the #root element (hence the `!`).
ReactDOM.createRoot(document.getElementById("root")!).render(<App />);

View File

@@ -0,0 +1,19 @@
import { create } from "zustand";
/** Conversation-level UI state shared across chat components. */
interface ConversationState {
  assistantName: string; // display name of the active assistant
  conversationId: string; // backend conversation id ("" until one is started)
  receivingMsg: boolean; // true while a streamed reply is in flight
  setConversationId: (newId: string) => void;
  setAssistantName: (newName: string) => void;
  setReceivingMsg: (newState: boolean) => void;
}

// Zustand store for conversation metadata. The interface was renamed to
// PascalCase per TypeScript convention; it is not exported, so no caller
// is affected.
export const conversationStore = create<ConversationState>()((set) => ({
  assistantName: "",
  conversationId: "",
  receivingMsg: false,
  setConversationId: (newId) => set({ conversationId: newId }),
  setAssistantName: (newName) => set({ assistantName: newName }),
  setReceivingMsg: (newState) => set({ receivingMsg: newState }),
}));

View File

@@ -0,0 +1,14 @@
import { create } from "zustand";
/** Chat transcript state: ordered messages plus append/reset actions. */
interface MessageState {
  messages: Array<{ user: boolean; content: string }>; // user=true for user-authored messages
  pushMessage: (newMessage: { user: boolean; content: string }) => void;
  resetConversation: () => void;
}

// Zustand store for the message list. The interface was renamed to PascalCase
// per TypeScript convention; it is not exported, so no caller is affected.
export const messageStore = create<MessageState>()((set) => ({
  messages: [],
  // Append immutably so subscribers re-render on the new array reference.
  pushMessage: (newMessage) =>
    set((state) => ({ messages: [...state.messages, newMessage] })),
  resetConversation: () => set(() => ({ messages: [] })),
}));

View File

@@ -0,0 +1,16 @@
/**
 * Minimal JSON helper for calls to the backend under the "/api" prefix.
 *
 * @param method - HTTP verb, e.g. "GET" or "POST".
 * @param endpoint - Path under "/api", e.g. "/v1/feedback".
 * @param body - JSON-serializable payload, or null to send no body.
 * @returns The parsed JSON response body.
 * @throws Error when the server responds with a non-2xx status.
 */
export async function httpRequest(
  method: string,
  endpoint: string,
  body: object | null,
) {
  const url = "/api" + endpoint;
  const options: RequestInit = {
    method,
    headers: {
      "Content-Type": "application/json",
    },
    credentials: "include",
  };
  // Previously JSON.stringify(body) produced a literal "null" body when body
  // was null — fetch rejects GET/HEAD requests that carry a body. Omit it.
  if (body !== null) {
    options.body = JSON.stringify(body);
  }
  const response = await fetch(url, options);
  // Surface HTTP errors instead of silently parsing an error payload.
  if (!response.ok) {
    throw new Error(`Request failed: ${method} ${url} -> ${response.status}`);
  }
  return response.json();
}

1
apps/ocp/gui/vite-env.d.ts vendored Normal file
View File

@@ -0,0 +1 @@
/// <reference types="vite/client" />

13
apps/ocp/index.html Normal file
View File

@@ -0,0 +1,13 @@
<!doctype html>
<!-- App shell; Vite injects the bundled module from /gui/main.tsx.
     lang set to "es": the assistant UI and prompts are Spanish. -->
<html lang="es">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>MayaOCP</title>
  </head>
  <body>
    <div id="root"></div>
    <script type="module" src="/gui/main.tsx"></script>
  </body>
</html>

40
apps/ocp/package.json Normal file
View File

@@ -0,0 +1,40 @@
{
"name": "ocp",
"private": true,
"version": "0.0.7",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
"preview": "vite preview"
},
"dependencies": {
"@banorte/chat-ui": "workspace:*",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-markdown": "^9.0.1",
"react-spring": "^9.7.4",
"rehype-raw": "^7.0.0",
"sse.js": "^2.5.0",
"zustand": "^4.5.2"
},
"devDependencies": {
"@iconify-icon/react": "^2.1.0",
"@types/react": "^18.2.67",
"@types/react-dom": "^18.2.22",
"@typescript-eslint/eslint-plugin": "^7.3.1",
"@typescript-eslint/parser": "^7.3.1",
"@vitejs/plugin-react": "^4.2.1",
"autoprefixer": "^10.4.19",
"daisyui": "^4.7.3",
"eslint": "^8.57.0",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.6",
"postcss": "^8.4.38",
"tailwind-scrollbar": "^3.1.0",
"tailwindcss": "^3.4.1",
"typescript": "^5.4.3",
"vite": "^5.2.3"
}
}

8117
apps/ocp/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,6 @@
// PostCSS pipeline: expand Tailwind directives first, then add vendor prefixes.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

31
apps/ocp/pyproject.toml Normal file
View File

@@ -0,0 +1,31 @@
[project]
name = "ocp"
version = "0.1.0"
description = "MayaOCP assistant: FastAPI backend and React chat GUI"
readme = "README.md"
requires-python = ">=3.12, <4"
dependencies = [
"aiohttp>=3.11.16",
"azure-storage",
"fastapi[standard]>=0.115.6",
"hvac>=2.3.0",
"langchain-azure-ai[opentelemetry]>=0.1.4",
"langfuse>=2.60.2",
"logfire[fastapi,httpx,pymongo,system-metrics]>=3.18.0",
"mongo-memory",
"openai==1.84.0",
"pydantic-settings>=2.8.1",
"qdrant",
]
[tool.uv.sources]
azure-storage = { workspace = true }
qdrant = { workspace = true }
mongo-memory = { workspace = true }
[tool.pyright]
venvPath = "../../."
venv = ".venv"
[dependency-groups]
dev = ["pydeps>=3.0.1"]

View File

@@ -0,0 +1,27 @@
/** @type {import('tailwindcss').Config} */
export default {
content: ["./index.html", "./gui/**/*.{js,ts,jsx,tsx}"],
theme: {
extend: {
backgroundImage: {
"navigation-pattern": "url('./assets/navigation.webp')",
},
},
},
plugins: [
require("daisyui"),
require("tailwind-scrollbar"),
require("@banorte/chat-ui/tailwind")
],
daisyui: {
themes: [
{
light: {
...require("daisyui/src/theming/themes")["light"],
primary: "red",
secondary: "teal",
},
},
],
},
};

25
apps/ocp/tsconfig.json Normal file
View File

@@ -0,0 +1,25 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "useDefineForClassFields": true,
    "lib": ["ES2020", "DOM", "DOM.Iterable", "ES2021.String"],
    "module": "ESNext",
    "skipLibCheck": true,

    /* Bundler mode — Vite transpiles; tsc only type-checks (noEmit) */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "resolveJsonModule": true,
    "isolatedModules": true,
    "noEmit": true,
    "jsx": "react-jsx",

    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noFallthroughCasesInSwitch": true
  },
  "include": ["gui"],
  "references": [{ "path": "./tsconfig.node.json" }]
}

View File

@@ -0,0 +1,11 @@
/* Project reference for Node-side tooling config (vite.config.ts only). */
{
  "compilerOptions": {
    "composite": true,
    "skipLibCheck": true,
    "module": "ESNext",
    "moduleResolution": "bundler",
    "allowSyntheticDefaultImports": true,
    "strict": true
  },
  "include": ["vite.config.ts"]
}

16
apps/ocp/vite.config.ts Normal file
View File

@@ -0,0 +1,16 @@
import { defineConfig } from "vite";
import react from "@vitejs/plugin-react";

// https://vitejs.dev/config/
export default defineConfig({
  plugins: [react()],
  server: {
    host: "0.0.0.0", // listen on all interfaces (container / remote dev access)
    port: 3000,
    proxy: {
      // Forward /api calls to the FastAPI backend during development.
      "/api": {
        target: "http://localhost:8000",
      },
    },
  },
});