improve colors

Author: Anibal Angulo
Date: 2025-11-06 17:16:30 -06:00
parent c5e0a451c0
commit 314a876744
8 changed files with 486 additions and 186 deletions


@@ -7,24 +7,19 @@ from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
 from .core.config import settings
+from .routers.agent import router as agent_router
 from .routers.chunking import router as chunking_router
 from .routers.chunking_landingai import router as chunking_landingai_router
 from .routers.dataroom import router as dataroom_router
-# Import routers
 from .routers.files import router as files_router
 from .routers.schemas import router as schemas_router
 from .routers.vectors import router as vectors_router
-# from routers.ai import router as ai_router # futuro con Azure OpenAI
-# Import config

 # Configurar logging
 logging.basicConfig(
-    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    level=logging.WARNING, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
 )
+logging.getLogger("app").setLevel(logging.INFO)
 logger = logging.getLogger(__name__)
@@ -130,6 +125,8 @@ app.include_router(chunking_landingai_router)
 app.include_router(dataroom_router, prefix="/api/v1")
+app.include_router(agent_router)

 # Router para IA
 # app.include_router(
 #     ai_router,
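For reference, the effect of the new logging configuration is that third-party loggers inherit WARNING from the root logger, while anything under the application's "app" package still emits INFO. A minimal standalone illustration (not part of the commit):

    import logging

    logging.basicConfig(
        level=logging.WARNING,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )
    logging.getLogger("app").setLevel(logging.INFO)

    # Inherits WARNING from the root logger, so this INFO record is dropped.
    logging.getLogger("azure.core.pipeline").info("verbose SDK detail")
    # Inherits INFO from the "app" logger, so this record is emitted.
    logging.getLogger("app.services.azure_blob_service").info("app-level detail")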


@@ -0,0 +1,24 @@
from fastapi import APIRouter
from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIChatModel
from pydantic_ai.providers.azure import AzureProvider
from pydantic_ai.ui.vercel_ai import VercelAIAdapter
from starlette.requests import Request
from starlette.responses import Response

from app.core.config import settings

provider = AzureProvider(
    azure_endpoint=settings.AZURE_OPENAI_ENDPOINT,
    api_version=settings.AZURE_OPENAI_API_VERSION,
    api_key=settings.AZURE_OPENAI_API_KEY,
)
model = OpenAIChatModel(model_name="gpt-4o", provider=provider)
agent = Agent(model=model)

router = APIRouter(prefix="/api/v1/agent", tags=["Agent"])


@router.post("/chat")
async def chat(request: Request) -> Response:
    return await VercelAIAdapter.dispatch_request(request, agent=agent)
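The new router assumes that `settings` in app/core/config.py exposes AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_VERSION, and AZURE_OPENAI_API_KEY. Those fields are not shown in this commit; a minimal sketch of how they might be declared with pydantic-settings (field names taken from the router above; defaults and env handling are assumptions, the real Settings class may differ):

    from pydantic_settings import BaseSettings, SettingsConfigDict


    class Settings(BaseSettings):
        model_config = SettingsConfigDict(env_file=".env", extra="ignore")

        # Assumed fields read by the agent router.
        AZURE_OPENAI_ENDPOINT: str = "https://<your-resource>.openai.azure.com/"
        AZURE_OPENAI_API_VERSION: str = "2024-06-01"
        AZURE_OPENAI_API_KEY: str = ""


    settings = Settings()

With those values in place, POST /api/v1/agent/chat is handled by VercelAIAdapter.dispatch_request, which translates between the Vercel AI chat protocol and the pydantic-ai agent.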


@@ -1,9 +1,17 @@
-from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient, generate_blob_sas, BlobSasPermissions
-from azure.core.exceptions import ResourceNotFoundError, ResourceExistsError
-from typing import List, Optional, BinaryIO
 import logging
-from datetime import datetime, timezone, timedelta
 import os
+from datetime import datetime, timedelta, timezone
+from typing import BinaryIO, List, Optional
+
+from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError
+from azure.storage.blob import (
+    BlobClient,
+    BlobSasPermissions,
+    BlobServiceClient,
+    ContainerClient,
+    generate_blob_sas,
+)
+
 from ..core.config import settings

 logger = logging.getLogger(__name__)
@@ -13,7 +21,7 @@ class AzureBlobService:
""" """
Servicio para interactuar con Azure Blob Storage Servicio para interactuar con Azure Blob Storage
""" """
def __init__(self): def __init__(self):
"""Inicializar el cliente de Azure Blob Storage""" """Inicializar el cliente de Azure Blob Storage"""
try: try:
@@ -21,7 +29,9 @@ class AzureBlobService:
                 settings.AZURE_STORAGE_CONNECTION_STRING
             )
             self.container_name = settings.AZURE_CONTAINER_NAME
-            logger.info(f"Cliente de Azure Blob Storage inicializado para container: {self.container_name}")
+            logger.info(
+                f"Cliente de Azure Blob Storage inicializado para container: {self.container_name}"
+            )

             # Configurar CORS automáticamente al inicializar
             self._configure_cors()
@@ -45,7 +55,7 @@ class AzureBlobService:
                 allowed_methods=["GET", "HEAD", "OPTIONS"],
                 allowed_headers=["*"],
                 exposed_headers=["*"],
-                max_age_in_seconds=3600
+                max_age_in_seconds=3600,
             )

             # Aplicar la configuración CORS
@@ -55,15 +65,19 @@ class AzureBlobService:
         except Exception as e:
             # No fallar si CORS no se puede configurar (puede que ya esté configurado)
             logger.warning(f"No se pudo configurar CORS automáticamente: {e}")
-            logger.warning("Asegúrate de configurar CORS manualmente en Azure Portal si es necesario")
+            logger.warning(
+                "Asegúrate de configurar CORS manualmente en Azure Portal si es necesario"
+            )

     async def create_container_if_not_exists(self) -> bool:
         """
         Crear el container si no existe
         Returns: True si se creó, False si ya existía
         """
         try:
-            container_client = self.blob_service_client.get_container_client(self.container_name)
+            container_client = self.blob_service_client.get_container_client(
+                self.container_name
+            )
             container_client.create_container()
             logger.info(f"Container '{self.container_name}' creado exitosamente")
             return True
@@ -73,217 +87,249 @@ class AzureBlobService:
         except Exception as e:
             logger.error(f"Error creando container: {e}")
             raise e

-    async def upload_file(self, file_data: BinaryIO, blob_name: str, tema: str = "") -> dict:
+    async def upload_file(
+        self, file_data: BinaryIO, blob_name: str, tema: str = ""
+    ) -> dict:
         """
         Subir un archivo a Azure Blob Storage

         Args:
             file_data: Datos del archivo
             blob_name: Nombre del archivo en el blob
-            tema: Tema/carpeta donde guardar el archivo
+            tema: Tema/carpeta donde guardar el archivo (se normaliza a lowercase)

         Returns:
             dict: Información del archivo subido
         """
         try:
-            # Construir la ruta completa con tema si se proporciona
-            full_blob_name = f"{tema}/{blob_name}" if tema else blob_name
+            # Normalizar tema a lowercase para consistencia
+            tema_normalized = tema.lower() if tema else ""
+
+            # Construir la ruta completa con tema normalizado
+            full_blob_name = (
+                f"{tema_normalized}/{blob_name}" if tema_normalized else blob_name
+            )

             # Obtener cliente del blob
             blob_client = self.blob_service_client.get_blob_client(
-                container=self.container_name,
-                blob=full_blob_name
+                container=self.container_name, blob=full_blob_name
             )

             # Subir el archivo
             blob_client.upload_blob(file_data, overwrite=True)

             # Obtener propiedades del blob
             blob_properties = blob_client.get_blob_properties()

             logger.info(f"Archivo '{full_blob_name}' subido exitosamente")

             return {
                 "name": blob_name,
                 "full_path": full_blob_name,
-                "tema": tema,
+                "tema": tema_normalized,
                 "size": blob_properties.size,
                 "last_modified": blob_properties.last_modified,
-                "url": blob_client.url
+                "url": blob_client.url,
             }

         except Exception as e:
             logger.error(f"Error subiendo archivo '{blob_name}': {e}")
             raise e
     async def download_file(self, blob_name: str, tema: str = "") -> bytes:
         """
         Descargar un archivo de Azure Blob Storage

         Args:
             blob_name: Nombre del archivo
-            tema: Tema/carpeta donde está el archivo
+            tema: Tema/carpeta donde está el archivo (búsqueda case-insensitive)

         Returns:
             bytes: Contenido del archivo
         """
         try:
-            # Construir la ruta completa
-            full_blob_name = f"{tema}/{blob_name}" if tema else blob_name
+            # Si se proporciona tema, buscar el archivo de manera case-insensitive
+            if tema:
+                full_blob_name = await self._find_blob_case_insensitive(blob_name, tema)
+            else:
+                full_blob_name = blob_name

             # Obtener cliente del blob
             blob_client = self.blob_service_client.get_blob_client(
-                container=self.container_name,
-                blob=full_blob_name
+                container=self.container_name, blob=full_blob_name
             )

             # Descargar el archivo
             blob_data = blob_client.download_blob()
             content = blob_data.readall()

             logger.info(f"Archivo '{full_blob_name}' descargado exitosamente")
             return content

         except ResourceNotFoundError:
             logger.error(f"Archivo '{full_blob_name}' no encontrado")
             raise FileNotFoundError(f"El archivo '{blob_name}' no existe")
         except Exception as e:
             logger.error(f"Error descargando archivo '{blob_name}': {e}")
             raise e

     async def delete_file(self, blob_name: str, tema: str = "") -> bool:
         """
         Eliminar un archivo de Azure Blob Storage

         Args:
             blob_name: Nombre del archivo
-            tema: Tema/carpeta donde está el archivo
+            tema: Tema/carpeta donde está el archivo (búsqueda case-insensitive)

         Returns:
             bool: True si se eliminó exitosamente
         """
         try:
-            # Construir la ruta completa
-            full_blob_name = f"{tema}/{blob_name}" if tema else blob_name
+            # Si se proporciona tema, buscar el archivo de manera case-insensitive
+            if tema:
+                full_blob_name = await self._find_blob_case_insensitive(blob_name, tema)
+            else:
+                full_blob_name = blob_name

             # Obtener cliente del blob
             blob_client = self.blob_service_client.get_blob_client(
-                container=self.container_name,
-                blob=full_blob_name
+                container=self.container_name, blob=full_blob_name
             )

             # Eliminar el archivo
             blob_client.delete_blob()

             logger.info(f"Archivo '{full_blob_name}' eliminado exitosamente")
             return True

         except ResourceNotFoundError:
             logger.error(f"Archivo '{full_blob_name}' no encontrado para eliminar")
             raise FileNotFoundError(f"El archivo '{blob_name}' no existe")
         except Exception as e:
             logger.error(f"Error eliminando archivo '{blob_name}': {e}")
             raise e
     async def list_files(self, tema: str = "") -> List[dict]:
         """
         Listar archivos en el container o en un tema específico

         Args:
-            tema: Tema/carpeta específica (opcional)
+            tema: Tema/carpeta específica (opcional) - filtrado case-insensitive

         Returns:
             List[dict]: Lista de archivos con sus propiedades
         """
         try:
-            container_client = self.blob_service_client.get_container_client(self.container_name)
-
-            # Filtrar por tema si se proporciona
-            name_starts_with = f"{tema}/" if tema else None
-
-            blobs = container_client.list_blobs(name_starts_with=name_starts_with)
+            container_client = self.blob_service_client.get_container_client(
+                self.container_name
+            )
+
+            # Obtener todos los blobs para hacer filtrado case-insensitive
+            blobs = container_client.list_blobs()

             files = []
+            tema_lower = tema.lower() if tema else ""
+
             for blob in blobs:
                 # Extraer información del blob
+                blob_tema = os.path.dirname(blob.name) if "/" in blob.name else ""
+
+                # Filtrar por tema de manera case-insensitive si se proporciona
+                if tema and blob_tema.lower() != tema_lower:
+                    continue
+
                 blob_info = {
                     "name": os.path.basename(blob.name),
                     "full_path": blob.name,
-                    "tema": os.path.dirname(blob.name) if "/" in blob.name else "",
+                    "tema": blob_tema,
                     "size": blob.size,
                     "last_modified": blob.last_modified,
-                    "content_type": blob.content_settings.content_type if blob.content_settings else None
+                    "content_type": blob.content_settings.content_type
+                    if blob.content_settings
+                    else None,
                 }
                 files.append(blob_info)

-            logger.info(f"Listados {len(files)} archivos" + (f" en tema '{tema}'" if tema else ""))
+            logger.info(
+                f"Listados {len(files)} archivos"
+                + (f" en tema '{tema}' (case-insensitive)" if tema else "")
+            )
             return files

         except Exception as e:
             logger.error(f"Error listando archivos: {e}")
             raise e
     async def get_file_info(self, blob_name: str, tema: str = "") -> dict:
         """
         Obtener información de un archivo específico

         Args:
             blob_name: Nombre del archivo
-            tema: Tema/carpeta donde está el archivo
+            tema: Tema/carpeta donde está el archivo (búsqueda case-insensitive)

         Returns:
             dict: Información del archivo
         """
         try:
-            # Construir la ruta completa
-            full_blob_name = f"{tema}/{blob_name}" if tema else blob_name
+            # Si se proporciona tema, buscar el archivo de manera case-insensitive
+            if tema:
+                full_blob_name = await self._find_blob_case_insensitive(blob_name, tema)
+                # Extraer el tema real del path encontrado
+                real_tema = (
+                    os.path.dirname(full_blob_name) if "/" in full_blob_name else ""
+                )
+            else:
+                full_blob_name = blob_name
+                real_tema = ""

             # Obtener cliente del blob
             blob_client = self.blob_service_client.get_blob_client(
-                container=self.container_name,
-                blob=full_blob_name
+                container=self.container_name, blob=full_blob_name
             )

             # Obtener propiedades
             properties = blob_client.get_blob_properties()

             return {
                 "name": blob_name,
                 "full_path": full_blob_name,
-                "tema": tema,
+                "tema": real_tema,
                 "size": properties.size,
                 "last_modified": properties.last_modified,
                 "content_type": properties.content_settings.content_type,
-                "url": blob_client.url
+                "url": blob_client.url,
             }

         except ResourceNotFoundError:
             logger.error(f"Archivo '{full_blob_name}' no encontrado")
             raise FileNotFoundError(f"El archivo '{blob_name}' no existe")
         except Exception as e:
             logger.error(f"Error obteniendo info del archivo '{blob_name}': {e}")
             raise e

     async def get_download_url(self, blob_name: str, tema: str = "") -> str:
         """
         Obtener URL de descarga directa para un archivo

         Args:
             blob_name: Nombre del archivo
-            tema: Tema/carpeta donde está el archivo
+            tema: Tema/carpeta donde está el archivo (búsqueda case-insensitive)

         Returns:
             str: URL de descarga
         """
         try:
-            # Construir la ruta completa
-            full_blob_name = f"{tema}/{blob_name}" if tema else blob_name
+            # Si se proporciona tema, buscar el archivo de manera case-insensitive
+            if tema:
+                full_blob_name = await self._find_blob_case_insensitive(blob_name, tema)
+            else:
+                full_blob_name = blob_name

             # Obtener cliente del blob
             blob_client = self.blob_service_client.get_blob_client(
-                container=self.container_name,
-                blob=full_blob_name
+                container=self.container_name, blob=full_blob_name
             )

             return blob_client.url
@@ -292,7 +338,9 @@ class AzureBlobService:
logger.error(f"Error obteniendo URL de descarga para '{blob_name}': {e}") logger.error(f"Error obteniendo URL de descarga para '{blob_name}': {e}")
raise e raise e
async def generate_sas_url(self, blob_name: str, tema: str = "", expiry_hours: int = 1) -> str: async def generate_sas_url(
self, blob_name: str, tema: str = "", expiry_hours: int = 1
) -> str:
""" """
Generar una URL SAS (Shared Access Signature) temporal para acceder a un archivo Generar una URL SAS (Shared Access Signature) temporal para acceder a un archivo
@@ -301,7 +349,7 @@ class AzureBlobService:
         Args:
             blob_name: Nombre del archivo
-            tema: Tema/carpeta donde está el archivo
+            tema: Tema/carpeta donde está el archivo (búsqueda case-insensitive)
             expiry_hours: Horas de validez de la URL (por defecto 1 hora)

         Returns:
@@ -310,13 +358,15 @@ class AzureBlobService:
         try:
             from azure.storage.blob import ContentSettings

-            # Construir la ruta completa del blob
-            full_blob_name = f"{tema}/{blob_name}" if tema else blob_name
+            # Si se proporciona tema, buscar el archivo de manera case-insensitive
+            if tema:
+                full_blob_name = await self._find_blob_case_insensitive(blob_name, tema)
+            else:
+                full_blob_name = blob_name

             # Obtener cliente del blob
             blob_client = self.blob_service_client.get_blob_client(
-                container=self.container_name,
-                blob=full_blob_name
+                container=self.container_name, blob=full_blob_name
             )

             # Verificar que el archivo existe antes de generar el SAS
@@ -327,11 +377,13 @@ class AzureBlobService:
             # Esto hace que el navegador muestre el PDF en lugar de descargarlo
             try:
                 content_settings = ContentSettings(
-                    content_type='application/pdf',
-                    content_disposition='inline' # Clave para mostrar en navegador
+                    content_type="application/pdf",
+                    content_disposition="inline",  # Clave para mostrar en navegador
                 )
                 blob_client.set_http_headers(content_settings=content_settings)
-                logger.info(f"Headers configurados para visualización inline de '{full_blob_name}'")
+                logger.info(
+                    f"Headers configurados para visualización inline de '{full_blob_name}'"
+                )
             except Exception as e:
                 logger.warning(f"No se pudieron configurar headers inline: {e}")
@@ -342,9 +394,9 @@ class AzureBlobService:
             # Extraer la account key del connection string para generar el SAS
             # El SAS necesita la account key para firmar el token
             account_key = None
-            for part in settings.AZURE_STORAGE_CONNECTION_STRING.split(';'):
-                if part.startswith('AccountKey='):
-                    account_key = part.split('=', 1)[1]
+            for part in settings.AZURE_STORAGE_CONNECTION_STRING.split(";"):
+                if part.startswith("AccountKey="):
+                    account_key = part.split("=", 1)[1]
                     break

             if not account_key:
@@ -358,13 +410,15 @@ class AzureBlobService:
                 account_key=account_key,
                 permission=BlobSasPermissions(read=True),  # Solo permisos de lectura
                 expiry=expiry_time,
-                start=start_time
+                start=start_time,
             )

             # Construir la URL completa con el SAS token
             sas_url = f"{blob_client.url}?{sas_token}"

-            logger.info(f"SAS URL generada para '{full_blob_name}' (válida por {expiry_hours} horas)")
+            logger.info(
+                f"SAS URL generada para '{full_blob_name}' (válida por {expiry_hours} horas)"
+            )
             return sas_url

         except FileNotFoundError:
@@ -374,6 +428,47 @@ class AzureBlobService:
logger.error(f"Error generando SAS URL para '{blob_name}': {e}") logger.error(f"Error generando SAS URL para '{blob_name}': {e}")
raise e raise e
async def _find_blob_case_insensitive(self, blob_name: str, tema: str) -> str:
"""
Buscar un blob de manera case-insensitive
Args:
blob_name: Nombre del archivo a buscar
tema: Tema donde buscar (case-insensitive)
Returns:
str: Ruta completa del blob encontrado
Raises:
FileNotFoundError: Si no se encuentra el archivo
"""
try:
container_client = self.blob_service_client.get_container_client(
self.container_name
)
blobs = container_client.list_blobs()
tema_lower = tema.lower()
blob_name_lower = blob_name.lower()
for blob in blobs:
blob_tema = os.path.dirname(blob.name) if "/" in blob.name else ""
current_blob_name = os.path.basename(blob.name)
if (
blob_tema.lower() == tema_lower
and current_blob_name.lower() == blob_name_lower
):
return blob.name
# Si no se encuentra, usar la construcción original para que falle apropiadamente
return f"{tema}/{blob_name}"
except Exception as e:
logger.error(f"Error buscando blob case-insensitive: {e}")
# Fallback a construcción original
return f"{tema}/{blob_name}"
# Instancia global del servicio # Instancia global del servicio
azure_service = AzureBlobService() azure_service = AzureBlobService()
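Net effect of the service changes: upload_file now lowercases the tema prefix before writing, and the read, delete, and URL paths resolve the tema (and blob name) case-insensitively via _find_blob_case_insensitive. A small usage sketch, assuming valid Azure credentials in settings and that the module lives at app/services/azure_blob_service.py (path assumed, not shown in this commit):

    import asyncio
    import io

    from app.services.azure_blob_service import azure_service  # module path assumed


    async def demo() -> None:
        # Stored as "contratos/nda.pdf" because upload_file lowercases the tema.
        info = await azure_service.upload_file(
            io.BytesIO(b"%PDF-1.4 dummy"), "nda.pdf", tema="Contratos"
        )
        print(info["full_path"])  # contratos/nda.pdf

        # Any casing of the tema or file name resolves to the same blob.
        content = await azure_service.download_file("NDA.pdf", tema="CONTRATOS")
        print(len(content))


    asyncio.run(demo())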


@@ -28,6 +28,7 @@ dependencies = [
     # LandingAI Document AI
     "landingai-ade>=0.2.1",
     "redis-om>=0.3.5",
+    "pydantic-ai-slim[google,openai]>=1.11.1",
 ]

 [project.scripts]
 dev = "uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload"

backend/uv.lock (generated)

@@ -74,6 +74,7 @@ dependencies = [
{ name = "openai" }, { name = "openai" },
{ name = "pdf2image" }, { name = "pdf2image" },
{ name = "pillow" }, { name = "pillow" },
{ name = "pydantic-ai-slim", extra = ["google", "openai"] },
{ name = "pydantic-settings" }, { name = "pydantic-settings" },
{ name = "pypdf" }, { name = "pypdf" },
{ name = "python-dotenv" }, { name = "python-dotenv" },
@@ -97,6 +98,7 @@ requires-dist = [
{ name = "openai", specifier = ">=1.59.6" }, { name = "openai", specifier = ">=1.59.6" },
{ name = "pdf2image", specifier = ">=1.17.0" }, { name = "pdf2image", specifier = ">=1.17.0" },
{ name = "pillow", specifier = ">=11.0.0" }, { name = "pillow", specifier = ">=11.0.0" },
{ name = "pydantic-ai-slim", extras = ["google", "openai"], specifier = ">=1.11.1" },
{ name = "pydantic-settings", specifier = ">=2.10.1" }, { name = "pydantic-settings", specifier = ">=2.10.1" },
{ name = "pypdf", specifier = ">=5.1.0" }, { name = "pypdf", specifier = ">=5.1.0" },
{ name = "python-dotenv", specifier = ">=1.1.1" }, { name = "python-dotenv", specifier = ">=1.1.1" },
@@ -289,6 +291,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" },
] ]
[[package]]
name = "genai-prices"
version = "0.0.36"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "pydantic" },
]
sdist = { url = "https://files.pythonhosted.org/packages/39/e2/45c863fb61cf2d70d948e80d63e4f3db213a957976a2a3564e40ebe8f506/genai_prices-0.0.36.tar.gz", hash = "sha256:1092f5b96168967fa880440dd9dcc9287fd73910b284045f0226a38f628ccbc9", size = 46046, upload-time = "2025-11-05T14:04:13.437Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/16/89/14b4be11b74dd29827bc37b648b0540fcf3bd6530cb48031f1ce7da4594c/genai_prices-0.0.36-py3-none-any.whl", hash = "sha256:7ad39e04fbcdb5cfdc3891e68de6ca1064b6660e06e9ba76fa6f161ff12b32e4", size = 48688, upload-time = "2025-11-05T14:04:12.133Z" },
]
[[package]]
name = "google-api-core"
version = "2.28.1"
@@ -486,6 +501,18 @@ grpc = [
{ name = "grpcio", version = "1.76.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "grpcio", version = "1.76.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" },
] ]
[[package]]
name = "griffe"
version = "1.14.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ec/d7/6c09dd7ce4c7837e4cdb11dce980cb45ae3cd87677298dc3b781b6bce7d3/griffe-1.14.0.tar.gz", hash = "sha256:9d2a15c1eca966d68e00517de5d69dd1bc5c9f2335ef6c1775362ba5b8651a13", size = 424684, upload-time = "2025-09-05T15:02:29.167Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/b1/9ff6578d789a89812ff21e4e0f80ffae20a65d5dd84e7a17873fe3b365be/griffe-1.14.0-py3-none-any.whl", hash = "sha256:0e9d52832cccf0f7188cfe585ba962d2674b241c01916d780925df34873bceb0", size = 144439, upload-time = "2025-09-05T15:02:27.511Z" },
]
[[package]]
name = "grpc-google-iam-v1"
version = "0.14.3"
@@ -776,6 +803,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
] ]
[[package]]
name = "importlib-metadata"
version = "8.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "zipp" },
]
sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" },
]
[[package]]
name = "isodate"
version = "0.7.2"
@@ -1009,6 +1048,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/98/4c/6c0c338ca7182e4ecb7af61049415e7b3513cc6cea9aa5bf8ca508f53539/langsmith-0.4.41-py3-none-any.whl", hash = "sha256:5cdc554e5f0361bf791fdd5e8dea16d5ba9dfce09b3b8f8bba5e99450c569b27", size = 399279, upload-time = "2025-11-04T22:31:30.268Z" }, { url = "https://files.pythonhosted.org/packages/98/4c/6c0c338ca7182e4ecb7af61049415e7b3513cc6cea9aa5bf8ca508f53539/langsmith-0.4.41-py3-none-any.whl", hash = "sha256:5cdc554e5f0361bf791fdd5e8dea16d5ba9dfce09b3b8f8bba5e99450c569b27", size = 399279, upload-time = "2025-11-04T22:31:30.268Z" },
] ]
[[package]]
name = "logfire-api"
version = "4.14.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/59/25/6072086af3b3ac5c2c2f2a6cf89488a1b228ffc6ee0fb357ed1e227efd13/logfire_api-4.14.2.tar.gz", hash = "sha256:bbdeccd931069b76ab811261b41bc52d8b78d1c045fc4b4237dbc085e0fb9bcd", size = 57604, upload-time = "2025-10-24T20:14:40.551Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/c7/b06a83df678fca882c24fb498e628e0406bdb95ffdfa7ae43ecc0a714d52/logfire_api-4.14.2-py3-none-any.whl", hash = "sha256:aa4af2ecb007c3e0095e25ba4526fd8c0e2c0be2ceceac71ca651c4ad86dc713", size = 95021, upload-time = "2025-10-24T20:14:36.161Z" },
]
[[package]]
name = "more-itertools"
version = "10.8.0"
@@ -1100,6 +1148,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/74/6bfc3adc81f6c2cea4439f2a734c40e3a420703bbcdc539890096a732bbd/openai-2.7.1-py3-none-any.whl", hash = "sha256:2f2530354d94c59c614645a4662b9dab0a5b881c5cd767a8587398feac0c9021", size = 1008780, upload-time = "2025-11-04T06:07:20.818Z" }, { url = "https://files.pythonhosted.org/packages/8c/74/6bfc3adc81f6c2cea4439f2a734c40e3a420703bbcdc539890096a732bbd/openai-2.7.1-py3-none-any.whl", hash = "sha256:2f2530354d94c59c614645a4662b9dab0a5b881c5cd767a8587398feac0c9021", size = 1008780, upload-time = "2025-11-04T06:07:20.818Z" },
] ]
[[package]]
name = "opentelemetry-api"
version = "1.38.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "importlib-metadata" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/08/d8/0f354c375628e048bd0570645b310797299754730079853095bf000fba69/opentelemetry_api-1.38.0.tar.gz", hash = "sha256:f4c193b5e8acb0912b06ac5b16321908dd0843d75049c091487322284a3eea12", size = 65242, upload-time = "2025-10-16T08:35:50.25Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/a2/d86e01c28300bd41bab8f18afd613676e2bd63515417b77636fc1add426f/opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582", size = 65947, upload-time = "2025-10-16T08:35:30.23Z" },
]
[[package]]
name = "orjson"
version = "3.11.4"
@@ -1364,6 +1425,32 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
] ]
[[package]]
name = "pydantic-ai-slim"
version = "1.11.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "genai-prices" },
{ name = "griffe" },
{ name = "httpx" },
{ name = "opentelemetry-api" },
{ name = "pydantic" },
{ name = "pydantic-graph" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/a5/fbfcdd3c89549dd44417606af0130f1118aea8e43f4d14723e49218901a6/pydantic_ai_slim-1.11.1.tar.gz", hash = "sha256:242fb5c7a0f812d540f68d4e2e6498730ef11644b55ccf3da38bf9767802f742", size = 298765, upload-time = "2025-11-06T00:48:42.815Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f8/6d/d8ea48afdd8838d6419cdbc08d81753e2e732ff3451e3d83f6b4b56388af/pydantic_ai_slim-1.11.1-py3-none-any.whl", hash = "sha256:00ca8b0a8f677fa9efd077239b66c925423d1dc517dfac7953b62547a66adbf2", size = 397971, upload-time = "2025-11-06T00:48:28.219Z" },
]
[package.optional-dependencies]
google = [
{ name = "google-genai" },
]
openai = [
{ name = "openai" },
]
[[package]]
name = "pydantic-core"
version = "2.33.2"
@@ -1406,6 +1493,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
] ]
[[package]]
name = "pydantic-graph"
version = "1.11.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "logfire-api" },
{ name = "pydantic" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6f/b6/1b37a9517bc71fde33184cc6f3f03795c3669b7be5a143a3012fb112742d/pydantic_graph-1.11.1.tar.gz", hash = "sha256:345d6309ac677ef6cf2f5b225e6762afd9b87cc916b943376a5cb555705a7f2b", size = 57964, upload-time = "2025-11-06T00:48:45.028Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/50/64/934e1f9be64f44515c501bf528cfc2dd672516530d5a7aa7436f72aba5ef/pydantic_graph-1.11.1-py3-none-any.whl", hash = "sha256:4d52d0c925672439e407d64e663a5e7f011f0bb0941c8b6476911044c7478cd6", size = 72002, upload-time = "2025-11-06T00:48:32.411Z" },
]
[[package]]
name = "pydantic-settings"
version = "2.10.1"
@@ -2148,6 +2250,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" },
] ]
[[package]]
name = "zipp"
version = "3.23.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
]
[[package]]
name = "zstandard"
version = "0.25.0"


@@ -16,7 +16,6 @@ import { Checkbox } from "@/components/ui/checkbox";
 import { FileUpload } from "./FileUpload";
 import { DeleteConfirmDialog } from "./DeleteConfirmDialog";
 import { PDFPreviewModal } from "./PDFPreviewModal";
-import { CollectionVerifier } from "./CollectionVerifier";
 import { ChunkViewerModal } from "./ChunkViewerModal";
 import {
   ChunkingConfigModalLandingAI,
@@ -31,6 +30,9 @@ import {
   Eye,
   MessageSquare,
   Scissors,
+  Loader2,
+  CheckCircle2,
+  AlertCircle,
 } from "lucide-react";

 interface DashboardProps {
@@ -78,14 +80,77 @@ export function Dashboard({ onProcessingChange }: DashboardProps = {}) {
   const [chunkingCollectionName, setChunkingCollectionName] = useState("");
   const [processing, setProcessing] = useState(false);

+  // Collection status states
+  const [isCheckingCollection, setIsCheckingCollection] = useState(false);
+  const [collectionExists, setCollectionExists] = useState<boolean | null>(
+    null,
+  );
+  const [collectionError, setCollectionError] = useState<string | null>(null);

   useEffect(() => {
     loadFiles();
   }, [selectedTema]);

+  // Check collection status when tema changes
+  useEffect(() => {
+    checkCollectionStatus();
+  }, [selectedTema]);
+
+  const checkCollectionStatus = async () => {
+    if (!selectedTema) {
+      setCollectionExists(null);
+      return;
+    }
+
+    setIsCheckingCollection(true);
+    setCollectionError(null);
+
+    try {
+      const result = await api.checkCollectionExists(selectedTema);
+      setCollectionExists(result.exists);
+    } catch (err) {
+      console.error("Error checking collection:", err);
+      setCollectionError(
+        err instanceof Error ? err.message : "Error al verificar colección",
+      );
+      setCollectionExists(null);
+    } finally {
+      setIsCheckingCollection(false);
+    }
+  };
+
+  const handleCreateCollection = async () => {
+    if (!selectedTema) return;
+
+    setIsCheckingCollection(true);
+    setCollectionError(null);
+
+    try {
+      const result = await api.createCollection(selectedTema);
+      if (result.success) {
+        setCollectionExists(true);
+        console.log(`Collection "${selectedTema}" created successfully`);
+      }
+    } catch (err) {
+      console.error("Error creating collection:", err);
+      setCollectionError(
+        err instanceof Error ? err.message : "Error al crear colección",
+      );
+    } finally {
+      setIsCheckingCollection(false);
+    }
+  };
+
   const loadFiles = async () => {
+    // Don't load files if no dataroom is selected
+    if (!selectedTema) {
+      setFiles([]);
+      return;
+    }
+
     try {
       setLoading(true);
-      const response = await api.getFiles(selectedTema || undefined);
+      const response = await api.getFiles(selectedTema);
       setFiles(response.files);
     } catch (error) {
       console.error("Error loading files:", error);
@@ -311,13 +376,54 @@ export function Dashboard({ onProcessingChange }: DashboardProps = {}) {
<div className="border-b border-gray-200 px-6 py-4"> <div className="border-b border-gray-200 px-6 py-4">
<div className="flex flex-wrap items-center justify-between gap-4"> <div className="flex flex-wrap items-center justify-between gap-4">
<div> <div>
<h2 className="text-2xl font-semibold text-gray-900"> <div className="flex items-center gap-3 mb-2">
<h2 className="text-2xl font-semibold text-gray-900">
{selectedTema
? `Dataroom: ${selectedTema}`
: "Selecciona un dataroom"}
</h2>
{/* Collection Status Indicator */}
{selectedTema && (
<div className="flex items-center gap-2">
{isCheckingCollection ? (
<>
<Loader2 className="w-4 h-4 animate-spin text-gray-500" />
<span className="text-xs text-gray-500">
Verificando...
</span>
</>
) : collectionExists === true ? (
<>
<CheckCircle2 className="w-4 h-4 text-green-600" />
<span className="text-xs text-green-600">
Colección disponible
</span>
</>
) : collectionExists === false ? (
<>
<AlertCircle className="w-4 h-4 text-yellow-600" />
<button
onClick={handleCreateCollection}
className="text-xs text-yellow-600 hover:text-yellow-700 underline"
>
Crear colección
</button>
</>
) : collectionError ? (
<>
<AlertCircle className="w-4 h-4 text-red-600" />
<span className="text-xs text-red-600">
Error de conexión
</span>
</>
) : null}
</div>
)}
</div>
<p className="text-sm text-gray-600">
{selectedTema {selectedTema
? `Tema actual: ${selectedTema}` ? `${totalFiles} archivo${totalFiles !== 1 ? "s" : ""}`
: "Todos los archivos"} : "Selecciona un dataroom de la barra lateral para ver sus archivos"}
</h2>
<p className="mt-1 text-sm text-gray-600">
{totalFiles} archivo{totalFiles !== 1 ? "s" : ""}
</p> </p>
</div> </div>
</div> </div>
@@ -422,9 +528,11 @@ export function Dashboard({ onProcessingChange }: DashboardProps = {}) {
<div className="flex flex-col items-center justify-center h-64"> <div className="flex flex-col items-center justify-center h-64">
<FileText className="w-12 h-12 text-gray-400 mb-4" /> <FileText className="w-12 h-12 text-gray-400 mb-4" />
<p className="text-gray-500"> <p className="text-gray-500">
{searchTerm {!selectedTema
? "No se encontraron archivos" ? "Selecciona un dataroom para ver sus archivos"
: "No hay archivos en este tema"} : searchTerm
? "No se encontraron archivos"
: "No hay archivos en este dataroom"}
</p> </p>
</div> </div>
) : ( ) : (
@@ -579,14 +687,6 @@ export function Dashboard({ onProcessingChange }: DashboardProps = {}) {
         onDownload={handleDownloadFromPreview}
       />

-      {/* Collection Verifier - Verifica/crea colección cuando se selecciona un tema */}
-      <CollectionVerifier
-        tema={selectedTema}
-        onVerified={(exists) => {
-          console.log(`Collection ${selectedTema} exists: ${exists}`);
-        }}
-      />
-
       {/* Chunk Viewer Modal */}
       <ChunkViewerModal
         isOpen={chunkViewerOpen}


@@ -92,14 +92,11 @@ export function Sidebar({
     setCreateError(null);

     try {
-      console.log("Creating dataroom:", trimmed);
       const result = await api.createDataroom({ name: trimmed });
-      console.log("Dataroom created successfully:", result);

       // Refresh the datarooms list (this will load all datarooms including the new one)
-      console.log("Refreshing dataroom list...");
       await loadTemas();
-      console.log("Dataroom list refreshed");

       // Select the newly created dataroom
       setSelectedTema(trimmed);
@@ -126,7 +123,6 @@ export function Sidebar({
     try {
       setLoading(true);
       const response = await api.getDatarooms();
-      console.log("Raw datarooms response:", response);

       // Extract dataroom names from the response with better error handling
       let dataroomNames: string[] = [];
@@ -137,18 +133,23 @@ export function Sidebar({
       }

       setTemas(dataroomNames);
-      console.log("Loaded datarooms:", dataroomNames);
+
+      // Auto-select first dataroom if none is selected and datarooms are available
+      if (!selectedTema && dataroomNames.length > 0) {
+        setSelectedTema(dataroomNames[0]);
+      }
     } catch (error) {
       console.error("Error loading datarooms:", error);
       // Fallback to legacy getTemas if dataroom endpoint fails
       try {
-        console.log("Falling back to legacy getTemas endpoint");
         const legacyResponse = await api.getTemas();
         const legacyTemas = Array.isArray(legacyResponse?.temas)
           ? legacyResponse.temas.filter(Boolean)
           : [];
         setTemas(legacyTemas);
-        console.log("Loaded legacy temas:", legacyTemas);
+
+        // Auto-select first legacy tema if none is selected
+        if (!selectedTema && legacyTemas.length > 0) {
+          setSelectedTema(legacyTemas[0]);
+        }
       } catch (legacyError) {
         console.error("Error loading legacy temas:", legacyError);
         // Ensure we always set an array, never undefined or null
@@ -186,11 +187,9 @@ export function Sidebar({
       // 1. Delete the dataroom (this will also delete the vector collection)
       try {
         await api.deleteDataroom(tema);
-        console.log(`Dataroom "${tema}" deleted successfully`);
       } catch (error) {
         console.error(`Error deleting dataroom "${tema}":`, error);
         // If dataroom deletion fails, fall back to legacy deletion
-        console.log("Falling back to legacy deletion methods");

         // Eliminar todos los archivos del tema en Azure Blob Storage
         await api.deleteTema(tema);
@@ -200,7 +199,6 @@ export function Sidebar({
         const collectionExists = await api.checkCollectionExists(tema);
         if (collectionExists.exists) {
           await api.deleteCollection(tema);
-          console.log(`Colección "${tema}" eliminada de Qdrant`);
         }
       } catch (collectionError) {
         console.warn(
@@ -229,28 +227,28 @@ export function Sidebar({
   return (
     <TooltipProvider delayDuration={100}>
-      <div className="bg-white border-r border-gray-200 flex flex-col h-full transition-[width] duration-300">
+      <div className="bg-slate-800 border-r border-slate-700 flex flex-col h-full transition-[width] duration-300">
         {/* Header */}
         <div
           className={cn(
-            "border-b border-gray-200 flex items-center gap-3",
+            "border-b border-slate-700 flex items-center gap-3",
             collapsed ? "p-4" : "p-6",
           )}
         >
           <div
             className={cn(
-              "flex items-center gap-2 text-gray-900 flex-1",
+              "flex items-center gap-2 text-slate-100 flex-1",
               collapsed ? "justify-center" : "justify-start",
             )}
           >
             <FileText className="h-6 w-6" />
-            {!collapsed && <h1 className="text-xl font-semibold">DoRa Luma</h1>}
+            {!collapsed && <h1 className="text-xl font-semibold">Luma</h1>}
           </div>
           {onToggleCollapse && (
             <Button
               variant="ghost"
               size="icon"
-              className="text-gray-500 hover:text-gray-900"
+              className="text-slate-400 hover:text-slate-100"
               onClick={onToggleCollapse}
               disabled={disabled}
               aria-label={
@@ -277,7 +275,7 @@ export function Sidebar({
         >
           <h2
             className={cn(
-              "text-sm font-medium text-gray-500",
+              "text-sm font-medium text-slate-300",
               collapsed && "text-xs text-center",
             )}
           >
@@ -286,10 +284,10 @@ export function Sidebar({
           {renderWithTooltip(
             "Crear dataroom",
             <Button
-              variant="outline"
+              variant="ghost"
               size="sm"
               className={cn(
-                "gap-2",
+                "gap-2 bg-slate-700/50 text-slate-200 hover:bg-slate-600 hover:text-slate-100 border border-slate-600",
                 collapsed
                   ? "h-10 w-10 p-0 justify-center rounded-full"
                   : "",
@@ -303,28 +301,9 @@
               )}
             )}
           </div>

-          {/* Todos los archivos */}
-          {renderWithTooltip(
-            "Todos los archivos",
-            <Button
-              variant={selectedTema === null ? "secondary" : "ghost"}
-              className={cn(
-                "w-full justify-start",
-                collapsed && "px-0 justify-center",
-              )}
-              onClick={() => handleTemaSelect(null)}
-              disabled={disabled}
-            >
-              <FolderIcon className={cn("h-4 w-4", !collapsed && "mr-2")} />
-              <span className={cn("truncate", collapsed && "sr-only")}>
-                Todos los archivos
-              </span>
-            </Button>,
-          )}
-
           {/* Lista de temas */}
           {loading ? (
-            <div className="text-sm text-gray-500 px-3 py-2 text-center">
+            <div className="text-sm text-slate-400 px-3 py-2 text-center">
               {collapsed ? "..." : "Cargando..."}
             </div>
           ) : Array.isArray(temas) && temas.length > 0 ? (
@@ -335,7 +314,8 @@ export function Sidebar({
                 <Button
                   variant={selectedTema === tema ? "secondary" : "ghost"}
                   className={cn(
-                    "w-full justify-start",
+                    "w-full justify-start text-slate-300 hover:bg-slate-700 hover:text-slate-100",
+                    selectedTema === tema && "bg-slate-700 text-slate-100",
                     collapsed ? "px-0 justify-center" : "pr-10",
                   )}
                   onClick={() => handleTemaSelect(tema)}
@@ -353,16 +333,16 @@ export function Sidebar({
                   <button
                     onClick={(e) => handleDeleteTema(tema, e)}
                     disabled={deletingTema === tema || disabled}
-                    className="absolute right-2 top-1/2 -translate-y-1/2 p-1.5 rounded hover:bg-red-100 opacity-0 group-hover:opacity-100 transition-opacity disabled:opacity-50"
+                    className="absolute right-2 top-1/2 -translate-y-1/2 p-1.5 rounded hover:bg-red-500/20 opacity-0 group-hover:opacity-100 transition-opacity disabled:opacity-50"
                     title="Eliminar dataroom y colección"
                   >
-                    <Trash2 className="h-4 w-4 text-red-600" />
+                    <Trash2 className="h-4 w-4 text-red-400" />
                   </button>
                 )}
               </div>
             ))
           ) : (
-            <div className="text-sm text-gray-500 px-3 py-2 text-center">
+            <div className="text-sm text-slate-400 px-3 py-2 text-center">
               {Array.isArray(temas) && temas.length === 0
                 ? "No hay datarooms"
                 : "Cargando datarooms..."}
@@ -374,7 +354,7 @@ export function Sidebar({
         {/* Footer */}
         <div
           className={cn(
-            "p-4 border-t border-gray-200 space-y-2",
+            "p-4 border-t border-slate-700 space-y-2",
             collapsed && "flex flex-col items-center gap-2",
           )}
         >
@@ -387,7 +367,7 @@ export function Sidebar({
               onClick={onNavigateToSchemas}
               disabled={disabled}
               className={cn(
-                "w-full justify-start",
+                "w-full justify-start bg-slate-700 text-slate-100 hover:bg-slate-600",
                 collapsed && "px-0 justify-center",
               )}
             >
@@ -400,12 +380,12 @@ export function Sidebar({
           {renderWithTooltip(
             "Actualizar datarooms",
             <Button
-              variant="outline"
+              variant="ghost"
               size="sm"
               onClick={loadTemas}
               disabled={loading || disabled}
               className={cn(
-                "w-full justify-start",
+                "w-full justify-start bg-slate-700/50 text-slate-200 hover:bg-slate-600 hover:text-slate-100 border border-slate-600",
                 collapsed && "px-0 justify-center",
               )}
             >


@@ -56,17 +56,9 @@ export const api = {
   // Obtener todos los datarooms
   getDatarooms: async (): Promise<DataroomsResponse> => {
-    console.log("Fetching datarooms from:", `${API_BASE_URL}/dataroom/`);
     const response = await fetch(`${API_BASE_URL}/dataroom/`);
-    console.log("Datarooms response status:", response.status);
-    if (!response.ok) {
-      const errorText = await response.text();
-      console.error("Datarooms fetch error:", errorText);
-      throw new Error("Error fetching datarooms");
-    }
-    const data = await response.json();
-    console.log("Datarooms API response:", data);
-    return data;
+    if (!response.ok) throw new Error("Error fetching datarooms");
+    return response.json();
   },

   // Crear un nuevo dataroom