diff --git a/backend/app/core/config.py b/backend/app/core/config.py
index 9c11d48..bf700a7 100644
--- a/backend/app/core/config.py
+++ b/backend/app/core/config.py
@@ -1,6 +1,6 @@
-import os
 from typing import List
-from pydantic import validator
+
+from pydantic import RedisDsn
 from pydantic_settings import BaseSettings
 
 
@@ -8,20 +8,22 @@ class Settings(BaseSettings):
     """
     Configuración básica de la aplicación
     """
-
+
     # Configuración básica de la aplicación
     APP_NAME: str = "File Manager API"
     DEBUG: bool = False
     HOST: str = "0.0.0.0"
     PORT: int = 8000
-
+
     # Configuración de CORS para React frontend
     ALLOWED_ORIGINS: List[str] = [
         "http://localhost:3000",  # React dev server
         "http://localhost:5173",
-        "http://frontend:3000", # Docker container name
+        "http://frontend:3000",  # Docker container name
     ]
-
+
+    REDIS_OM_URL: RedisDsn
+
     # Azure Blob Storage configuración
     AZURE_STORAGE_CONNECTION_STRING: str
     AZURE_STORAGE_ACCOUNT_NAME: str = ""
@@ -52,66 +54,10 @@ class Settings(BaseSettings):
     # Schemas storage
     SCHEMAS_DIR: str = "./data/schemas"
 
-    @validator("AZURE_STORAGE_CONNECTION_STRING")
-    def validate_azure_connection_string(cls, v):
-        """Validar que el connection string de Azure esté presente"""
-        if not v:
-            raise ValueError("AZURE_STORAGE_CONNECTION_STRING es requerido")
-        return v
-
-    @validator("QDRANT_URL")
-    def validate_qdrant_url(cls, v):
-        """Validar que la URL de Qdrant esté presente"""
-        if not v:
-            raise ValueError("QDRANT_URL es requerido")
-        return v
-
-    @validator("QDRANT_API_KEY")
-    def validate_qdrant_api_key(cls, v):
-        """Validar que la API key de Qdrant esté presente"""
-        if not v:
-            raise ValueError("QDRANT_API_KEY es requerido")
-        return v
-
-    @validator("AZURE_OPENAI_ENDPOINT")
-    def validate_azure_openai_endpoint(cls, v):
-        """Validar que el endpoint de Azure OpenAI esté presente"""
-        if not v:
-            raise ValueError("AZURE_OPENAI_ENDPOINT es requerido")
-        return v
-
-    @validator("AZURE_OPENAI_API_KEY")
-    def validate_azure_openai_api_key(cls, v):
-        """Validar que la API key de Azure OpenAI esté presente"""
-        if not v:
-            raise ValueError("AZURE_OPENAI_API_KEY es requerido")
-        return v
-
-    @validator("GOOGLE_APPLICATION_CREDENTIALS")
-    def validate_google_credentials(cls, v):
-        """Validar que el path de credenciales de Google esté presente"""
-        if not v:
-            raise ValueError("GOOGLE_APPLICATION_CREDENTIALS es requerido")
-        return v
-
-    @validator("GOOGLE_CLOUD_PROJECT")
-    def validate_google_project(cls, v):
-        """Validar que el proyecto de Google Cloud esté presente"""
-        if not v:
-            raise ValueError("GOOGLE_CLOUD_PROJECT es requerido")
-        return v
-
-    @validator("LANDINGAI_API_KEY")
-    def validate_landingai_api_key(cls, v):
-        """Validar que la API key de LandingAI esté presente"""
-        if not v:
-            raise ValueError("LANDINGAI_API_KEY es requerido")
-        return v
-
     class Config:
         env_file = ".env"
         case_sensitive = True
 
 
 # Instancia global de configuración
-settings = Settings()
\ No newline at end of file
+settings = Settings.model_validate({})
diff --git a/backend/app/main.py b/backend/app/main.py
index 7831056..865dde2 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -1,16 +1,21 @@
+import logging
+from contextlib import asynccontextmanager
+
+import uvicorn
 from fastapi import FastAPI, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
-import uvicorn
-import logging
+
+from .core.config import settings
+from .routers.chunking import router as chunking_router
+from .routers.chunking_landingai import router as chunking_landingai_router
+from .routers.dataroom import router as dataroom_router
 
 # Import routers
 from .routers.files import router as files_router
-from .routers.vectors import router as vectors_router
-from .routers.chunking import router as chunking_router
 from .routers.schemas import router as schemas_router
-from .routers.chunking_landingai import router as chunking_landingai_router
-from .core.config import settings
+from .routers.vectors import router as vectors_router
+
 # from routers.ai import router as ai_router  # futuro con Azure OpenAI
 
 # Import config
@@ -18,18 +23,31 @@ from .core.config import settings
 
 # Configurar logging
 logging.basicConfig(
-    level=logging.INFO,
-    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
 )
 logger = logging.getLogger(__name__)
 
 
+@asynccontextmanager
+async def lifespan(_: FastAPI):
+    logger.info("Iniciando File Manager API...")
+    logger.info(
+        f"Conectando a Azure Storage Account: {settings.AZURE_STORAGE_ACCOUNT_NAME}"
+    )
+    logger.info(f"Conectando a Qdrant: {settings.QDRANT_URL}")
+
+    yield
+
+    logger.info("Cerrando File Manager API...")
+    # Cleanup de recursos si es necesario
+
+
 app = FastAPI(
     title="File Manager API",
     description=" DoRa",
     version="1.0.0",
     docs_url="/docs",
-    redoc_url="/redoc"
+    redoc_url="/redoc",
 )
 
 # Configurar CORS para React frontend
@@ -41,6 +59,7 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
+
 # Middleware para logging de requests
 @app.middleware("http")
 async def log_requests(request, call_next):
@@ -49,19 +68,17 @@ async def log_requests(request, call_next):
     logger.info(f"Response: {response.status_code}")
     return response
 
+
 # Manejador global de excepciones
 @app.exception_handler(HTTPException)
 async def http_exception_handler(request, exc):
     logger.error(f"HTTP Exception: {exc.status_code} - {exc.detail}")
     return JSONResponse(
         status_code=exc.status_code,
-        content={
-            "error": True,
-            "message": exc.detail,
-            "status_code": exc.status_code
-        }
+        content={"error": True, "message": exc.detail, "status_code": exc.status_code},
     )
 
+
 @app.exception_handler(Exception)
 async def general_exception_handler(request, exc):
     logger.error(f"Unhandled Exception: {str(exc)}")
@@ -70,10 +87,11 @@ async def general_exception_handler(request, exc):
         content={
             "error": True,
             "message": "Error interno del servidor",
-            "status_code": 500
-        }
+            "status_code": 500,
+        },
     )
 
+
 # Health check endpoint
 @app.get("/health")
 async def health_check():
@@ -81,9 +99,10 @@ async def health_check():
     return {
         "status": "healthy",
         "message": "File Manager API está funcionando correctamente",
-        "version": "1.0.0"
+        "version": "1.0.0",
     }
 
+
 # Root endpoint
 @app.get("/")
 async def root():
@@ -92,27 +111,16 @@ async def root():
         "message": "File Manager API",
         "version": "1.0.0",
         "docs": "/docs",
-        "health": "/health"
+        "health": "/health",
     }
 
+
 # Incluir routers
-app.include_router(
-    files_router,
-    prefix="/api/v1/files",
-    tags=["files"]
-)
+app.include_router(files_router, prefix="/api/v1/files", tags=["files"])
 
-app.include_router(
-    vectors_router,
-    prefix="/api/v1",
-    tags=["vectors"]
-)
+app.include_router(vectors_router, prefix="/api/v1", tags=["vectors"])
 
-app.include_router(
-    chunking_router,
-    prefix="/api/v1",
-    tags=["chunking"]
-)
+app.include_router(chunking_router, prefix="/api/v1", tags=["chunking"])
 
 # Schemas router (nuevo)
 app.include_router(schemas_router)
@@ -120,6 +128,8 @@ app.include_router(schemas_router)
 # Chunking LandingAI router (nuevo)
 app.include_router(chunking_landingai_router)
 
+app.include_router(dataroom_router, prefix="/api/v1")
+
 # Router para IA
 # app.include_router(
 #     ai_router,
@@ -127,21 +137,6 @@ app.include_router(chunking_landingai_router)
 #     tags=["ai"]
 # )
 
-# Evento de startup
-@app.on_event("startup")
-async def startup_event():
-    logger.info("Iniciando File Manager API...")
-    logger.info(f"Conectando a Azure Storage Account: {settings.AZURE_STORAGE_ACCOUNT_NAME}")
-    logger.info(f"Conectando a Qdrant: {settings.QDRANT_URL}")
-    # validaciones de conexión a Azure
-
-
-# Evento de shutdown
-@app.on_event("shutdown")
-async def shutdown_event():
-    logger.info("Cerrando File Manager API...")
-    # Cleanup de recursos si es necesario
-
 
 if __name__ == "__main__":
     uvicorn.run(
@@ -149,5 +144,5 @@ if __name__ == "__main__":
         host=settings.HOST,
         port=settings.PORT,
         reload=settings.DEBUG,
-        log_level="info"
-    )
\ No newline at end of file
+        log_level="info",
+    )
diff --git a/backend/app/models/dataroom.py b/backend/app/models/dataroom.py
new file mode 100644
index 0000000..5384810
--- /dev/null
+++ b/backend/app/models/dataroom.py
@@ -0,0 +1,10 @@
+from redis_om import HashModel, Migrator
+
+
+class DataRoom(HashModel):
+    name: str
+    collection: str
+    storage: str
+
+
+Migrator().run()
diff --git a/backend/app/routers/dataroom.py b/backend/app/routers/dataroom.py
new file mode 100644
index 0000000..353f1ec
--- /dev/null
+++ b/backend/app/routers/dataroom.py
@@ -0,0 +1,150 @@
+import logging
+
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel
+
+from ..models.dataroom import DataRoom
+from ..models.vector_models import CollectionCreateRequest
+from ..services.vector_service import vector_service
+
+logger = logging.getLogger(__name__)
+
+
+class DataroomCreate(BaseModel):
+    name: str
+    collection: str = ""
+    storage: str = ""
+
+
+router = APIRouter(prefix="/dataroom", tags=["Dataroom"])
+
+
+@router.get("/")
+async def list_datarooms():
+    """
+    Listar todos los temas disponibles
+    """
+    try:
+        # Get all DataRoom instances
+        datarooms: list[DataRoom] = DataRoom.find().all()
+        logger.info(f"Found {len(datarooms)} datarooms in Redis")
+
+        # Convert to list of dictionaries
+        dataroom_list = [
+            {"name": room.name, "collection": room.collection, "storage": room.storage}
+            for room in datarooms
+        ]
+
+        logger.info(f"Returning dataroom list: {dataroom_list}")
+        return {"datarooms": dataroom_list}
+    except Exception as e:
+        logger.error(f"Error listing datarooms: {e}")
+        raise HTTPException(
+            status_code=500, detail=f"Error listing datarooms: {str(e)}"
+        )
+
+
+@router.post("/")
+async def create_dataroom(dataroom: DataroomCreate):
+    """
+    Crear un nuevo dataroom y su colección vectorial asociada
+    """
+    try:
+        # Create new DataRoom instance
+        new_dataroom = DataRoom(
+            name=dataroom.name, collection=dataroom.collection, storage=dataroom.storage
+        )
+
+        # Save to Redis
+        new_dataroom.save()
+
+        # Create the vector collection for this dataroom
+        try:
+            # First check if collection already exists
+            collection_exists_response = await vector_service.check_collection_exists(
+                dataroom.name
+            )
+
+            if not collection_exists_response.exists:
+                # Only create if it doesn't exist
+                collection_request = CollectionCreateRequest(
+                    collection_name=dataroom.name,
+                    vector_size=3072,  # Default vector size for embeddings
+                    distance="Cosine",  # Default distance metric
+                )
+                await vector_service.create_collection(collection_request)
+                logger.info(f"Collection '{dataroom.name}' created successfully")
+            else:
+                logger.info(
+                    f"Collection '{dataroom.name}' already exists, skipping creation"
+                )
+        except Exception as e:
+            # Log the error but don't fail the dataroom creation
+            logger.warning(
+                f"Could not create collection for dataroom '{dataroom.name}': {e}"
+            )
+
+        return {
+            "message": "Dataroom created successfully",
+            "dataroom": {
+                "name": new_dataroom.name,
+                "collection": new_dataroom.collection,
+                "storage": new_dataroom.storage,
+            },
+        }
+    except Exception as e:
+        raise HTTPException(
+            status_code=500, detail=f"Error creating dataroom: {str(e)}"
+        )
+
+
+@router.delete("/{dataroom_name}")
+async def delete_dataroom(dataroom_name: str):
+    """
+    Eliminar un dataroom y su colección vectorial asociada
+    """
+    try:
+        # First check if dataroom exists
+        existing_datarooms = DataRoom.find().all()
+        dataroom_exists = any(room.name == dataroom_name for room in existing_datarooms)
+
+        if not dataroom_exists:
+            raise HTTPException(
+                status_code=404, detail=f"Dataroom '{dataroom_name}' not found"
+            )
+
+        # Delete the vector collection first
+        try:
+            collection_exists = await vector_service.check_collection_exists(
+                dataroom_name
+            )
+            if collection_exists.exists:
+                await vector_service.delete_collection(dataroom_name)
+                logger.info(
+                    f"Collection '{dataroom_name}' deleted from vector database"
+                )
+        except Exception as e:
+            logger.warning(
+                f"Could not delete collection '{dataroom_name}' from vector database: {e}"
+            )
+            # Continue with dataroom deletion even if collection deletion fails
+
+        # Delete the dataroom from Redis
+        for room in existing_datarooms:
+            if room.name == dataroom_name:
+                # Delete using the primary key
+                DataRoom.delete(room.pk)
+                logger.info(f"Dataroom '{dataroom_name}' deleted from Redis")
+                break
+
+        return {
+            "message": "Dataroom deleted successfully",
+            "dataroom_name": dataroom_name,
+        }
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Error deleting dataroom '{dataroom_name}': {e}")
+        raise HTTPException(
+            status_code=500, detail=f"Error deleting dataroom: {str(e)}"
+        )
diff --git a/backend/app/routers/datarooms.py b/backend/app/routers/datarooms.py
deleted file mode 100644
index e69de29..0000000
diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py
index 60735a7..323e05c 100644
--- a/backend/app/routers/files.py
+++ b/backend/app/routers/files.py
@@ -1,18 +1,28 @@
-from fastapi import APIRouter, UploadFile, File, HTTPException, Query, Form
-from fastapi.responses import StreamingResponse, Response
-from typing import Optional, List
+import io
 import logging
 import os
 import zipfile
-import io
 from datetime import datetime
+from typing import List, Optional
 
+from fastapi import APIRouter, File, Form, HTTPException, Query, UploadFile
+from fastapi.responses import Response, StreamingResponse
+
+from ..models.dataroom import DataRoom
 from ..models.file_models import (
-    FileUploadRequest, FileUploadResponse, FileInfo, FileListResponse,
-    FileDeleteResponse, FileBatchDeleteRequest,
-    FileConflictResponse, FileBatchDeleteResponse,
-    FileBatchDownloadRequest, TemasListResponse,
-    FileUploadCheckRequest, FileUploadConfirmRequest, ErrorResponse
+    ErrorResponse,
+    FileBatchDeleteRequest,
+    FileBatchDeleteResponse,
+    FileBatchDownloadRequest,
+    FileConflictResponse,
+    FileDeleteResponse,
+    FileInfo,
+    FileListResponse,
+    FileUploadCheckRequest,
+    FileUploadConfirmRequest,
+    FileUploadRequest,
+    FileUploadResponse,
+    TemasListResponse,
 )
 from ..services.azure_service import azure_service
 from ..services.file_service import file_service
@@ -31,27 +41,27 @@ async def check_file_before_upload(request: FileUploadCheckRequest):
         is_valid, error_msg = file_service.validate_filename(request.filename)
         if not is_valid:
             raise HTTPException(status_code=400, detail=error_msg)
-
+
         # Validar extensión
         is_valid, error_msg = file_service.validate_file_extension(request.filename)
         if not is_valid:
             raise HTTPException(status_code=400, detail=error_msg)
-
+
         # Limpiar tema
         clean_tema = file_service.clean_tema_name(request.tema or "")
-
+
         # Verificar si existe conflicto
         has_conflict, suggested_name = await file_service.handle_file_conflict(
             request.filename, clean_tema
         )
-
+
         if has_conflict:
             return FileConflictResponse(
                 conflict=True,
                 message=f"El archivo '{request.filename}' ya existe en el tema '{clean_tema or 'general'}'",
                 existing_file=request.filename,
                 suggested_name=suggested_name,
-                tema=clean_tema
+                tema=clean_tema,
             )
         else:
             # No hay conflicto, se puede subir directamente
@@ -60,14 +70,16 @@ async def check_file_before_upload(request: FileUploadCheckRequest):
                 message="Archivo disponible para subir",
                 existing_file=request.filename,
                 suggested_name=request.filename,
-                tema=clean_tema
+                tema=clean_tema,
             )
-
+
     except HTTPException:
         raise
     except Exception as e:
         logger.error(f"Error verificando archivo '{request.filename}': {e}")
-        raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}")
+        raise HTTPException(
+            status_code=500, detail=f"Error interno del servidor: {str(e)}"
+        )
 
 
 @router.post("/upload/confirm", response_model=FileUploadResponse)
@@ -75,7 +87,7 @@ async def upload_file_with_confirmation(
     file: UploadFile = File(...),
     action: str = Form(...),
     tema: Optional[str] = Form(None),
-    new_filename: Optional[str] = Form(None)
+    new_filename: Optional[str] = Form(None),
 ):
     """
     Subir archivo con confirmación de acción para conflictos
@@ -84,61 +96,54 @@ async def upload_file_with_confirmation(
         # Validar archivo
         if not file.filename:
             raise HTTPException(status_code=400, detail="Nombre de archivo requerido")
-
+
         # Crear request de confirmación para validaciones
         confirm_request = FileUploadConfirmRequest(
-            filename=file.filename,
-            tema=tema,
-            action=action,
-            new_filename=new_filename
+            filename=file.filename, tema=tema, action=action, new_filename=new_filename
         )
-
+
         # Si la acción es cancelar, no hacer nada
         if confirm_request.action == "cancel":
             return FileUploadResponse(
-                success=False,
-                message="Subida cancelada por el usuario",
-                file=None
+                success=False, message="Subida cancelada por el usuario", file=None
             )
-
+
         # Determinar el nombre final del archivo
         final_filename = file.filename
        if confirm_request.action == "rename" and confirm_request.new_filename:
             final_filename = confirm_request.new_filename
-
+
         # Validar extensión del archivo final
         is_valid, error_msg = file_service.validate_file_extension(final_filename)
         if not is_valid:
             raise HTTPException(status_code=400, detail=error_msg)
-
+
         # Leer contenido del archivo
         file_content = await file.read()
-
+
         # Validar tamaño del archivo
         is_valid, error_msg = file_service.validate_file_size(len(file_content))
         if not is_valid:
             raise HTTPException(status_code=400, detail=error_msg)
-
+
         # Limpiar tema
         clean_tema = file_service.clean_tema_name(confirm_request.tema or "")
-
+
         # Si es sobrescribir, verificar que el archivo original exista
         if confirm_request.action == "overwrite":
             exists = await file_service.check_file_exists(file.filename, clean_tema)
             if not exists:
                 raise HTTPException(
-                    status_code=404,
-                    detail=f"Archivo '{file.filename}' no existe para sobrescribir"
para sobrescribir" + status_code=404, + detail=f"Archivo '{file.filename}' no existe para sobrescribir", ) - + # Subir archivo a Azure file_stream = io.BytesIO(file_content) uploaded_file_info = await azure_service.upload_file( - file_data=file_stream, - blob_name=final_filename, - tema=clean_tema + file_data=file_stream, blob_name=final_filename, tema=clean_tema ) - + # Crear objeto FileInfo file_info = FileInfo( name=uploaded_file_info["name"], @@ -146,75 +151,95 @@ async def upload_file_with_confirmation( tema=uploaded_file_info["tema"], size=uploaded_file_info["size"], last_modified=uploaded_file_info["last_modified"], - url=uploaded_file_info["url"] + url=uploaded_file_info["url"], ) - + action_msg = { "overwrite": "sobrescrito", - "rename": f"renombrado a '{final_filename}'" + "rename": f"renombrado a '{final_filename}'", } - - logger.info(f"Archivo '{file.filename}' {action_msg.get(confirm_request.action, 'subido')} exitosamente") - + + logger.info( + f"Archivo '{file.filename}' {action_msg.get(confirm_request.action, 'subido')} exitosamente" + ) + return FileUploadResponse( success=True, message=f"Archivo {action_msg.get(confirm_request.action, 'subido')} exitosamente", - file=file_info + file=file_info, ) - + except HTTPException: raise except Exception as e: logger.error(f"Error en subida confirmada: {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.post("/upload", response_model=FileUploadResponse) -async def upload_file( - file: UploadFile = File(...), - tema: Optional[str] = Form(None) -): +async def upload_file(file: UploadFile = File(...), tema: Optional[str] = Form(None)): """ Subir un archivo al almacenamiento """ try: + # Validar que el dataroom existe si se proporciona un tema + if tema: + existing_datarooms = DataRoom.find().all() + dataroom_exists = any(room.name == tema for room in existing_datarooms) + + if not dataroom_exists: + raise HTTPException( + status_code=400, + detail=f"El dataroom '{tema}' no existe. Créalo primero antes de subir archivos.", + ) + # Validar archivo if not file.filename: raise HTTPException(status_code=400, detail="Nombre de archivo requerido") - + # Validar extensión del archivo file_extension = os.path.splitext(file.filename)[1].lower() - allowed_extensions = ['.pdf', '.doc', '.docx', '.xls', '.xlsx', '.ppt', '.pptx', '.txt', '.csv'] - + allowed_extensions = [ + ".pdf", + ".doc", + ".docx", + ".xls", + ".xlsx", + ".ppt", + ".pptx", + ".txt", + ".csv", + ] + if file_extension not in allowed_extensions: raise HTTPException( - status_code=400, - detail=f"Tipo de archivo no permitido. Extensiones permitidas: {', '.join(allowed_extensions)}" + status_code=400, + detail=f"Tipo de archivo no permitido. Extensiones permitidas: {', '.join(allowed_extensions)}", ) - + # Leer contenido del archivo file_content = await file.read() - + # Validar tamaño del archivo (100MB máximo) max_size = 100 * 1024 * 1024 # 100MB if len(file_content) > max_size: raise HTTPException( status_code=400, - detail=f"Archivo demasiado grande. Tamaño máximo permitido: 100MB" + detail=f"Archivo demasiado grande. 
Tamaño máximo permitido: 100MB", ) - + # Procesar tema upload_request = FileUploadRequest(tema=tema) processed_tema = upload_request.tema or "" - + # Subir archivo a Azure file_stream = io.BytesIO(file_content) uploaded_file_info = await azure_service.upload_file( - file_data=file_stream, - blob_name=file.filename, - tema=processed_tema + file_data=file_stream, blob_name=file.filename, tema=processed_tema ) - + # Crear objeto FileInfo file_info = FileInfo( name=uploaded_file_info["name"], @@ -222,22 +247,24 @@ async def upload_file( tema=uploaded_file_info["tema"], size=uploaded_file_info["size"], last_modified=uploaded_file_info["last_modified"], - url=uploaded_file_info["url"] + url=uploaded_file_info["url"], ) - - logger.info(f"Archivo '{file.filename}' subido exitosamente al tema '{processed_tema}'") - + + logger.info( + f"Archivo '{file.filename}' subido exitosamente al tema '{processed_tema}'" + ) + return FileUploadResponse( - success=True, - message="Archivo subido exitosamente", - file=file_info + success=True, message="Archivo subido exitosamente", file=file_info ) - + except HTTPException: raise except Exception as e: logger.error(f"Error subiendo archivo: {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.get("/", response_model=FileListResponse) @@ -248,7 +275,7 @@ async def list_files(tema: Optional[str] = Query(None, description="Filtrar por try: # Obtener archivos de Azure files_data = await azure_service.list_files(tema=tema or "") - + # Convertir a objetos FileInfo files_info = [] for file_data in files_data: @@ -258,21 +285,22 @@ async def list_files(tema: Optional[str] = Query(None, description="Filtrar por tema=file_data["tema"], size=file_data["size"], last_modified=file_data["last_modified"], - content_type=file_data.get("content_type") + content_type=file_data.get("content_type"), ) files_info.append(file_info) - - logger.info(f"Listados {len(files_info)} archivos" + (f" del tema '{tema}'" if tema else "")) - - return FileListResponse( - files=files_info, - total=len(files_info), - tema=tema + + logger.info( + f"Listados {len(files_info)} archivos" + + (f" del tema '{tema}'" if tema else "") ) - + + return FileListResponse(files=files_info, total=len(files_info), tema=tema) + except Exception as e: logger.error(f"Error listando archivos: {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.get("/temas", response_model=TemasListResponse) @@ -283,31 +311,30 @@ async def list_temas(): try: # Obtener todos los archivos files_data = await azure_service.list_files() - + # Extraer temas únicos temas = set() for file_data in files_data: if file_data["tema"]: temas.add(file_data["tema"]) - + temas_list = sorted(list(temas)) - + logger.info(f"Encontrados {len(temas_list)} temas") - - return TemasListResponse( - temas=temas_list, - total=len(temas_list) - ) - + + return TemasListResponse(temas=temas_list, total=len(temas_list)) + except Exception as e: logger.error(f"Error listando temas: {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.get("/{filename}/download") async def download_file( filename: str, - tema: Optional[str] = Query(None, description="Tema 
donde está el archivo") + tema: Optional[str] = Query(None, description="Tema donde está el archivo"), ): """ Descargar un archivo individual @@ -315,64 +342,71 @@ async def download_file( try: # Descargar archivo de Azure file_content = await azure_service.download_file( - blob_name=filename, - tema=tema or "" + blob_name=filename, tema=tema or "" ) - + # Obtener información del archivo para content-type file_info = await azure_service.get_file_info( - blob_name=filename, - tema=tema or "" + blob_name=filename, tema=tema or "" ) - + # Determinar content-type content_type = file_info.get("content_type", "application/octet-stream") - - logger.info(f"Descargando archivo '{filename}'" + (f" del tema '{tema}'" if tema else "")) - + + logger.info( + f"Descargando archivo '{filename}'" + + (f" del tema '{tema}'" if tema else "") + ) + return Response( content=file_content, media_type=content_type, - headers={ - "Content-Disposition": f"attachment; filename={filename}" - } + headers={"Content-Disposition": f"attachment; filename={filename}"}, ) - + except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Archivo '{filename}' no encontrado") + raise HTTPException( + status_code=404, detail=f"Archivo '{filename}' no encontrado" + ) except Exception as e: logger.error(f"Error descargando archivo '{filename}': {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.delete("/{filename}", response_model=FileDeleteResponse) async def delete_file( filename: str, - tema: Optional[str] = Query(None, description="Tema donde está el archivo") + tema: Optional[str] = Query(None, description="Tema donde está el archivo"), ): """ Eliminar un archivo """ try: # Eliminar archivo de Azure - await azure_service.delete_file( - blob_name=filename, - tema=tema or "" + await azure_service.delete_file(blob_name=filename, tema=tema or "") + + logger.info( + f"Archivo '{filename}' eliminado exitosamente" + + (f" del tema '{tema}'" if tema else "") ) - - logger.info(f"Archivo '{filename}' eliminado exitosamente" + (f" del tema '{tema}'" if tema else "")) - + return FileDeleteResponse( success=True, message="Archivo eliminado exitosamente", - deleted_file=filename + deleted_file=filename, ) - + except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Archivo '{filename}' no encontrado") + raise HTTPException( + status_code=404, detail=f"Archivo '{filename}' no encontrado" + ) except Exception as e: logger.error(f"Error eliminando archivo '{filename}': {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.post("/delete-batch", response_model=FileBatchDeleteResponse) @@ -383,34 +417,35 @@ async def delete_batch_files(request: FileBatchDeleteRequest): try: deleted_files = [] failed_files = [] - + for filename in request.files: try: await azure_service.delete_file( - blob_name=filename, - tema=request.tema or "" + blob_name=filename, tema=request.tema or "" ) deleted_files.append(filename) logger.info(f"Archivo '{filename}' eliminado exitosamente") except Exception as e: failed_files.append(filename) logger.error(f"Error eliminando archivo '{filename}': {e}") - + success = len(failed_files) == 0 message = f"Eliminados {len(deleted_files)} archivos exitosamente" if failed_files: message += f", {len(failed_files)} archivos 
fallaron" - + return FileBatchDeleteResponse( success=success, message=message, deleted_files=deleted_files, - failed_files=failed_files + failed_files=failed_files, ) - + except Exception as e: logger.error(f"Error en eliminación batch: {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.post("/download-batch") @@ -421,44 +456,43 @@ async def download_batch_files(request: FileBatchDownloadRequest): try: # Crear ZIP en memoria zip_buffer = io.BytesIO() - - with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file: + + with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file: for filename in request.files: try: # Descargar archivo de Azure file_content = await azure_service.download_file( - blob_name=filename, - tema=request.tema or "" + blob_name=filename, tema=request.tema or "" ) - + # Agregar al ZIP zip_file.writestr(filename, file_content) logger.info(f"Archivo '{filename}' agregado al ZIP") - + except Exception as e: logger.error(f"Error agregando '{filename}' al ZIP: {e}") # Continuar con otros archivos continue - + zip_buffer.seek(0) - + # Generar nombre del ZIP timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") zip_filename = f"{request.zip_name}_{timestamp}.zip" - + logger.info(f"ZIP creado exitosamente: {zip_filename}") - + return StreamingResponse( io.BytesIO(zip_buffer.read()), media_type="application/zip", - headers={ - "Content-Disposition": f"attachment; filename={zip_filename}" - } + headers={"Content-Disposition": f"attachment; filename={zip_filename}"}, ) - + except Exception as e: logger.error(f"Error creando ZIP: {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.get("/tema/{tema}/download-all") @@ -469,54 +503,58 @@ async def download_tema_completo(tema: str): try: # Obtener todos los archivos del tema files_data = await azure_service.list_files(tema=tema) - + if not files_data: - raise HTTPException(status_code=404, detail=f"No se encontraron archivos en el tema '{tema}'") - + raise HTTPException( + status_code=404, + detail=f"No se encontraron archivos en el tema '{tema}'", + ) + # Crear ZIP en memoria zip_buffer = io.BytesIO() - - with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file: + + with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file: for file_data in files_data: try: filename = file_data["name"] - + # Descargar archivo de Azure file_content = await azure_service.download_file( - blob_name=filename, - tema=tema + blob_name=filename, tema=tema ) - + # Agregar al ZIP zip_file.writestr(filename, file_content) - logger.info(f"Archivo '{filename}' agregado al ZIP del tema '{tema}'") - + logger.info( + f"Archivo '{filename}' agregado al ZIP del tema '{tema}'" + ) + except Exception as e: logger.error(f"Error agregando '{filename}' al ZIP: {e}") # Continuar con otros archivos continue - + zip_buffer.seek(0) - + # Generar nombre del ZIP timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") zip_filename = f"{tema}_{timestamp}.zip" - + logger.info(f"ZIP del tema '{tema}' creado exitosamente: {zip_filename}") - + return StreamingResponse( io.BytesIO(zip_buffer.read()), media_type="application/zip", - headers={ - "Content-Disposition": f"attachment; filename={zip_filename}" - } + headers={"Content-Disposition": f"attachment; 
filename={zip_filename}"}, ) - + except HTTPException: raise except Exception as e: logger.error(f"Error creando ZIP del tema '{tema}': {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.delete("/tema/{tema}/delete-all", response_model=FileBatchDeleteResponse) @@ -527,51 +565,59 @@ async def delete_tema_completo(tema: str): try: # Obtener todos los archivos del tema files_data = await azure_service.list_files(tema=tema) - + if not files_data: - raise HTTPException(status_code=404, detail=f"No se encontraron archivos en el tema '{tema}'") - + raise HTTPException( + status_code=404, + detail=f"No se encontraron archivos en el tema '{tema}'", + ) + deleted_files = [] failed_files = [] - + for file_data in files_data: filename = file_data["name"] try: - await azure_service.delete_file( - blob_name=filename, - tema=tema - ) + await azure_service.delete_file(blob_name=filename, tema=tema) deleted_files.append(filename) logger.info(f"Archivo '{filename}' eliminado del tema '{tema}'") except Exception as e: failed_files.append(filename) - logger.error(f"Error eliminando archivo '{filename}' del tema '{tema}': {e}") - + logger.error( + f"Error eliminando archivo '{filename}' del tema '{tema}': {e}" + ) + success = len(failed_files) == 0 - message = f"Tema '{tema}': eliminados {len(deleted_files)} archivos exitosamente" + message = ( + f"Tema '{tema}': eliminados {len(deleted_files)} archivos exitosamente" + ) if failed_files: message += f", {len(failed_files)} archivos fallaron" - - logger.info(f"Eliminación completa del tema '{tema}': {len(deleted_files)} exitosos, {len(failed_files)} fallidos") - + + logger.info( + f"Eliminación completa del tema '{tema}': {len(deleted_files)} exitosos, {len(failed_files)} fallidos" + ) + return FileBatchDeleteResponse( success=success, message=message, deleted_files=deleted_files, - failed_files=failed_files + failed_files=failed_files, ) - + except HTTPException: raise except Exception as e: logger.error(f"Error eliminando tema '{tema}': {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + raise HTTPException( + status_code=500, detail=f"Error interno del servidor: {str(e)}" + ) @router.get("/{filename}/info", response_model=FileInfo) async def get_file_info( filename: str, - tema: Optional[str] = Query(None, description="Tema donde está el archivo") + tema: Optional[str] = Query(None, description="Tema donde está el archivo"), ): """ Obtener información detallada de un archivo @@ -579,8 +625,7 @@ async def get_file_info( try: # Obtener información de Azure file_data = await azure_service.get_file_info( - blob_name=filename, - tema=tema or "" + blob_name=filename, tema=tema or "" ) # Convertir a objeto FileInfo @@ -591,24 +636,30 @@ async def get_file_info( size=file_data["size"], last_modified=file_data["last_modified"], content_type=file_data.get("content_type"), - url=file_data.get("url") + url=file_data.get("url"), ) logger.info(f"Información obtenida para archivo '{filename}'") return file_info except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Archivo '{filename}' no encontrado") + raise HTTPException( + status_code=404, detail=f"Archivo '{filename}' no encontrado" + ) except Exception as e: logger.error(f"Error obteniendo info del archivo '{filename}': {e}") - raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}") + 
+        raise HTTPException(
+            status_code=500, detail=f"Error interno del servidor: {str(e)}"
+        )
 
 
 @router.get("/{filename}/preview-url")
 async def get_file_preview_url(
     filename: str,
     tema: Optional[str] = Query(None, description="Tema donde está el archivo"),
-    expiry_hours: int = Query(1, description="Horas de validez de la URL (máximo 24)", ge=1, le=24)
+    expiry_hours: int = Query(
+        1, description="Horas de validez de la URL (máximo 24)", ge=1, le=24
+    ),
 ):
     """
     Generar una URL temporal (SAS) para vista previa de archivos
@@ -633,23 +684,28 @@ async def get_file_preview_url(
     try:
         # Generar SAS URL usando el servicio de Azure
         sas_url = await azure_service.generate_sas_url(
-            blob_name=filename,
-            tema=tema or "",
-            expiry_hours=expiry_hours
+            blob_name=filename, tema=tema or "", expiry_hours=expiry_hours
         )
 
-        logger.info(f"SAS URL generada para preview de '{filename}'" + (f" del tema '{tema}'" if tema else ""))
+        logger.info(
+            f"SAS URL generada para preview de '{filename}'"
+            + (f" del tema '{tema}'" if tema else "")
+        )
 
         return {
             "success": True,
             "filename": filename,
             "url": sas_url,
             "expiry_hours": expiry_hours,
-            "message": f"URL temporal generada (válida por {expiry_hours} hora{'s' if expiry_hours > 1 else ''})"
+            "message": f"URL temporal generada (válida por {expiry_hours} hora{'s' if expiry_hours > 1 else ''})",
         }
 
     except FileNotFoundError:
-        raise HTTPException(status_code=404, detail=f"Archivo '{filename}' no encontrado")
+        raise HTTPException(
+            status_code=404, detail=f"Archivo '{filename}' no encontrado"
+        )
     except Exception as e:
         logger.error(f"Error generando preview URL para '{filename}': {e}")
-        raise HTTPException(status_code=500, detail=f"Error interno del servidor: {str(e)}")
\ No newline at end of file
+        raise HTTPException(
+            status_code=500, detail=f"Error interno del servidor: {str(e)}"
+        )
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
index f6bc0f3..49c4dc5 100644
--- a/backend/pyproject.toml
+++ b/backend/pyproject.toml
@@ -27,6 +27,7 @@ dependencies = [
     "langchain-text-splitters>=1.0.0",
     # LandingAI Document AI
     "landingai-ade>=0.2.1",
+    "redis-om>=0.3.5",
 ]
 
 [project.scripts]
 dev = "uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload"
diff --git a/backend/uv.lock b/backend/uv.lock
index 6305699..a997156 100644
--- a/backend/uv.lock
+++ b/backend/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.12"
 resolution-markers = [
     "python_full_version >= '3.14'",
@@ -79,6 +79,7 @@ dependencies = [
     { name = "python-dotenv" },
     { name = "python-multipart" },
     { name = "qdrant-client" },
+    { name = "redis-om" },
     { name = "tiktoken" },
     { name = "uvicorn", extra = ["standard"] },
     { name = "websockets" },
@@ -101,6 +102,7 @@ requires-dist = [
     { name = "python-dotenv", specifier = ">=1.1.1" },
     { name = "python-multipart", specifier = ">=0.0.20" },
     { name = "qdrant-client", specifier = ">=1.15.1" },
+    { name = "redis-om", specifier = ">=0.3.5" },
     { name = "tiktoken", specifier = ">=0.8.0" },
     { name = "uvicorn", extras = ["standard"], specifier = ">=0.35.0" },
     { name = "websockets", specifier = ">=14.1" },
@@ -632,6 +634,66 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" },
 ]
 
+[[package]]
+name = "hiredis"
+version = "3.3.0"
+source = { registry = "https://pypi.org/simple" }
"https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048, upload-time = "2025-10-14T16:33:34.263Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026, upload-time = "2025-10-14T16:32:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217, upload-time = "2025-10-14T16:32:13.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858, upload-time = "2025-10-14T16:32:13.98Z" }, + { url = "https://files.pythonhosted.org/packages/f1/98/b2a42878b82130a535c7aa20bc937ba2d07d72e9af3ad1ad93e837c419b5/hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8", size = 170195, upload-time = "2025-10-14T16:32:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/9dcde7a75115d3601b016113d9b90300726fa8e48aacdd11bf01a453c145/hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3", size = 181808, upload-time = "2025-10-14T16:32:15.622Z" }, + { url = "https://files.pythonhosted.org/packages/56/a1/60f6bda9b20b4e73c85f7f5f046bc2c154a5194fc94eb6861e1fd97ced52/hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7", size = 180578, upload-time = "2025-10-14T16:32:16.514Z" }, + { url = "https://files.pythonhosted.org/packages/d9/01/859d21de65085f323a701824e23ea3330a0ac05f8e184544d7aa5c26128d/hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc", size = 172508, upload-time = "2025-10-14T16:32:17.411Z" }, + { url = "https://files.pythonhosted.org/packages/99/a8/28fd526e554c80853d0fbf57ef2a3235f00e4ed34ce0e622e05d27d0f788/hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c", size = 166341, upload-time = "2025-10-14T16:32:18.561Z" }, + { url = "https://files.pythonhosted.org/packages/f2/91/ded746b7d2914f557fbbf77be55e90d21f34ba758ae10db6591927c642c8/hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071", size = 176765, upload-time = "2025-10-14T16:32:19.491Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4c/04aa46ff386532cb5f08ee495c2bf07303e93c0acf2fa13850e031347372/hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542", size = 170312, upload-time = 
"2025-10-14T16:32:20.404Z" }, + { url = "https://files.pythonhosted.org/packages/90/6e/67f9d481c63f542a9cf4c9f0ea4e5717db0312fb6f37fb1f78f3a66de93c/hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8", size = 167965, upload-time = "2025-10-14T16:32:21.259Z" }, + { url = "https://files.pythonhosted.org/packages/7a/df/dde65144d59c3c0d85e43255798f1fa0c48d413e668cfd92b3d9f87924ef/hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068", size = 20533, upload-time = "2025-10-14T16:32:22.192Z" }, + { url = "https://files.pythonhosted.org/packages/f5/a9/55a4ac9c16fdf32e92e9e22c49f61affe5135e177ca19b014484e28950f7/hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0", size = 22379, upload-time = "2025-10-14T16:32:22.916Z" }, + { url = "https://files.pythonhosted.org/packages/6d/39/2b789ebadd1548ccb04a2c18fbc123746ad1a7e248b7f3f3cac618ca10a6/hiredis-3.3.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:b7048b4ec0d5dddc8ddd03da603de0c4b43ef2540bf6e4c54f47d23e3480a4fa", size = 82035, upload-time = "2025-10-14T16:32:23.715Z" }, + { url = "https://files.pythonhosted.org/packages/85/74/4066d9c1093be744158ede277f2a0a4e4cd0fefeaa525c79e2876e9e5c72/hiredis-3.3.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:e5f86ce5a779319c15567b79e0be806e8e92c18bb2ea9153e136312fafa4b7d6", size = 46219, upload-time = "2025-10-14T16:32:24.554Z" }, + { url = "https://files.pythonhosted.org/packages/fa/3f/f9e0f6d632f399d95b3635703e1558ffaa2de3aea4cfcbc2d7832606ba43/hiredis-3.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fbdb97a942e66016fff034df48a7a184e2b7dc69f14c4acd20772e156f20d04b", size = 41860, upload-time = "2025-10-14T16:32:25.356Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c5/b7dde5ec390dabd1cabe7b364a509c66d4e26de783b0b64cf1618f7149fc/hiredis-3.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0fb4bea72fe45ff13e93ddd1352b43ff0749f9866263b5cca759a4c960c776f", size = 170094, upload-time = "2025-10-14T16:32:26.148Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d6/7f05c08ee74d41613be466935688068e07f7b6c55266784b5ace7b35b766/hiredis-3.3.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:85b9baf98050e8f43c2826ab46aaf775090d608217baf7af7882596aef74e7f9", size = 181746, upload-time = "2025-10-14T16:32:27.844Z" }, + { url = "https://files.pythonhosted.org/packages/0e/d2/aaf9f8edab06fbf5b766e0cae3996324297c0516a91eb2ca3bd1959a0308/hiredis-3.3.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69079fb0f0ebb61ba63340b9c4bce9388ad016092ca157e5772eb2818209d930", size = 180465, upload-time = "2025-10-14T16:32:29.185Z" }, + { url = "https://files.pythonhosted.org/packages/8d/1e/93ded8b9b484519b211fc71746a231af98c98928e3ebebb9086ed20bb1ad/hiredis-3.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17f77b79031ea4b0967d30255d2ae6e7df0603ee2426ad3274067f406938236", size = 172419, upload-time = "2025-10-14T16:32:30.059Z" }, + { url = "https://files.pythonhosted.org/packages/68/13/02880458e02bbfcedcaabb8f7510f9dda1c89d7c1921b1bb28c22bb38cbf/hiredis-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d14f745fc177bc05fc24bdf20e2b515e9a068d3d4cce90a0fb78d04c9c9d9a", 
+    { url = "https://files.pythonhosted.org/packages/11/60/896e03267670570f19f61dc65a2137fcb2b06e83ab0911d58eeec9f3cb88/hiredis-3.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ba063fdf1eff6377a0c409609cbe890389aefddfec109c2d20fcc19cfdafe9da", size = 176845, upload-time = "2025-10-14T16:32:32.12Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/90/a1d4bd0cdcf251fda72ac0bd932f547b48ad3420f89bb2ef91bf6a494534/hiredis-3.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1799cc66353ad066bfdd410135c951959da9f16bcb757c845aab2f21fc4ef099", size = 170365, upload-time = "2025-10-14T16:32:33.035Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/9a/7c98f7bb76bdb4a6a6003cf8209721f083e65d2eed2b514f4a5514bda665/hiredis-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2cbf71a121996ffac82436b6153290815b746afb010cac19b3290a1644381b07", size = 168022, upload-time = "2025-10-14T16:32:34.81Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/ca/672ee658ffe9525558615d955b554ecd36aa185acd4431ccc9701c655c9b/hiredis-3.3.0-cp313-cp313-win32.whl", hash = "sha256:a7cbbc6026bf03659f0b25e94bbf6e64f6c8c22f7b4bc52fe569d041de274194", size = 20533, upload-time = "2025-10-14T16:32:35.7Z" },
+    { url = "https://files.pythonhosted.org/packages/20/93/511fd94f6a7b6d72a4cf9c2b159bf3d780585a9a1dca52715dd463825299/hiredis-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:a8def89dd19d4e2e4482b7412d453dec4a5898954d9a210d7d05f60576cedef6", size = 22387, upload-time = "2025-10-14T16:32:36.441Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/b3/b948ee76a6b2bc7e45249861646f91f29704f743b52565cf64cee9c4658b/hiredis-3.3.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c135bda87211f7af9e2fd4e046ab433c576cd17b69e639a0f5bb2eed5e0e71a9", size = 82105, upload-time = "2025-10-14T16:32:37.204Z" },
+    { url = "https://files.pythonhosted.org/packages/a2/9b/4210f4ebfb3ab4ada964b8de08190f54cbac147198fb463cd3c111cc13e0/hiredis-3.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2f855c678230aed6fc29b962ce1cc67e5858a785ef3a3fd6b15dece0487a2e60", size = 46237, upload-time = "2025-10-14T16:32:38.07Z" },
+    { url = "https://files.pythonhosted.org/packages/b3/7a/e38bfd7d04c05036b4ccc6f42b86b1032185cf6ae426e112a97551fece14/hiredis-3.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4059c78a930cbb33c391452ccce75b137d6f89e2eebf6273d75dafc5c2143c03", size = 41894, upload-time = "2025-10-14T16:32:38.929Z" },
+    { url = "https://files.pythonhosted.org/packages/28/d3/eae43d9609c5d9a6effef0586ee47e13a0d84b44264b688d97a75cd17ee5/hiredis-3.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:334a3f1d14c253bb092e187736c3384203bd486b244e726319bbb3f7dffa4a20", size = 170486, upload-time = "2025-10-14T16:32:40.147Z" },
+    { url = "https://files.pythonhosted.org/packages/c3/fd/34d664554880b27741ab2916d66207357563b1639e2648685f4c84cfb755/hiredis-3.3.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd137b147235447b3d067ec952c5b9b95ca54b71837e1b38dbb2ec03b89f24fc", size = 182031, upload-time = "2025-10-14T16:32:41.06Z" },
+    { url = "https://files.pythonhosted.org/packages/08/a3/0c69fdde3f4155b9f7acc64ccffde46f312781469260061b3bbaa487fd34/hiredis-3.3.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f88f4f2aceb73329ece86a1cb0794fdbc8e6d614cb5ca2d1023c9b7eb432db8", size = 180542, upload-time = "2025-10-14T16:32:42.993Z" },
upload-time = "2025-10-14T16:32:42.993Z" }, + { url = "https://files.pythonhosted.org/packages/68/7a/ad5da4d7bc241e57c5b0c4fe95aa75d1f2116e6e6c51577394d773216e01/hiredis-3.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:550f4d1538822fc75ebf8cf63adc396b23d4958bdbbad424521f2c0e3dfcb169", size = 172353, upload-time = "2025-10-14T16:32:43.965Z" }, + { url = "https://files.pythonhosted.org/packages/4b/dc/c46eace64eb047a5b31acd5e4b0dc6d2f0390a4a3f6d507442d9efa570ad/hiredis-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54b14211fbd5930fc696f6fcd1f1f364c660970d61af065a80e48a1fa5464dd6", size = 166435, upload-time = "2025-10-14T16:32:44.97Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ac/ad13a714e27883a2e4113c980c94caf46b801b810de5622c40f8d3e8335f/hiredis-3.3.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9e96f63dbc489fc86f69951e9f83dadb9582271f64f6822c47dcffa6fac7e4a", size = 177218, upload-time = "2025-10-14T16:32:45.936Z" }, + { url = "https://files.pythonhosted.org/packages/c2/38/268fabd85b225271fe1ba82cb4a484fcc1bf922493ff2c74b400f1a6f339/hiredis-3.3.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:106e99885d46684d62ab3ec1d6b01573cc0e0083ac295b11aaa56870b536c7ec", size = 170477, upload-time = "2025-10-14T16:32:46.898Z" }, + { url = "https://files.pythonhosted.org/packages/20/6b/02bb8af810ea04247334ab7148acff7a61c08a8832830c6703f464be83a9/hiredis-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:087e2ef3206361281b1a658b5b4263572b6ba99465253e827796964208680459", size = 167915, upload-time = "2025-10-14T16:32:47.847Z" }, + { url = "https://files.pythonhosted.org/packages/83/94/901fa817e667b2e69957626395e6dee416e31609dca738f28e6b545ca6c2/hiredis-3.3.0-cp314-cp314-win32.whl", hash = "sha256:80638ebeab1cefda9420e9fedc7920e1ec7b4f0513a6b23d58c9d13c882f8065", size = 21165, upload-time = "2025-10-14T16:32:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/b1/7e/4881b9c1d0b4cdaba11bd10e600e97863f977ea9d67c5988f7ec8cd363e5/hiredis-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a68aaf9ba024f4e28cf23df9196ff4e897bd7085872f3a30644dca07fa787816", size = 22996, upload-time = "2025-10-14T16:32:51.543Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b6/d7e6c17da032665a954a89c1e6ee3bd12cb51cd78c37527842b03519981d/hiredis-3.3.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:f7f80442a32ce51ee5d89aeb5a84ee56189a0e0e875f1a57bbf8d462555ae48f", size = 83034, upload-time = "2025-10-14T16:32:52.395Z" }, + { url = "https://files.pythonhosted.org/packages/27/6c/6751b698060cdd1b2d8427702cff367c9ed7a1705bcf3792eb5b896f149b/hiredis-3.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a1a67530da714954ed50579f4fe1ab0ddbac9c43643b1721c2cb226a50dde263", size = 46701, upload-time = "2025-10-14T16:32:53.572Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8e/20a5cf2c83c7a7e08c76b9abab113f99f71cd57468a9c7909737ce6e9bf8/hiredis-3.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:616868352e47ab355559adca30f4f3859f9db895b4e7bc71e2323409a2add751", size = 42381, upload-time = "2025-10-14T16:32:54.762Z" }, + { url = "https://files.pythonhosted.org/packages/be/0a/547c29c06e8c9c337d0df3eec39da0cf1aad701daf8a9658dd37f25aca66/hiredis-3.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e799b79f3150083e9702fc37e6243c0bd47a443d6eae3f3077b0b3f510d6a145", size = 180313, upload-time = "2025-10-14T16:32:55.644Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/8a/488de5469e3d0921a1c425045bf00e983d48b2111a90e47cf5769eaa536c/hiredis-3.3.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ef1dfb0d2c92c3701655e2927e6bbe10c499aba632c7ea57b6392516df3864b", size = 190488, upload-time = "2025-10-14T16:32:56.649Z" }, + { url = "https://files.pythonhosted.org/packages/b5/59/8493edc3eb9ae0dbea2b2230c2041a52bc03e390b02ffa3ac0bca2af9aea/hiredis-3.3.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c290da6bc2a57e854c7da9956cd65013483ede935677e84560da3b848f253596", size = 189210, upload-time = "2025-10-14T16:32:57.759Z" }, + { url = "https://files.pythonhosted.org/packages/f0/de/8c9a653922057b32fb1e2546ecd43ef44c9aa1a7cf460c87cae507eb2bc7/hiredis-3.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd8c438d9e1728f0085bf9b3c9484d19ec31f41002311464e75b69550c32ffa8", size = 180972, upload-time = "2025-10-14T16:32:58.737Z" }, + { url = "https://files.pythonhosted.org/packages/e4/a3/51e6e6afaef2990986d685ca6e254ffbd191f1635a59b2d06c9e5d10c8a2/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1bbc6b8a88bbe331e3ebf6685452cebca6dfe6d38a6d4efc5651d7e363ba28bd", size = 175315, upload-time = "2025-10-14T16:32:59.774Z" }, + { url = "https://files.pythonhosted.org/packages/96/54/e436312feb97601f70f8b39263b8da5ac4a5d18305ebdfb08ad7621f6119/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:55d8c18fe9a05496c5c04e6eccc695169d89bf358dff964bcad95696958ec05f", size = 185653, upload-time = "2025-10-14T16:33:00.749Z" }, + { url = "https://files.pythonhosted.org/packages/ed/a3/88e66030d066337c6c0f883a912c6d4b2d6d7173490fbbc113a6cbe414ff/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:4ddc79afa76b805d364e202a754666cb3c4d9c85153cbfed522871ff55827838", size = 179032, upload-time = "2025-10-14T16:33:01.711Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1f/fb7375467e9adaa371cd617c2984fefe44bdce73add4c70b8dd8cab1b33a/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e8a4b8540581dcd1b2b25827a54cfd538e0afeaa1a0e3ca87ad7126965981cc", size = 176127, upload-time = "2025-10-14T16:33:02.793Z" }, + { url = "https://files.pythonhosted.org/packages/66/14/0dc2b99209c400f3b8f24067273e9c3cb383d894e155830879108fb19e98/hiredis-3.3.0-cp314-cp314t-win32.whl", hash = "sha256:298593bb08487753b3afe6dc38bac2532e9bac8dcee8d992ef9977d539cc6776", size = 22024, upload-time = "2025-10-14T16:33:03.812Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" }, +] + [[package]] name = "hpack" version = "4.1.0" @@ -947,6 +1009,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/4c/6c0c338ca7182e4ecb7af61049415e7b3513cc6cea9aa5bf8ca508f53539/langsmith-0.4.41-py3-none-any.whl", hash = "sha256:5cdc554e5f0361bf791fdd5e8dea16d5ba9dfce09b3b8f8bba5e99450c569b27", size = 399279, upload-time = "2025-11-04T22:31:30.268Z" }, ] +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", 
hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + [[package]] name = "numpy" version = "2.3.2" @@ -1349,6 +1420,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, ] +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + [[package]] name = "pypdf" version = "6.1.3" @@ -1388,6 +1468,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, ] +[[package]] +name = "python-ulid" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/8b/0580d8ee0a73a3f3869488856737c429cbaa08b63c3506275f383c4771a8/python-ulid-1.1.0.tar.gz", hash = "sha256:5fb5e4a91db8ca93e8938a613360b3def299b60d41f847279a8c39c9b2e9c65e", size = 19992, upload-time = "2022-03-10T15:11:41.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/8e/c30b08ee9b8dc9b4a10e782c2a7fd5de55388201ddebfe0f7ab99dfbb349/python_ulid-1.1.0-py3-none-any.whl", hash = "sha256:88c952f6be133dbede19c907d72d26717d2691ec8421512b573144794d891e24", size = 9360, upload-time = "2022-03-10T15:11:40.405Z" }, +] + [[package]] name = "pywin32" version = "311" @@ -1449,6 +1538,38 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/33/d8df6a2b214ffbe4138db9a1efe3248f67dc3c671f82308bea1582ecbbb7/qdrant_client-1.15.1-py3-none-any.whl", hash = "sha256:2b975099b378382f6ca1cfb43f0d59e541be6e16a5892f282a4b8de7eff5cb63", size = 337331, upload-time = "2025-07-31T19:35:17.539Z" }, ] +[[package]] +name = "redis" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyjwt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/cf/128b1b6d7086200c9f387bd4be9b2572a30b90745ef078bd8b235042dc9f/redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c", size = 4626200, upload-time = "2025-07-25T08:06:27.778Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7f/26/5c5fa0e83c3621db835cfc1f1d789b37e7fa99ed54423b5f519beb931aa7/redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97", size = 272833, upload-time = "2025-07-25T08:06:26.317Z" }, +] + +[[package]] +name = "redis-om" +version = "0.3.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "hiredis" }, + { name = "more-itertools" }, + { name = "pydantic" }, + { name = "python-ulid" }, + { name = "redis" }, + { name = "setuptools" }, + { name = "types-redis" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/32/9bdcb86b88f5b53fd9f80019a62970ded91e4befb65c03fee17bdb2bc9f0/redis_om-0.3.5.tar.gz", hash = "sha256:fd152ccebc9b47604287a347628ef0d2c0051c13d5653f121193e801bb1cc4a7", size = 78939, upload-time = "2025-04-04T12:54:51.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/60/2cc6753c2c36a2a5dded8c380c6cad67a26c5878cd7aad56de2eee1d63c8/redis_om-0.3.5-py3-none-any.whl", hash = "sha256:99ab40f696028ce47c5e2eb5118a1ffc1fd193005428df89c8cf77ad35a0177a", size = 86634, upload-time = "2025-04-04T12:54:50.07Z" }, +] + [[package]] name = "regex" version = "2025.11.3" @@ -1566,6 +1687,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, ] +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + [[package]] name = "shapely" version = "2.1.2" @@ -1716,6 +1846,53 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] +[[package]] +name = "types-cffi" +version = "1.17.0.20250915" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" }, +] + +[[package]] +name = "types-pyopenssl" +version = "24.1.0.20240722" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "types-cffi" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458, upload-time = "2024-07-22T02:32:22.558Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499, upload-time = "2024-07-22T02:32:21.232Z" }, +] + +[[package]] +name = "types-redis" +version = "4.6.0.20241004" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "types-pyopenssl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679, upload-time = "2024-10-04T02:43:59.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737, upload-time = "2024-10-04T02:43:57.968Z" }, +] + +[[package]] +name = "types-setuptools" +version = "80.9.0.20250822" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/bd/1e5f949b7cb740c9f0feaac430e301b8f1c5f11a81e26324299ea671a237/types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965", size = 41296, upload-time = "2025-08-22T03:02:08.771Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/2d/475bf15c1cdc172e7a0d665b6e373ebfb1e9bf734d3f2f543d668b07a142/types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3", size = 63179, upload-time = "2025-08-22T03:02:07.643Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0" diff --git a/docker-compose.yml b/docker-compose.yml index 9a9c58d..f980425 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,8 +6,6 @@ services: volumes: - ./frontend:/app - /app/node_modules - environment: - - VITE_API_URL=http://localhost:8000 depends_on: - backend networks: @@ -20,11 +18,22 @@ services: ports: - "8000:8000" volumes: - - ./backend:/app - - /app/.venv + - ./backend/app:/app/app + - ./backend/.secrets:/app/.secrets + env_file: + - .env + networks: + - app-network + + db: + # docker run -p 6379:6379 -p 8001:8001 redis/redis-stack + image: redis/redis-stack:latest + ports: + - 6379:6379 + - 8001:8001 networks: - app-network networks: app-network: - driver: bridge \ No newline at end of file + driver: bridge diff --git a/frontend/src/components/Dashboard.tsx b/frontend/src/components/Dashboard.tsx index 691f2b6..9f9ad37 100644 --- a/frontend/src/components/Dashboard.tsx +++ b/frontend/src/components/Dashboard.tsx @@ -324,22 +324,22 @@ export function Dashboard({ onProcessingChange }: DashboardProps = {}) {
- + Dashboard Files Chat diff --git a/frontend/src/components/Sidebar.tsx b/frontend/src/components/Sidebar.tsx index d0cb243..3f3e824 100644 --- a/frontend/src/components/Sidebar.tsx +++ b/frontend/src/components/Sidebar.tsx @@ -10,6 +10,7 @@ import { ChevronLeft, ChevronRight, RefreshCcw, + Plus, } from "lucide-react"; import { cn } from "@/lib/utils"; import { @@ -18,6 +19,16 @@ import { TooltipProvider, TooltipTrigger, } from "@/components/ui/tooltip"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; interface SidebarProps { onNavigateToSchemas?: () => void; @@ -42,6 +53,10 @@ export function Sidebar({ } = useFileStore(); const [deletingTema, setDeletingTema] = useState(null); + const [createDialogOpen, setCreateDialogOpen] = useState(false); + const [newDataroomName, setNewDataroomName] = useState(""); + const [creatingDataroom, setCreatingDataroom] = useState(false); + const [createError, setCreateError] = useState(null); const renderWithTooltip = (label: string, element: ReactElement) => { if (!collapsed) { @@ -58,6 +73,51 @@ export function Sidebar({ ); }; + const handleCreateDialogOpenChange = (open: boolean) => { + setCreateDialogOpen(open); + if (!open) { + setNewDataroomName(""); + setCreateError(null); + } + }; + + const handleCreateDataroom = async () => { + const trimmed = newDataroomName.trim(); + if (!trimmed) { + setCreateError("El nombre es obligatorio"); + return; + } + + setCreatingDataroom(true); + setCreateError(null); + + try { + console.log("Creating dataroom:", trimmed); + const result = await api.createDataroom({ name: trimmed }); + console.log("Dataroom created successfully:", result); + + // Refresh the datarooms list (this will load all datarooms including the new one) + console.log("Refreshing dataroom list..."); + await loadTemas(); + console.log("Dataroom list refreshed"); + + // Select the newly created dataroom + setSelectedTema(trimmed); + + // Close dialog and show success + handleCreateDialogOpenChange(false); + } catch (error) { + console.error("Error creating dataroom:", error); + setCreateError( + error instanceof Error + ? error.message + : "No se pudo crear el dataroom. Inténtalo nuevamente.", + ); + } finally { + setCreatingDataroom(false); + } + }; + useEffect(() => { loadTemas(); }, []); @@ -65,10 +125,35 @@ export function Sidebar({ const loadTemas = async () => { try { setLoading(true); - const response = await api.getTemas(); - setTemas(response.temas); + const response = await api.getDatarooms(); + console.log("Raw datarooms response:", response); + + // Extract dataroom names from the response with better error handling + let dataroomNames: string[] = []; + if (response && response.datarooms && Array.isArray(response.datarooms)) { + dataroomNames = response.datarooms + .filter((dataroom) => dataroom && dataroom.name) + .map((dataroom) => dataroom.name); + } + + setTemas(dataroomNames); + console.log("Loaded datarooms:", dataroomNames); } catch (error) { - console.error("Error loading temas:", error); + console.error("Error loading datarooms:", error); + // Fallback to legacy getTemas if dataroom endpoint fails + try { + console.log("Falling back to legacy getTemas endpoint"); + const legacyResponse = await api.getTemas(); + const legacyTemas = Array.isArray(legacyResponse?.temas) + ? 
legacyResponse.temas.filter(Boolean) + : []; + setTemas(legacyTemas); + console.log("Loaded legacy temas:", legacyTemas); + } catch (legacyError) { + console.error("Error loading legacy temas:", legacyError); + // Ensure we always set an array, never undefined or null + setTemas([]); + } } finally { setLoading(false); } @@ -85,8 +170,9 @@ export function Sidebar({ e.stopPropagation(); // Evitar que se seleccione el tema al hacer clic en el icono const confirmed = window.confirm( - `¿Estás seguro de que deseas eliminar el tema "${tema}"?\n\n` + + `¿Estás seguro de que deseas eliminar el dataroom "${tema}"?\n\n` + `Esto eliminará:\n` + + `• El dataroom de la base de datos\n` + `• Todos los archivos del tema en Azure Blob Storage\n` + `• La colección "${tema}" en Qdrant (si existe)\n\n` + `Esta acción no se puede deshacer.`, @@ -97,35 +183,44 @@ export function Sidebar({ try { setDeletingTema(tema); - // 1. Eliminar todos los archivos del tema en Azure Blob Storage - await api.deleteTema(tema); - - // 2. Intentar eliminar la colección en Qdrant (si existe) + // 1. Delete the dataroom (this will also delete the vector collection) try { - const collectionExists = await api.checkCollectionExists(tema); - if (collectionExists.exists) { - await api.deleteCollection(tema); - console.log(`Colección "${tema}" eliminada de Qdrant`); - } + await api.deleteDataroom(tema); + console.log(`Dataroom "${tema}" deleted successfully`); } catch (error) { - console.warn( - `No se pudo eliminar la colección "${tema}" de Qdrant:`, - error, - ); - // Continuar aunque falle la eliminación de la colección + console.error(`Error deleting dataroom "${tema}":`, error); + // If dataroom deletion fails, fall back to legacy deletion + console.log("Falling back to legacy deletion methods"); + + // Eliminar todos los archivos del tema en Azure Blob Storage + await api.deleteTema(tema); + + // Intentar eliminar la colección en Qdrant (si existe) + try { + const collectionExists = await api.checkCollectionExists(tema); + if (collectionExists.exists) { + await api.deleteCollection(tema); + console.log(`Colección "${tema}" eliminada de Qdrant`); + } + } catch (collectionError) { + console.warn( + `No se pudo eliminar la colección "${tema}" de Qdrant:`, + collectionError, + ); + } } - // 3. Actualizar la lista de temas + // 2. Actualizar la lista de temas await loadTemas(); - // 4. Si el tema eliminado estaba seleccionado, deseleccionar + // 3. Si el tema eliminado estaba seleccionado, deseleccionar if (selectedTema === tema) { setSelectedTema(null); } } catch (error) { - console.error(`Error eliminando tema "${tema}":`, error); + console.error(`Error eliminando dataroom "${tema}":`, error); alert( - `Error al eliminar el tema: ${error instanceof Error ? error.message : "Error desconocido"}`, + `Error al eliminar el dataroom: ${error instanceof Error ? error.message : "Error desconocido"}`, ); } finally { setDeletingTema(null); @@ -174,14 +269,39 @@ export function Sidebar({ {/* Temas List */}
-          <h2 …>
-            {collapsed ? "Coll." : "Collections"}
-          </h2>
+          <div …>
+            <h2 …>
+              {collapsed ? "Rooms" : "Datarooms"}
+            </h2>
+            {renderWithTooltip(
+              "Crear dataroom",
+              <Button …>
+                <Plus … />
+              </Button>,
+            )}
+          </div>
          {/* Todos los archivos */}
          {renderWithTooltip(
@@ -207,7 +327,7 @@ export function Sidebar({
              {collapsed ? "..." : "Cargando..."}
            </div>
-          ) : (
+          ) : Array.isArray(temas) && temas.length > 0 ? (
            temas.map((tema) => (
              <div …>
                {renderWithTooltip(
@@ -234,13 +354,19 @@ export function Sidebar({
                <button
                  onClick={(e) => handleDeleteTema(tema, e)}
                  disabled={deletingTema === tema || disabled}
                  className="absolute right-2 top-1/2 -translate-y-1/2 p-1.5 rounded hover:bg-red-100 opacity-0 group-hover:opacity-100 transition-opacity disabled:opacity-50"
-                 title="Eliminar tema y colección"
+                 title="Eliminar dataroom y colección"
                >
                  …
                </button>
              )}
            </div>
          ))
+        ) : (
+          <div …>
+            {Array.isArray(temas) && temas.length === 0
+              ? "No hay datarooms"
+              : "Cargando datarooms..."}
+          </div>
        )}
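Aside: the branch above deliberately checks `Array.isArray(temas)` before mapping, because `loadTemas()` may fall back to the legacy endpoint and must never leave `temas` non-iterable. Below is a minimal sketch of that normalization in isolation; the helper name is hypothetical, and `api` is the client from frontend/src/services/api.ts later in this diff.

// Hypothetical helper, mirroring loadTemas(): reduce GET /api/v1/dataroom/
// down to the plain string[] the Sidebar stores in `temas`.
import { api } from "@/services/api";

export async function refreshDatarooms(): Promise<string[]> {
  const response = await api.getDatarooms();
  if (response && Array.isArray(response.datarooms)) {
    return response.datarooms
      .filter((dataroom) => dataroom && dataroom.name) // drop malformed entries
      .map((dataroom) => dataroom.name);
  }
  return []; // never hand the UI undefined or null
}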
@@ -272,7 +398,7 @@ export function Sidebar({
            …,
          )}
          {renderWithTooltip(
-           "Actualizar temas",
+           "Actualizar datarooms",
            <Button …>
              <RefreshCcw … />
            </Button>,
          )}
+      <Dialog open={createDialogOpen} onOpenChange={handleCreateDialogOpenChange}>
+        <DialogContent …>
+          <DialogHeader>
+            <DialogTitle>Crear dataroom</DialogTitle>
+            <DialogDescription>
+              Define un nombre único para organizar tus archivos.
+            </DialogDescription>
+          </DialogHeader>
+          <div …>
+            <Label …>…</Label>
+            <Input
+              value={newDataroomName}
+              onChange={(e) => {
+                setNewDataroomName(e.target.value);
+                if (createError) {
+                  setCreateError(null);
+                }
+              }}
+              placeholder="Ej: normativa, contratos, fiscal..."
+              autoFocus
+            />
+            {createError && (
+              <p …>
+                {createError}
+              </p>
+            )}
+          </div>
+          <DialogFooter>
+            <Button … onClick={() => handleCreateDialogOpenChange(false)}>
+              …
+            </Button>
+            <Button … onClick={handleCreateDataroom} disabled={creatingDataroom}>
+              …
+            </Button>
+          </DialogFooter>
+        </DialogContent>
+      </Dialog>
); } diff --git a/frontend/src/services/api.ts b/frontend/src/services/api.ts index afc4373..baefc0a 100644 --- a/frontend/src/services/api.ts +++ b/frontend/src/services/api.ts @@ -1,191 +1,275 @@ -const API_BASE_URL = 'http://localhost:8000/api/v1' +const API_BASE_URL = "/api/v1"; interface FileUploadResponse { - success: boolean - message: string + success: boolean; + message: string; file?: { - name: string - full_path: string - tema: string - size: number - last_modified: string - url?: string - } + name: string; + full_path: string; + tema: string; + size: number; + last_modified: string; + url?: string; + }; } interface FileListResponse { files: Array<{ - name: string - full_path: string - tema: string - size: number - last_modified: string - content_type?: string - }> - total: number - tema?: string + name: string; + full_path: string; + tema: string; + size: number; + last_modified: string; + content_type?: string; + }>; + total: number; + tema?: string; } interface TemasResponse { - temas: string[] - total: number + temas: string[]; + total: number; +} + +interface DataroomsResponse { + datarooms: Array<{ + name: string; + collection: string; + storage: string; + }>; +} + +interface CreateDataroomRequest { + name: string; + collection?: string; + storage?: string; } // API calls export const api = { - // Obtener todos los temas + // Obtener todos los temas (legacy) getTemas: async (): Promise => { - const response = await fetch(`${API_BASE_URL}/files/temas`) - if (!response.ok) throw new Error('Error fetching temas') - return response.json() + const response = await fetch(`${API_BASE_URL}/files/temas`); + if (!response.ok) throw new Error("Error fetching temas"); + return response.json(); + }, + + // Obtener todos los datarooms + getDatarooms: async (): Promise => { + console.log("Fetching datarooms from:", `${API_BASE_URL}/dataroom/`); + const response = await fetch(`${API_BASE_URL}/dataroom/`); + console.log("Datarooms response status:", response.status); + if (!response.ok) { + const errorText = await response.text(); + console.error("Datarooms fetch error:", errorText); + throw new Error("Error fetching datarooms"); + } + const data = await response.json(); + console.log("Datarooms API response:", data); + return data; + }, + + // Crear un nuevo dataroom + createDataroom: async ( + data: CreateDataroomRequest, + ): Promise<{ + message: string; + dataroom: { + name: string; + collection: string; + storage: string; + }; + }> => { + const response = await fetch(`${API_BASE_URL}/dataroom/`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(data), + }); + if (!response.ok) throw new Error("Error creating dataroom"); + return response.json(); + }, + + // Eliminar un dataroom + deleteDataroom: async ( + dataroomName: string, + ): Promise<{ + message: string; + dataroom_name: string; + }> => { + const response = await fetch( + `${API_BASE_URL}/dataroom/${encodeURIComponent(dataroomName)}`, + { + method: "DELETE", + }, + ); + if (!response.ok) throw new Error("Error deleting dataroom"); + return response.json(); }, // Obtener archivos (todos o por tema) getFiles: async (tema?: string): Promise => { - const url = tema + const url = tema ? 
`${API_BASE_URL}/files/?tema=${encodeURIComponent(tema)}` - : `${API_BASE_URL}/files/` - - const response = await fetch(url) - if (!response.ok) throw new Error('Error fetching files') - return response.json() + : `${API_BASE_URL}/files/`; + + const response = await fetch(url); + if (!response.ok) throw new Error("Error fetching files"); + return response.json(); }, // Subir archivo - uploadFile: async (file: File, tema?: string): Promise => { - const formData = new FormData() - formData.append('file', file) - if (tema) formData.append('tema', tema) + uploadFile: async ( + file: File, + tema?: string, + ): Promise => { + const formData = new FormData(); + formData.append("file", file); + if (tema) formData.append("tema", tema); const response = await fetch(`${API_BASE_URL}/files/upload`, { - method: 'POST', + method: "POST", body: formData, - }) - - if (!response.ok) throw new Error('Error uploading file') - return response.json() + }); + + if (!response.ok) throw new Error("Error uploading file"); + return response.json(); }, // Eliminar archivo deleteFile: async (filename: string, tema?: string): Promise => { - const url = tema + const url = tema ? `${API_BASE_URL}/files/${encodeURIComponent(filename)}?tema=${encodeURIComponent(tema)}` - : `${API_BASE_URL}/files/${encodeURIComponent(filename)}` - - const response = await fetch(url, { method: 'DELETE' }) - if (!response.ok) throw new Error('Error deleting file') + : `${API_BASE_URL}/files/${encodeURIComponent(filename)}`; + + const response = await fetch(url, { method: "DELETE" }); + if (!response.ok) throw new Error("Error deleting file"); }, // Eliminar múltiples archivos deleteFiles: async (filenames: string[], tema?: string): Promise => { const response = await fetch(`${API_BASE_URL}/files/delete-batch`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - files: filenames, - tema: tema - }), - }) - - if (!response.ok) throw new Error('Error deleting files') - }, - - // Eliminar tema completo - deleteTema: async (tema: string): Promise => { - const response = await fetch(`${API_BASE_URL}/files/tema/${encodeURIComponent(tema)}/delete-all`, { - method: 'DELETE' - }) - - if (!response.ok) throw new Error('Error deleting tema') - }, - - // Descargar archivo individual - downloadFile: async (filename: string, tema?: string): Promise => { - const url = tema - ? 
`${API_BASE_URL}/files/${encodeURIComponent(filename)}/download?tema=${encodeURIComponent(tema)}` - : `${API_BASE_URL}/files/${encodeURIComponent(filename)}/download` - - const response = await fetch(url) - if (!response.ok) throw new Error('Error downloading file') - - // Crear blob y descargar - const blob = await response.blob() - const downloadUrl = window.URL.createObjectURL(blob) - const link = document.createElement('a') - link.href = downloadUrl - link.download = filename - document.body.appendChild(link) - link.click() - document.body.removeChild(link) - window.URL.revokeObjectURL(downloadUrl) - }, - - // Descargar múltiples archivos como ZIP - downloadMultipleFiles: async (filenames: string[], tema?: string, zipName?: string): Promise => { - const response = await fetch(`${API_BASE_URL}/files/download-batch`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', + "Content-Type": "application/json", }, body: JSON.stringify({ files: filenames, tema: tema, - zip_name: zipName || 'archivos' }), - }) - - if (!response.ok) throw new Error('Error downloading files') - + }); + + if (!response.ok) throw new Error("Error deleting files"); + }, + + // Eliminar tema completo + deleteTema: async (tema: string): Promise => { + const response = await fetch( + `${API_BASE_URL}/files/tema/${encodeURIComponent(tema)}/delete-all`, + { + method: "DELETE", + }, + ); + + if (!response.ok) throw new Error("Error deleting tema"); + }, + + // Descargar archivo individual + downloadFile: async (filename: string, tema?: string): Promise => { + const url = tema + ? `${API_BASE_URL}/files/${encodeURIComponent(filename)}/download?tema=${encodeURIComponent(tema)}` + : `${API_BASE_URL}/files/${encodeURIComponent(filename)}/download`; + + const response = await fetch(url); + if (!response.ok) throw new Error("Error downloading file"); + + // Crear blob y descargar + const blob = await response.blob(); + const downloadUrl = window.URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = downloadUrl; + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + window.URL.revokeObjectURL(downloadUrl); + }, + + // Descargar múltiples archivos como ZIP + downloadMultipleFiles: async ( + filenames: string[], + tema?: string, + zipName?: string, + ): Promise => { + const response = await fetch(`${API_BASE_URL}/files/download-batch`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + files: filenames, + tema: tema, + zip_name: zipName || "archivos", + }), + }); + + if (!response.ok) throw new Error("Error downloading files"); + // Crear blob y descargar ZIP - const blob = await response.blob() - const downloadUrl = window.URL.createObjectURL(blob) - const link = document.createElement('a') - link.href = downloadUrl - + const blob = await response.blob(); + const downloadUrl = window.URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = downloadUrl; + // Obtener nombre del archivo del header Content-Disposition - const contentDisposition = response.headers.get('Content-Disposition') - const filename = contentDisposition?.split('filename=')[1]?.replace(/"/g, '') || 'archivos.zip' - - link.download = filename - document.body.appendChild(link) - link.click() - document.body.removeChild(link) - window.URL.revokeObjectURL(downloadUrl) + const contentDisposition = response.headers.get("Content-Disposition"); + const filename = + 
contentDisposition?.split("filename=")[1]?.replace(/"/g, "") || + "archivos.zip"; + + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + window.URL.revokeObjectURL(downloadUrl); }, // Descargar tema completo downloadTema: async (tema: string): Promise => { - const response = await fetch(`${API_BASE_URL}/files/tema/${encodeURIComponent(tema)}/download-all`) - if (!response.ok) throw new Error('Error downloading tema') + const response = await fetch( + `${API_BASE_URL}/files/tema/${encodeURIComponent(tema)}/download-all`, + ); + if (!response.ok) throw new Error("Error downloading tema"); - const blob = await response.blob() - const downloadUrl = window.URL.createObjectURL(blob) - const link = document.createElement('a') - link.href = downloadUrl + const blob = await response.blob(); + const downloadUrl = window.URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = downloadUrl; - const contentDisposition = response.headers.get('Content-Disposition') - const filename = contentDisposition?.split('filename=')[1]?.replace(/"/g, '') || `${tema}.zip` + const contentDisposition = response.headers.get("Content-Disposition"); + const filename = + contentDisposition?.split("filename=")[1]?.replace(/"/g, "") || + `${tema}.zip`; - link.download = filename - document.body.appendChild(link) - link.click() - document.body.removeChild(link) - window.URL.revokeObjectURL(downloadUrl) + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + window.URL.revokeObjectURL(downloadUrl); }, // Obtener URL temporal para preview de archivos getPreviewUrl: async (filename: string, tema?: string): Promise => { const url = tema ? `${API_BASE_URL}/files/${encodeURIComponent(filename)}/preview-url?tema=${encodeURIComponent(tema)}` - : `${API_BASE_URL}/files/${encodeURIComponent(filename)}/preview-url` + : `${API_BASE_URL}/files/${encodeURIComponent(filename)}/preview-url`; - const response = await fetch(url) - if (!response.ok) throw new Error('Error getting preview URL') + const response = await fetch(url); + if (!response.ok) throw new Error("Error getting preview URL"); - const data = await response.json() - return data.url + const data = await response.json(); + return data.url; }, // ============================================================================ @@ -193,139 +277,178 @@ export const api = { // ============================================================================ // Health check de la base de datos vectorial - vectorHealthCheck: async (): Promise<{ status: string; db_type: string; message: string }> => { - const response = await fetch(`${API_BASE_URL}/vectors/health`) - if (!response.ok) throw new Error('Error checking vector DB health') - return response.json() + vectorHealthCheck: async (): Promise<{ + status: string; + db_type: string; + message: string; + }> => { + const response = await fetch(`${API_BASE_URL}/vectors/health`); + if (!response.ok) throw new Error("Error checking vector DB health"); + return response.json(); }, // Verificar si una colección existe - checkCollectionExists: async (collectionName: string): Promise<{ exists: boolean; collection_name: string }> => { + checkCollectionExists: async ( + collectionName: string, + ): Promise<{ exists: boolean; collection_name: string }> => { const response = await fetch(`${API_BASE_URL}/vectors/collections/exists`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 
'application/json', + "Content-Type": "application/json", }, body: JSON.stringify({ collection_name: collectionName }), - }) - if (!response.ok) throw new Error('Error checking collection') - return response.json() + }); + if (!response.ok) throw new Error("Error checking collection"); + return response.json(); }, // Crear una nueva colección createCollection: async ( collectionName: string, vectorSize: number = 3072, - distance: string = 'Cosine' - ): Promise<{ success: boolean; collection_name: string; message: string }> => { + distance: string = "Cosine", + ): Promise<{ + success: boolean; + collection_name: string; + message: string; + }> => { const response = await fetch(`${API_BASE_URL}/vectors/collections/create`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 'application/json', + "Content-Type": "application/json", }, body: JSON.stringify({ collection_name: collectionName, vector_size: vectorSize, distance: distance, }), - }) + }); if (!response.ok) { - const error = await response.json() - throw new Error(error.detail || 'Error creating collection') + const error = await response.json(); + throw new Error(error.detail || "Error creating collection"); } - return response.json() + return response.json(); }, // Eliminar una colección - deleteCollection: async (collectionName: string): Promise<{ success: boolean; collection_name: string; message: string }> => { - const response = await fetch(`${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}`, { - method: 'DELETE', - }) - if (!response.ok) throw new Error('Error deleting collection') - return response.json() + deleteCollection: async ( + collectionName: string, + ): Promise<{ + success: boolean; + collection_name: string; + message: string; + }> => { + const response = await fetch( + `${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}`, + { + method: "DELETE", + }, + ); + if (!response.ok) throw new Error("Error deleting collection"); + return response.json(); }, // Obtener información de una colección - getCollectionInfo: async (collectionName: string): Promise<{ - name: string - vectors_count: number - vectors_config: { size: number; distance: string } - status: string + getCollectionInfo: async ( + collectionName: string, + ): Promise<{ + name: string; + vectors_count: number; + vectors_config: { size: number; distance: string }; + status: string; }> => { - const response = await fetch(`${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}/info`) - if (!response.ok) throw new Error('Error getting collection info') - return response.json() + const response = await fetch( + `${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}/info`, + ); + if (!response.ok) throw new Error("Error getting collection info"); + return response.json(); }, // Verificar si un archivo existe en una colección checkFileExistsInCollection: async ( collectionName: string, - fileName: string - ): Promise<{ exists: boolean; collection_name: string; file_name: string; chunk_count?: number }> => { + fileName: string, + ): Promise<{ + exists: boolean; + collection_name: string; + file_name: string; + chunk_count?: number; + }> => { const response = await fetch(`${API_BASE_URL}/vectors/files/exists`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 'application/json', + "Content-Type": "application/json", }, body: JSON.stringify({ collection_name: collectionName, file_name: fileName, }), - }) - if (!response.ok) throw new Error('Error 
checking file in collection') - return response.json() + }); + if (!response.ok) throw new Error("Error checking file in collection"); + return response.json(); }, // Obtener chunks de un archivo getChunksByFile: async ( collectionName: string, fileName: string, - limit?: number + limit?: number, ): Promise<{ - collection_name: string - file_name: string - chunks: Array<{ id: string; payload: any; vector?: number[] }> - total_chunks: number + collection_name: string; + file_name: string; + chunks: Array<{ id: string; payload: any; vector?: number[] }>; + total_chunks: number; }> => { const url = limit ? `${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}/files/${encodeURIComponent(fileName)}/chunks?limit=${limit}` - : `${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}/files/${encodeURIComponent(fileName)}/chunks` + : `${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}/files/${encodeURIComponent(fileName)}/chunks`; - const response = await fetch(url) - if (!response.ok) throw new Error('Error getting chunks') - return response.json() + const response = await fetch(url); + if (!response.ok) throw new Error("Error getting chunks"); + return response.json(); }, // Eliminar archivo de colección deleteFileFromCollection: async ( collectionName: string, - fileName: string - ): Promise<{ success: boolean; collection_name: string; file_name: string; chunks_deleted: number; message: string }> => { + fileName: string, + ): Promise<{ + success: boolean; + collection_name: string; + file_name: string; + chunks_deleted: number; + message: string; + }> => { const response = await fetch( `${API_BASE_URL}/vectors/collections/${encodeURIComponent(collectionName)}/files/${encodeURIComponent(fileName)}`, - { method: 'DELETE' } - ) - if (!response.ok) throw new Error('Error deleting file from collection') - return response.json() + { method: "DELETE" }, + ); + if (!response.ok) throw new Error("Error deleting file from collection"); + return response.json(); }, // Agregar chunks a una colección addChunks: async ( collectionName: string, - chunks: Array<{ id: string; vector: number[]; payload: any }> - ): Promise<{ success: boolean; collection_name: string; chunks_added: number; message: string }> => { + chunks: Array<{ id: string; vector: number[]; payload: any }>, + ): Promise<{ + success: boolean; + collection_name: string; + chunks_added: number; + message: string; + }> => { const response = await fetch(`${API_BASE_URL}/vectors/chunks/add`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 'application/json', + "Content-Type": "application/json", }, body: JSON.stringify({ collection_name: collectionName, chunks: chunks, }), - }) - if (!response.ok) throw new Error('Error adding chunks') - return response.json() + }); + if (!response.ok) throw new Error("Error adding chunks"); + return response.json(); }, // ============================================================================ @@ -335,89 +458,89 @@ export const api = { // Obtener perfiles de chunking predefinidos getChunkingProfiles: async (): Promise<{ profiles: Array<{ - id: string - name: string - description: string - max_tokens: number - target_tokens: number - chunk_size: number - chunk_overlap: number - use_llm: boolean - }> + id: string; + name: string; + description: string; + max_tokens: number; + target_tokens: number; + chunk_size: number; + chunk_overlap: number; + use_llm: boolean; + }>; }> => { - const response = await 
fetch(`${API_BASE_URL}/chunking/profiles`) - if (!response.ok) throw new Error('Error fetching chunking profiles') - return response.json() + const response = await fetch(`${API_BASE_URL}/chunking/profiles`); + if (!response.ok) throw new Error("Error fetching chunking profiles"); + return response.json(); }, // Generar preview de chunks (hasta 3 chunks) generateChunkPreview: async (config: { - file_name: string - tema: string - max_tokens?: number - target_tokens?: number - chunk_size?: number - chunk_overlap?: number - use_llm?: boolean - custom_instructions?: string + file_name: string; + tema: string; + max_tokens?: number; + target_tokens?: number; + chunk_size?: number; + chunk_overlap?: number; + use_llm?: boolean; + custom_instructions?: string; }): Promise<{ - success: boolean - file_name: string - tema: string + success: boolean; + file_name: string; + tema: string; chunks: Array<{ - index: number - text: string - page: number - file_name: string - tokens: number - }> - message: string + index: number; + text: string; + page: number; + file_name: string; + tokens: number; + }>; + message: string; }> => { const response = await fetch(`${API_BASE_URL}/chunking/preview`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 'application/json', + "Content-Type": "application/json", }, body: JSON.stringify(config), - }) + }); if (!response.ok) { - const error = await response.json() - throw new Error(error.detail || 'Error generating preview') + const error = await response.json(); + throw new Error(error.detail || "Error generating preview"); } - return response.json() + return response.json(); }, // Procesar PDF completo processChunkingFull: async (config: { - file_name: string - tema: string - collection_name: string - max_tokens?: number - target_tokens?: number - chunk_size?: number - chunk_overlap?: number - use_llm?: boolean - custom_instructions?: string + file_name: string; + tema: string; + collection_name: string; + max_tokens?: number; + target_tokens?: number; + chunk_size?: number; + chunk_overlap?: number; + use_llm?: boolean; + custom_instructions?: string; }): Promise<{ - success: boolean - collection_name: string - file_name: string - total_chunks: number - chunks_added: number - message: string + success: boolean; + collection_name: string; + file_name: string; + total_chunks: number; + chunks_added: number; + message: string; }> => { const response = await fetch(`${API_BASE_URL}/chunking/process`, { - method: 'POST', + method: "POST", headers: { - 'Content-Type': 'application/json', + "Content-Type": "application/json", }, body: JSON.stringify(config), - }) + }); if (!response.ok) { - const error = await response.json() - throw new Error(error.detail || 'Error processing PDF') + const error = await response.json(); + throw new Error(error.detail || "Error processing PDF"); } - return response.json() + return response.json(); }, // ============================================================================ @@ -427,62 +550,62 @@ export const api = { // Crear schema createSchema: async (schema: any): Promise => { const response = await fetch(`${API_BASE_URL}/schemas/`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(schema) - }) + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(schema), + }); if (!response.ok) { - const error = await response.json() - throw new Error(error.detail?.message || 'Error creando schema') + const error = await response.json(); + throw new 
Error(error.detail?.message || "Error creando schema"); } - return response.json() + return response.json(); }, // Listar schemas listSchemas: async (tema?: string): Promise => { const url = tema ? `${API_BASE_URL}/schemas/?tema=${encodeURIComponent(tema)}` - : `${API_BASE_URL}/schemas/` - const response = await fetch(url) - if (!response.ok) throw new Error('Error listando schemas') - return response.json() + : `${API_BASE_URL}/schemas/`; + const response = await fetch(url); + if (!response.ok) throw new Error("Error listando schemas"); + return response.json(); }, // Obtener schema por ID getSchema: async (schema_id: string): Promise => { - const response = await fetch(`${API_BASE_URL}/schemas/${schema_id}`) - if (!response.ok) throw new Error('Error obteniendo schema') - return response.json() + const response = await fetch(`${API_BASE_URL}/schemas/${schema_id}`); + if (!response.ok) throw new Error("Error obteniendo schema"); + return response.json(); }, // Actualizar schema updateSchema: async (schema_id: string, schema: any): Promise => { const response = await fetch(`${API_BASE_URL}/schemas/${schema_id}`, { - method: 'PUT', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(schema) - }) - if (!response.ok) throw new Error('Error actualizando schema') - return response.json() + method: "PUT", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(schema), + }); + if (!response.ok) throw new Error("Error actualizando schema"); + return response.json(); }, // Eliminar schema deleteSchema: async (schema_id: string): Promise => { const response = await fetch(`${API_BASE_URL}/schemas/${schema_id}`, { - method: 'DELETE' - }) - if (!response.ok) throw new Error('Error eliminando schema') + method: "DELETE", + }); + if (!response.ok) throw new Error("Error eliminando schema"); }, // Validar schema validateSchema: async (schema: any): Promise => { const response = await fetch(`${API_BASE_URL}/schemas/validate`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(schema) - }) - if (!response.ok) throw new Error('Error validando schema') - return response.json() + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(schema), + }); + if (!response.ok) throw new Error("Error validando schema"); + return response.json(); }, // ============================================================================ @@ -491,25 +614,24 @@ export const api = { // Procesar con LandingAI processWithLandingAI: async (config: { - file_name: string - tema: string - collection_name: string - mode: 'quick' | 'extract' - schema_id?: string - include_chunk_types?: string[] - max_tokens_per_chunk?: number - merge_small_chunks?: boolean + file_name: string; + tema: string; + collection_name: string; + mode: "quick" | "extract"; + schema_id?: string; + include_chunk_types?: string[]; + max_tokens_per_chunk?: number; + merge_small_chunks?: boolean; }): Promise => { const response = await fetch(`${API_BASE_URL}/chunking-landingai/process`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(config) - }) + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(config), + }); if (!response.ok) { - const error = await response.json() - throw new Error(error.detail || 'Error procesando con LandingAI') + const error = await response.json(); + throw new Error(error.detail || "Error procesando con LandingAI"); } - return response.json() + 
return response.json(); }, - -} \ No newline at end of file +}; diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 4d810c3..9f2daad 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -1,12 +1,21 @@ -import { defineConfig } from 'vite' -import react from '@vitejs/plugin-react' -import path from 'path' +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; +import path from "path"; export default defineConfig({ plugins: [react()], resolve: { alias: { - '@': path.resolve(__dirname, './src'), + "@": path.resolve(__dirname, "./src"), }, }, -}) \ No newline at end of file + server: { + proxy: { + "/api": { + target: "http://backend:8000", + changeOrigin: true, + secure: false, + }, + }, + }, +});
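Aside: the proxy block above is what lets api.ts swap the absolute `http://localhost:8000/api/v1` for the relative `/api/v1`. In development the browser only talks to the Vite server, which forwards anything under `/api` to the `backend` service defined in docker-compose.yml; `changeOrigin: true` rewrites the Host header to match that target. Below is an illustrative sketch of a request riding that path, using the files route from this diff.

// Illustrative only: a relative fetch that the Vite dev server proxies
// through to http://backend:8000/api/v1/files/.
async function checkBackend(): Promise<void> {
  const res = await fetch("/api/v1/files/");
  if (!res.ok) throw new Error(`Backend returned ${res.status}`);
  console.log(await res.json()); // { files: [...], total, tema? }
}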