Files
luma/backend/pyproject.toml

42 lines
1.0 KiB
TOML

[project]
name = "backend"
version = "0.1.0"
# NOTE(review): template placeholder — replace with a real one-line summary.
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
# PEP 508 requirement strings, grouped by purpose and sorted within each group.
dependencies = [
    # Core web stack, storage & vector DB
    "azure-storage-blob>=12.26.0",
    "fastapi>=0.116.1",
    "pydantic-settings>=2.10.1",
    "python-dotenv>=1.1.1",
    "python-multipart>=0.0.20",
    "qdrant-client>=1.15.1",
    "uvicorn[standard]>=0.35.0",
    # Chunking & PDF processing
    "pdf2image>=1.17.0",
    "pillow>=11.0.0",
    "pypdf>=5.1.0",
    # LLM & Embeddings
    "google-cloud-aiplatform>=1.77.0",
    "langchain>=0.3.12",
    "langchain-core>=0.3.24",
    "langchain-text-splitters>=1.0.0",  # was misfiled under "WebSockets"
    "openai>=1.59.6",
    "tiktoken>=0.8.0",
    # WebSockets
    "websockets>=14.1",
    # LandingAI Document AI
    "landingai-ade>=0.2.1",
    # Agents, caching & web search
    "pydantic-ai-slim[google,openai,mcp]>=1.11.1",
    "redis-om>=0.3.5",
    "tavily-python>=0.5.0",
]
# NOTE(review): PEP 621 `[project.scripts]` values must be entry-point object
# references of the form "package.module:callable" — shell-style command
# strings with flags are not valid here, and standard build backends /
# installers will reject or mis-generate these console scripts. The likely
# fix is a pair of wrapper callables (e.g. `dev()` / `start()` in app.main
# that call uvicorn.run with these options) referenced as "app.main:dev" and
# "app.main:start" — TODO confirm against the actual `app` package before
# changing these values.
[project.scripts]
dev = "uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload"
start = "uvicorn app.main:app --host 0.0.0.0 --port 8000"
# PEP 735 dependency groups: dev-only tooling, never installed as part of
# the distributed package (presumably consumed via `uv sync` — confirm the
# project's workflow).
[dependency-groups]
dev = [
"ruff>=0.14.4",
]