Compare commits
13 Commits
83ed64326f
...
push-omyxs
| Author | SHA1 | Date | |
|---|---|---|---|
| 132ea1c04f | |||
| 0cdf9cd44e | |||
| d39b8a6ea7 | |||
| 86ed34887b | |||
| 694b060fa4 | |||
| d69c4e4f4a | |||
| f6e122b5a9 | |||
| dba94107a5 | |||
| d3cd8d5291 | |||
| 8dfd2048a5 | |||
| 3e2386b9b6 | |||
|
|
42e1660143 | ||
| 208d5ebebf |
43
.gitea/workflows/ci.yml
Normal file
43
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,43 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: uv sync --frozen
|
||||
- name: Ruff check
|
||||
run: uv run ruff check
|
||||
- name: Ruff format check
|
||||
run: uv run ruff format --check
|
||||
|
||||
typecheck:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: uv sync --frozen
|
||||
- name: Type check
|
||||
run: uv run ty check
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: astral-sh/setup-uv@v6
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: uv sync --frozen
|
||||
- name: Run tests
|
||||
run: uv run pytest --cov
|
||||
3
CLAUDE.md
Normal file
3
CLAUDE.md
Normal file
@@ -0,0 +1,3 @@
|
||||
Use `uv` for project management
|
||||
Linter: `uv run ruff check`
|
||||
Type-checking: `uv run ty check`
|
||||
@@ -7,8 +7,8 @@ WORKDIR /app
|
||||
COPY pyproject.toml uv.lock ./
|
||||
RUN uv sync --no-dev --frozen
|
||||
|
||||
COPY main.py .
|
||||
COPY src/ src/
|
||||
|
||||
ENV PATH="/app/.venv/bin:$PATH"
|
||||
|
||||
CMD ["uv", "run", "python", "main.py", "--transport", "streamable-http", "--port", "8000"]
|
||||
CMD ["uv", "run", "python", "-m", "knowledge_search_mcp", "--transport", "streamable-http", "--port", "8000"]
|
||||
|
||||
35
README.md
35
README.md
@@ -19,7 +19,7 @@ An MCP (Model Context Protocol) server that exposes a `knowledge_search` tool fo
|
||||
|
||||
## Configuration
|
||||
|
||||
Create a `.env` file (see `Settings` in `main.py` for all options):
|
||||
Create a `config.yaml` file or `.env` file (see `Settings` in `src/knowledge_search_mcp/config.py` for all options):
|
||||
|
||||
```env
|
||||
PROJECT_ID=my-gcp-project
|
||||
@@ -42,16 +42,25 @@ SEARCH_LIMIT=10
|
||||
uv sync
|
||||
```
|
||||
|
||||
### Run the MCP server (stdio)
|
||||
### Run the MCP server
|
||||
|
||||
**Using the installed command (recommended):**
|
||||
|
||||
```bash
|
||||
uv run python main.py
|
||||
# stdio transport (default)
|
||||
uv run knowledge-search-mcp
|
||||
|
||||
# SSE transport for remote clients
|
||||
uv run knowledge-search-mcp --transport sse --port 8080
|
||||
|
||||
# streamable-http transport
|
||||
uv run knowledge-search-mcp --transport streamable-http --port 8080
|
||||
```
|
||||
|
||||
### Run the MCP server (SSE, e.g. for remote clients)
|
||||
**Or run directly:**
|
||||
|
||||
```bash
|
||||
uv run python main.py --transport sse --port 8080
|
||||
uv run python -m knowledge_search_mcp.main
|
||||
```
|
||||
|
||||
### Run the interactive agent (ADK)
|
||||
@@ -68,6 +77,12 @@ Or connect to an already-running SSE server:
|
||||
uv run python agent.py --remote http://localhost:8080/sse
|
||||
```
|
||||
|
||||
### Run tests
|
||||
|
||||
```bash
|
||||
uv run pytest
|
||||
```
|
||||
|
||||
## Docker
|
||||
|
||||
```bash
|
||||
@@ -80,8 +95,12 @@ The container starts the server in SSE mode on the port specified by `PORT` (def
|
||||
## Project structure
|
||||
|
||||
```
|
||||
main.py MCP server, vector search client, and GCS storage helper
|
||||
src/knowledge_search_mcp/
|
||||
├── __init__.py Package initialization
|
||||
├── config.py Configuration management (Settings, args parsing)
|
||||
├── logging.py Cloud Logging setup
|
||||
└── main.py MCP server, vector search client, and GCS storage helper
|
||||
agent.py Interactive ADK agent that consumes the MCP server
|
||||
Dockerfile Multi-stage build for Cloud Run / containerized deployment
|
||||
pyproject.toml Project metadata and dependencies
|
||||
tests/ Test suite
|
||||
pyproject.toml Project metadata, dependencies, and entry points
|
||||
```
|
||||
|
||||
803
main.py
803
main.py
@@ -1,803 +0,0 @@
|
||||
# ruff: noqa: INP001
|
||||
"""Async helpers for querying Vertex AI vector search via MCP."""
|
||||
|
||||
import asyncio
|
||||
import io
|
||||
from collections.abc import AsyncIterator, Sequence
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import BinaryIO, TypedDict
|
||||
|
||||
import aiohttp
|
||||
from gcloud.aio.auth import Token
|
||||
from gcloud.aio.storage import Storage
|
||||
from google import genai
|
||||
from google.genai import types as genai_types
|
||||
from mcp.server.fastmcp import Context, FastMCP
|
||||
|
||||
from utils import Settings, _args, cfg, log_structured_entry
|
||||
|
||||
HTTP_TOO_MANY_REQUESTS = 429
|
||||
HTTP_SERVER_ERROR = 500
|
||||
|
||||
|
||||
class SourceNamespace(str, Enum):
|
||||
"""Allowed values for the 'source' namespace filter."""
|
||||
|
||||
EDUCACION_FINANCIERA = "Educacion Financiera"
|
||||
PRODUCTOS_Y_SERVICIOS = "Productos y Servicios"
|
||||
FUNCIONALIDADES_APP_MOVIL = "Funcionalidades de la App Movil"
|
||||
|
||||
|
||||
class GoogleCloudFileStorage:
|
||||
"""Cache-aware helper for downloading files from Google Cloud Storage."""
|
||||
|
||||
def __init__(self, bucket: str) -> None:
|
||||
"""Initialize the storage helper."""
|
||||
self.bucket_name = bucket
|
||||
self._aio_session: aiohttp.ClientSession | None = None
|
||||
self._aio_storage: Storage | None = None
|
||||
self._cache: dict[str, bytes] = {}
|
||||
|
||||
def _get_aio_session(self) -> aiohttp.ClientSession:
|
||||
if self._aio_session is None or self._aio_session.closed:
|
||||
connector = aiohttp.TCPConnector(
|
||||
limit=300,
|
||||
limit_per_host=50,
|
||||
)
|
||||
timeout = aiohttp.ClientTimeout(total=60)
|
||||
self._aio_session = aiohttp.ClientSession(
|
||||
timeout=timeout,
|
||||
connector=connector,
|
||||
)
|
||||
return self._aio_session
|
||||
|
||||
def _get_aio_storage(self) -> Storage:
|
||||
if self._aio_storage is None:
|
||||
self._aio_storage = Storage(
|
||||
session=self._get_aio_session(),
|
||||
)
|
||||
return self._aio_storage
|
||||
|
||||
async def async_get_file_stream(
|
||||
self,
|
||||
file_name: str,
|
||||
max_retries: int = 3,
|
||||
) -> BinaryIO:
|
||||
"""Get a file asynchronously with retry on transient errors.
|
||||
|
||||
Args:
|
||||
file_name: The blob name to retrieve.
|
||||
max_retries: Maximum number of retry attempts.
|
||||
|
||||
Returns:
|
||||
A BytesIO stream with the file contents.
|
||||
|
||||
Raises:
|
||||
TimeoutError: If all retry attempts fail.
|
||||
|
||||
"""
|
||||
if file_name in self._cache:
|
||||
log_structured_entry(
|
||||
"File retrieved from cache",
|
||||
"INFO",
|
||||
{"file": file_name, "bucket": self.bucket_name}
|
||||
)
|
||||
file_stream = io.BytesIO(self._cache[file_name])
|
||||
file_stream.name = file_name
|
||||
return file_stream
|
||||
|
||||
log_structured_entry(
|
||||
"Starting file download from GCS",
|
||||
"INFO",
|
||||
{"file": file_name, "bucket": self.bucket_name}
|
||||
)
|
||||
|
||||
storage_client = self._get_aio_storage()
|
||||
last_exception: Exception | None = None
|
||||
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
self._cache[file_name] = await storage_client.download(
|
||||
self.bucket_name,
|
||||
file_name,
|
||||
)
|
||||
file_stream = io.BytesIO(self._cache[file_name])
|
||||
file_stream.name = file_name
|
||||
log_structured_entry(
|
||||
"File downloaded successfully",
|
||||
"INFO",
|
||||
{
|
||||
"file": file_name,
|
||||
"bucket": self.bucket_name,
|
||||
"size_bytes": len(self._cache[file_name]),
|
||||
"attempt": attempt + 1
|
||||
}
|
||||
)
|
||||
except TimeoutError as exc:
|
||||
last_exception = exc
|
||||
log_structured_entry(
|
||||
f"Timeout downloading gs://{self.bucket_name}/{file_name} (attempt {attempt + 1}/{max_retries})",
|
||||
"WARNING",
|
||||
{"error": str(exc)}
|
||||
)
|
||||
except aiohttp.ClientResponseError as exc:
|
||||
last_exception = exc
|
||||
if (
|
||||
exc.status == HTTP_TOO_MANY_REQUESTS
|
||||
or exc.status >= HTTP_SERVER_ERROR
|
||||
):
|
||||
log_structured_entry(
|
||||
f"HTTP {exc.status} downloading gs://{self.bucket_name}/{file_name} (attempt {attempt + 1}/{max_retries})",
|
||||
"WARNING",
|
||||
{"status": exc.status, "message": str(exc)}
|
||||
)
|
||||
else:
|
||||
log_structured_entry(
|
||||
f"Non-retryable HTTP error downloading gs://{self.bucket_name}/{file_name}",
|
||||
"ERROR",
|
||||
{"status": exc.status, "message": str(exc)}
|
||||
)
|
||||
raise
|
||||
else:
|
||||
return file_stream
|
||||
|
||||
if attempt < max_retries - 1:
|
||||
delay = 0.5 * (2**attempt)
|
||||
log_structured_entry(
|
||||
"Retrying file download",
|
||||
"INFO",
|
||||
{"file": file_name, "delay_seconds": delay}
|
||||
)
|
||||
await asyncio.sleep(delay)
|
||||
|
||||
msg = (
|
||||
f"Failed to download gs://{self.bucket_name}/{file_name} "
|
||||
f"after {max_retries} attempts"
|
||||
)
|
||||
log_structured_entry(
|
||||
"File download failed after all retries",
|
||||
"ERROR",
|
||||
{
|
||||
"file": file_name,
|
||||
"bucket": self.bucket_name,
|
||||
"max_retries": max_retries,
|
||||
"last_error": str(last_exception)
|
||||
}
|
||||
)
|
||||
raise TimeoutError(msg) from last_exception
|
||||
|
||||
|
||||
class SearchResult(TypedDict):
|
||||
"""Structured response item returned by the vector search API."""
|
||||
|
||||
id: str
|
||||
distance: float
|
||||
content: str
|
||||
|
||||
|
||||
class GoogleCloudVectorSearch:
|
||||
"""Minimal async client for the Vertex AI Matching Engine REST API."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
project_id: str,
|
||||
location: str,
|
||||
bucket: str,
|
||||
index_name: str | None = None,
|
||||
) -> None:
|
||||
"""Store configuration used to issue Matching Engine queries."""
|
||||
self.project_id = project_id
|
||||
self.location = location
|
||||
self.storage = GoogleCloudFileStorage(bucket=bucket)
|
||||
self.index_name = index_name
|
||||
self._aio_session: aiohttp.ClientSession | None = None
|
||||
self._async_token: Token | None = None
|
||||
self._endpoint_domain: str | None = None
|
||||
self._endpoint_name: str | None = None
|
||||
|
||||
async def _async_get_auth_headers(self) -> dict[str, str]:
|
||||
if self._async_token is None:
|
||||
self._async_token = Token(
|
||||
session=self._get_aio_session(),
|
||||
scopes=[
|
||||
"https://www.googleapis.com/auth/cloud-platform",
|
||||
],
|
||||
)
|
||||
access_token = await self._async_token.get()
|
||||
return {
|
||||
"Authorization": f"Bearer {access_token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
def _get_aio_session(self) -> aiohttp.ClientSession:
|
||||
if self._aio_session is None or self._aio_session.closed:
|
||||
connector = aiohttp.TCPConnector(
|
||||
limit=300,
|
||||
limit_per_host=50,
|
||||
)
|
||||
timeout = aiohttp.ClientTimeout(total=60)
|
||||
self._aio_session = aiohttp.ClientSession(
|
||||
timeout=timeout,
|
||||
connector=connector,
|
||||
)
|
||||
return self._aio_session
|
||||
|
||||
def configure_index_endpoint(
|
||||
self,
|
||||
*,
|
||||
name: str,
|
||||
public_domain: str,
|
||||
) -> None:
|
||||
"""Persist the metadata needed to access a deployed endpoint."""
|
||||
if not name:
|
||||
msg = "Index endpoint name must be a non-empty string."
|
||||
raise ValueError(msg)
|
||||
if not public_domain:
|
||||
msg = "Index endpoint domain must be a non-empty public domain."
|
||||
raise ValueError(msg)
|
||||
self._endpoint_name = name
|
||||
self._endpoint_domain = public_domain
|
||||
|
||||
async def async_run_query(
|
||||
self,
|
||||
deployed_index_id: str,
|
||||
query: Sequence[float],
|
||||
limit: int,
|
||||
source: SourceNamespace | None = None,
|
||||
) -> list[SearchResult]:
|
||||
"""Run an async similarity search via the REST API.
|
||||
|
||||
Args:
|
||||
deployed_index_id: The ID of the deployed index.
|
||||
query: The embedding vector for the search query.
|
||||
limit: Maximum number of nearest neighbors to return.
|
||||
source: Optional namespace filter to restrict results by source.
|
||||
|
||||
Returns:
|
||||
A list of matched items with id, distance, and content.
|
||||
|
||||
"""
|
||||
if self._endpoint_domain is None or self._endpoint_name is None:
|
||||
msg = (
|
||||
"Missing endpoint metadata. Call "
|
||||
"`configure_index_endpoint` before querying."
|
||||
)
|
||||
log_structured_entry(
|
||||
"Vector search query failed - endpoint not configured",
|
||||
"ERROR",
|
||||
{"error": msg}
|
||||
)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
domain = self._endpoint_domain
|
||||
endpoint_id = self._endpoint_name.split("/")[-1]
|
||||
url = (
|
||||
f"https://{domain}/v1/projects/{self.project_id}"
|
||||
f"/locations/{self.location}"
|
||||
f"/indexEndpoints/{endpoint_id}:findNeighbors"
|
||||
)
|
||||
|
||||
log_structured_entry(
|
||||
"Starting vector search query",
|
||||
"INFO",
|
||||
{
|
||||
"deployed_index_id": deployed_index_id,
|
||||
"neighbor_count": limit,
|
||||
"endpoint_id": endpoint_id,
|
||||
"embedding_dimension": len(query)
|
||||
}
|
||||
)
|
||||
|
||||
datapoint: dict = {"feature_vector": list(query)}
|
||||
if source is not None:
|
||||
datapoint["restricts"] = [
|
||||
{"namespace": "source", "allow_list": [source.value]},
|
||||
]
|
||||
payload = {
|
||||
"deployed_index_id": deployed_index_id,
|
||||
"queries": [
|
||||
{
|
||||
"datapoint": datapoint,
|
||||
"neighbor_count": limit,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
try:
|
||||
headers = await self._async_get_auth_headers()
|
||||
session = self._get_aio_session()
|
||||
async with session.post(
|
||||
url,
|
||||
json=payload,
|
||||
headers=headers,
|
||||
) as response:
|
||||
if not response.ok:
|
||||
body = await response.text()
|
||||
msg = f"findNeighbors returned {response.status}: {body}"
|
||||
log_structured_entry(
|
||||
"Vector search API request failed",
|
||||
"ERROR",
|
||||
{
|
||||
"status": response.status,
|
||||
"response_body": body,
|
||||
"deployed_index_id": deployed_index_id
|
||||
}
|
||||
)
|
||||
raise RuntimeError(msg)
|
||||
data = await response.json()
|
||||
|
||||
neighbors = data.get("nearestNeighbors", [{}])[0].get("neighbors", [])
|
||||
log_structured_entry(
|
||||
"Vector search API request successful",
|
||||
"INFO",
|
||||
{
|
||||
"neighbors_found": len(neighbors),
|
||||
"deployed_index_id": deployed_index_id
|
||||
}
|
||||
)
|
||||
|
||||
if not neighbors:
|
||||
log_structured_entry(
|
||||
"No neighbors found in vector search",
|
||||
"WARNING",
|
||||
{"deployed_index_id": deployed_index_id}
|
||||
)
|
||||
return []
|
||||
|
||||
# Fetch content for all neighbors
|
||||
content_tasks = []
|
||||
for neighbor in neighbors:
|
||||
datapoint_id = neighbor["datapoint"]["datapointId"]
|
||||
file_path = f"{self.index_name}/contents/{datapoint_id}.md"
|
||||
content_tasks.append(
|
||||
self.storage.async_get_file_stream(file_path),
|
||||
)
|
||||
|
||||
log_structured_entry(
|
||||
"Fetching content for search results",
|
||||
"INFO",
|
||||
{"file_count": len(content_tasks)}
|
||||
)
|
||||
|
||||
file_streams = await asyncio.gather(*content_tasks)
|
||||
results: list[SearchResult] = []
|
||||
for neighbor, stream in zip(
|
||||
neighbors,
|
||||
file_streams,
|
||||
strict=True,
|
||||
):
|
||||
results.append(
|
||||
SearchResult(
|
||||
id=neighbor["datapoint"]["datapointId"],
|
||||
distance=neighbor["distance"],
|
||||
content=stream.read().decode("utf-8"),
|
||||
),
|
||||
)
|
||||
|
||||
log_structured_entry(
|
||||
"Vector search completed successfully",
|
||||
"INFO",
|
||||
{
|
||||
"results_count": len(results),
|
||||
"deployed_index_id": deployed_index_id
|
||||
}
|
||||
)
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
log_structured_entry(
|
||||
"Vector search query failed with exception",
|
||||
"ERROR",
|
||||
{
|
||||
"error": str(e),
|
||||
"error_type": type(e).__name__,
|
||||
"deployed_index_id": deployed_index_id
|
||||
}
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# MCP Server
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass
|
||||
class AppContext:
|
||||
"""Shared resources initialised once at server startup."""
|
||||
|
||||
vector_search: GoogleCloudVectorSearch
|
||||
genai_client: genai.Client
|
||||
settings: Settings
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(_server: FastMCP) -> AsyncIterator[AppContext]:
|
||||
"""Create and configure the vector-search client for the server lifetime."""
|
||||
log_structured_entry(
|
||||
"Initializing MCP server",
|
||||
"INFO",
|
||||
{
|
||||
"project_id": cfg.project_id,
|
||||
"location": cfg.location,
|
||||
"bucket": cfg.bucket,
|
||||
"index_name": cfg.index_name,
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
# Initialize vector search client
|
||||
log_structured_entry("Creating GoogleCloudVectorSearch client", "INFO")
|
||||
vs = GoogleCloudVectorSearch(
|
||||
project_id=cfg.project_id,
|
||||
location=cfg.location,
|
||||
bucket=cfg.bucket,
|
||||
index_name=cfg.index_name,
|
||||
)
|
||||
|
||||
# Configure endpoint
|
||||
log_structured_entry(
|
||||
"Configuring index endpoint",
|
||||
"INFO",
|
||||
{
|
||||
"endpoint_name": cfg.endpoint_name,
|
||||
"endpoint_domain": cfg.endpoint_domain,
|
||||
}
|
||||
)
|
||||
vs.configure_index_endpoint(
|
||||
name=cfg.endpoint_name,
|
||||
public_domain=cfg.endpoint_domain,
|
||||
)
|
||||
|
||||
# Initialize GenAI client
|
||||
log_structured_entry(
|
||||
"Creating GenAI client",
|
||||
"INFO",
|
||||
{"project_id": cfg.project_id, "location": cfg.location}
|
||||
)
|
||||
genai_client = genai.Client(
|
||||
vertexai=True,
|
||||
project=cfg.project_id,
|
||||
location=cfg.location,
|
||||
)
|
||||
|
||||
# Validate credentials and configuration by testing actual resources
|
||||
# These validations are non-blocking - errors are logged but won't stop startup
|
||||
log_structured_entry("Starting validation of credentials and resources", "INFO")
|
||||
|
||||
validation_errors = []
|
||||
|
||||
# 1. Validate GenAI embedding access
|
||||
log_structured_entry("Validating GenAI embedding access", "INFO")
|
||||
try:
|
||||
test_response = await genai_client.aio.models.embed_content(
|
||||
model=cfg.embedding_model,
|
||||
contents="test",
|
||||
config=genai_types.EmbedContentConfig(
|
||||
task_type="RETRIEVAL_QUERY",
|
||||
),
|
||||
)
|
||||
if test_response and test_response.embeddings:
|
||||
embedding_values = test_response.embeddings[0].values
|
||||
log_structured_entry(
|
||||
"GenAI embedding validation successful",
|
||||
"INFO",
|
||||
{"embedding_dimension": len(embedding_values) if embedding_values else 0}
|
||||
)
|
||||
else:
|
||||
msg = "Embedding validation returned empty response"
|
||||
log_structured_entry(msg, "WARNING")
|
||||
validation_errors.append(msg)
|
||||
except Exception as e:
|
||||
log_structured_entry(
|
||||
"Failed to validate GenAI embedding access - service may not work correctly",
|
||||
"WARNING",
|
||||
{"error": str(e), "error_type": type(e).__name__}
|
||||
)
|
||||
validation_errors.append(f"GenAI: {str(e)}")
|
||||
|
||||
# 2. Validate GCS bucket access
|
||||
log_structured_entry(
|
||||
"Validating GCS bucket access",
|
||||
"INFO",
|
||||
{"bucket": cfg.bucket}
|
||||
)
|
||||
try:
|
||||
session = vs.storage._get_aio_session()
|
||||
token_obj = Token(
|
||||
session=session,
|
||||
scopes=["https://www.googleapis.com/auth/cloud-platform"],
|
||||
)
|
||||
access_token = await token_obj.get()
|
||||
headers = {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
async with session.get(
|
||||
f"https://storage.googleapis.com/storage/v1/b/{cfg.bucket}/o?maxResults=1",
|
||||
headers=headers,
|
||||
) as response:
|
||||
if response.status == 403:
|
||||
msg = f"Access denied to bucket '{cfg.bucket}'. Check permissions."
|
||||
log_structured_entry(
|
||||
"GCS bucket validation failed - access denied - service may not work correctly",
|
||||
"WARNING",
|
||||
{"bucket": cfg.bucket, "status": response.status}
|
||||
)
|
||||
validation_errors.append(msg)
|
||||
elif response.status == 404:
|
||||
msg = f"Bucket '{cfg.bucket}' not found. Check bucket name and project."
|
||||
log_structured_entry(
|
||||
"GCS bucket validation failed - not found - service may not work correctly",
|
||||
"WARNING",
|
||||
{"bucket": cfg.bucket, "status": response.status}
|
||||
)
|
||||
validation_errors.append(msg)
|
||||
elif not response.ok:
|
||||
body = await response.text()
|
||||
msg = f"Failed to access bucket '{cfg.bucket}': {response.status}"
|
||||
log_structured_entry(
|
||||
"GCS bucket validation failed - service may not work correctly",
|
||||
"WARNING",
|
||||
{"bucket": cfg.bucket, "status": response.status, "response": body}
|
||||
)
|
||||
validation_errors.append(msg)
|
||||
else:
|
||||
log_structured_entry(
|
||||
"GCS bucket validation successful",
|
||||
"INFO",
|
||||
{"bucket": cfg.bucket}
|
||||
)
|
||||
except Exception as e:
|
||||
log_structured_entry(
|
||||
"Failed to validate GCS bucket access - service may not work correctly",
|
||||
"WARNING",
|
||||
{"error": str(e), "error_type": type(e).__name__, "bucket": cfg.bucket}
|
||||
)
|
||||
validation_errors.append(f"GCS: {str(e)}")
|
||||
|
||||
# 3. Validate vector search endpoint access
|
||||
log_structured_entry(
|
||||
"Validating vector search endpoint access",
|
||||
"INFO",
|
||||
{"endpoint_name": cfg.endpoint_name}
|
||||
)
|
||||
try:
|
||||
# Try to get endpoint info
|
||||
headers = await vs._async_get_auth_headers()
|
||||
session = vs._get_aio_session()
|
||||
endpoint_url = (
|
||||
f"https://{cfg.location}-aiplatform.googleapis.com/v1/{cfg.endpoint_name}"
|
||||
)
|
||||
|
||||
async with session.get(endpoint_url, headers=headers) as response:
|
||||
if response.status == 403:
|
||||
msg = f"Access denied to endpoint '{cfg.endpoint_name}'. Check permissions."
|
||||
log_structured_entry(
|
||||
"Vector search endpoint validation failed - access denied - service may not work correctly",
|
||||
"WARNING",
|
||||
{"endpoint": cfg.endpoint_name, "status": response.status}
|
||||
)
|
||||
validation_errors.append(msg)
|
||||
elif response.status == 404:
|
||||
msg = f"Endpoint '{cfg.endpoint_name}' not found. Check endpoint name and project."
|
||||
log_structured_entry(
|
||||
"Vector search endpoint validation failed - not found - service may not work correctly",
|
||||
"WARNING",
|
||||
{"endpoint": cfg.endpoint_name, "status": response.status}
|
||||
)
|
||||
validation_errors.append(msg)
|
||||
elif not response.ok:
|
||||
body = await response.text()
|
||||
msg = f"Failed to access endpoint '{cfg.endpoint_name}': {response.status}"
|
||||
log_structured_entry(
|
||||
"Vector search endpoint validation failed - service may not work correctly",
|
||||
"WARNING",
|
||||
{"endpoint": cfg.endpoint_name, "status": response.status, "response": body}
|
||||
)
|
||||
validation_errors.append(msg)
|
||||
else:
|
||||
log_structured_entry(
|
||||
"Vector search endpoint validation successful",
|
||||
"INFO",
|
||||
{"endpoint": cfg.endpoint_name}
|
||||
)
|
||||
except Exception as e:
|
||||
log_structured_entry(
|
||||
"Failed to validate vector search endpoint access - service may not work correctly",
|
||||
"WARNING",
|
||||
{"error": str(e), "error_type": type(e).__name__, "endpoint": cfg.endpoint_name}
|
||||
)
|
||||
validation_errors.append(f"Vector Search: {str(e)}")
|
||||
|
||||
# Summary of validations
|
||||
if validation_errors:
|
||||
log_structured_entry(
|
||||
"MCP server started with validation errors - service may not work correctly",
|
||||
"WARNING",
|
||||
{"validation_errors": validation_errors, "error_count": len(validation_errors)}
|
||||
)
|
||||
else:
|
||||
log_structured_entry("All validations passed - MCP server initialization complete", "INFO")
|
||||
|
||||
yield AppContext(
|
||||
vector_search=vs,
|
||||
genai_client=genai_client,
|
||||
settings=cfg,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
log_structured_entry(
|
||||
"Failed to initialize MCP server",
|
||||
"ERROR",
|
||||
{
|
||||
"error": str(e),
|
||||
"error_type": type(e).__name__,
|
||||
}
|
||||
)
|
||||
raise
|
||||
finally:
|
||||
log_structured_entry("MCP server lifespan ending", "INFO")
|
||||
|
||||
|
||||
mcp = FastMCP(
|
||||
"knowledge-search",
|
||||
host=_args.host,
|
||||
port=_args.port,
|
||||
lifespan=lifespan,
|
||||
)
|
||||
|
||||
|
||||
@mcp.tool()
|
||||
async def knowledge_search(
|
||||
query: str,
|
||||
ctx: Context,
|
||||
source: SourceNamespace | None = None,
|
||||
) -> str:
|
||||
"""Search a knowledge base using a natural-language query.
|
||||
|
||||
Args:
|
||||
query: The text query to search for.
|
||||
ctx: MCP request context (injected automatically).
|
||||
source: Optional filter to restrict results by source.
|
||||
Allowed values: 'Educacion Financiera',
|
||||
'Productos y Servicios', 'Funcionalidades de la App Movil'.
|
||||
|
||||
Returns:
|
||||
A formatted string containing matched documents with id and content.
|
||||
|
||||
"""
|
||||
import time
|
||||
|
||||
app: AppContext = ctx.request_context.lifespan_context
|
||||
t0 = time.perf_counter()
|
||||
min_sim = 0.6
|
||||
|
||||
log_structured_entry(
|
||||
"knowledge_search request received",
|
||||
"INFO",
|
||||
{"query": query[:100]} # Log first 100 chars of query
|
||||
)
|
||||
|
||||
try:
|
||||
# Generate embedding for the query
|
||||
log_structured_entry("Generating query embedding", "INFO")
|
||||
try:
|
||||
response = await app.genai_client.aio.models.embed_content(
|
||||
model=app.settings.embedding_model,
|
||||
contents=query,
|
||||
config=genai_types.EmbedContentConfig(
|
||||
task_type="RETRIEVAL_QUERY",
|
||||
),
|
||||
)
|
||||
embedding = response.embeddings[0].values
|
||||
t_embed = time.perf_counter()
|
||||
log_structured_entry(
|
||||
"Query embedding generated successfully",
|
||||
"INFO",
|
||||
{"time_ms": round((t_embed - t0) * 1000, 1)}
|
||||
)
|
||||
except Exception as e:
|
||||
error_type = type(e).__name__
|
||||
error_msg = str(e)
|
||||
|
||||
# Check if it's a rate limit error
|
||||
if "429" in error_msg or "RESOURCE_EXHAUSTED" in error_msg:
|
||||
log_structured_entry(
|
||||
"Rate limit exceeded while generating embedding",
|
||||
"WARNING",
|
||||
{
|
||||
"error": error_msg,
|
||||
"error_type": error_type,
|
||||
"query": query[:100]
|
||||
}
|
||||
)
|
||||
return "Error: API rate limit exceeded. Please try again later."
|
||||
else:
|
||||
log_structured_entry(
|
||||
"Failed to generate query embedding",
|
||||
"ERROR",
|
||||
{
|
||||
"error": error_msg,
|
||||
"error_type": error_type,
|
||||
"query": query[:100]
|
||||
}
|
||||
)
|
||||
return f"Error generating embedding: {error_msg}"
|
||||
|
||||
# Perform vector search
|
||||
log_structured_entry("Performing vector search", "INFO")
|
||||
try:
|
||||
search_results = await app.vector_search.async_run_query(
|
||||
deployed_index_id=app.settings.deployed_index_id,
|
||||
query=embedding,
|
||||
limit=app.settings.search_limit,
|
||||
source=source,
|
||||
)
|
||||
t_search = time.perf_counter()
|
||||
except Exception as e:
|
||||
log_structured_entry(
|
||||
"Vector search failed",
|
||||
"ERROR",
|
||||
{
|
||||
"error": str(e),
|
||||
"error_type": type(e).__name__,
|
||||
"query": query[:100]
|
||||
}
|
||||
)
|
||||
return f"Error performing vector search: {str(e)}"
|
||||
|
||||
# Apply similarity filtering
|
||||
if search_results:
|
||||
max_sim = max(r["distance"] for r in search_results)
|
||||
cutoff = max_sim * 0.9
|
||||
search_results = [
|
||||
s
|
||||
for s in search_results
|
||||
if s["distance"] > cutoff and s["distance"] > min_sim
|
||||
]
|
||||
|
||||
log_structured_entry(
|
||||
"knowledge_search completed successfully",
|
||||
"INFO",
|
||||
{
|
||||
"embedding_ms": f"{round((t_embed - t0) * 1000, 1)}ms",
|
||||
"vector_search_ms": f"{round((t_search - t_embed) * 1000, 1)}ms",
|
||||
"total_ms": f"{round((t_search - t0) * 1000, 1)}ms",
|
||||
"source_filter": source.value if source is not None else None,
|
||||
"results_count": len(search_results),
|
||||
"chunks": [s["id"] for s in search_results]
|
||||
}
|
||||
)
|
||||
|
||||
# Format results as XML-like documents
|
||||
if not search_results:
|
||||
log_structured_entry(
|
||||
"No results found for query",
|
||||
"INFO",
|
||||
{"query": query[:100]}
|
||||
)
|
||||
return "No relevant documents found for your query."
|
||||
|
||||
formatted_results = [
|
||||
f"<document {i} name={result['id']}>\n{result['content']}\n</document {i}>"
|
||||
for i, result in enumerate(search_results, start=1)
|
||||
]
|
||||
return "\n".join(formatted_results)
|
||||
|
||||
except Exception as e:
|
||||
# Catch-all for any unexpected errors
|
||||
log_structured_entry(
|
||||
"Unexpected error in knowledge_search",
|
||||
"ERROR",
|
||||
{
|
||||
"error": str(e),
|
||||
"error_type": type(e).__name__,
|
||||
"query": query[:100]
|
||||
}
|
||||
)
|
||||
return f"Unexpected error during search: {str(e)}"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
mcp.run(transport=_args.transport)
|
||||
@@ -1,7 +1,7 @@
|
||||
[project]
|
||||
name = "knowledge-search-mcp"
|
||||
version = "0.1.0"
|
||||
description = "Add your description here"
|
||||
description = "MCP server for semantic search over Vertex AI Vector Search"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
@@ -13,11 +13,44 @@ dependencies = [
|
||||
"mcp[cli]>=1.26.0",
|
||||
"pydantic-settings>=2.9.1",
|
||||
"pyyaml>=6.0",
|
||||
"redis[hiredis]>=5.0.0,<7",
|
||||
"redisvl>=0.6.0",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
knowledge-search-mcp = "knowledge_search_mcp.__main__:main"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"google-adk>=1.25.1",
|
||||
"pytest>=8.0.0",
|
||||
"pytest-asyncio>=0.24.0",
|
||||
"pytest-cov>=6.0.0",
|
||||
"ruff>=0.15.2",
|
||||
"ty>=0.0.18",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests"]
|
||||
pythonpath = ["."]
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.8.3,<0.9.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.ruff]
|
||||
exclude = ["scripts", "tests"]
|
||||
|
||||
[tool.ty.src]
|
||||
exclude = ["scripts", "tests"]
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = ['ALL']
|
||||
ignore = [
|
||||
'D203', # one-blank-line-before-class
|
||||
'D213', # multi-line-summary-second-line
|
||||
'COM812', # missing-trailing-comma
|
||||
'ANN401', # dynamically-typed-any
|
||||
'ERA001', # commented-out-code
|
||||
]
|
||||
|
||||
@@ -23,7 +23,7 @@ if project := os.environ.get("PROJECT_ID"):
|
||||
if location := os.environ.get("LOCATION"):
|
||||
os.environ.setdefault("GOOGLE_CLOUD_LOCATION", location)
|
||||
|
||||
SERVER_SCRIPT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "main.py")
|
||||
SERVER_SCRIPT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "src", "knowledge_search_mcp", "main.py")
|
||||
|
||||
|
||||
def _parse_args() -> argparse.Namespace:
|
||||
15
src/knowledge_search_mcp/__init__.py
Normal file
15
src/knowledge_search_mcp/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""MCP server for semantic search over Vertex AI Vector Search."""
|
||||
|
||||
from .clients.storage import GoogleCloudFileStorage
|
||||
from .clients.vector_search import GoogleCloudVectorSearch
|
||||
from .models import AppContext, SearchResult, SourceNamespace
|
||||
from .utils.cache import LRUCache
|
||||
|
||||
__all__ = [
|
||||
"AppContext",
|
||||
"GoogleCloudFileStorage",
|
||||
"GoogleCloudVectorSearch",
|
||||
"LRUCache",
|
||||
"SearchResult",
|
||||
"SourceNamespace",
|
||||
]
|
||||
152
src/knowledge_search_mcp/__main__.py
Normal file
152
src/knowledge_search_mcp/__main__.py
Normal file
@@ -0,0 +1,152 @@
|
||||
"""MCP server for semantic search over Vertex AI Vector Search."""
|
||||
|
||||
import time
|
||||
|
||||
from mcp.server.fastmcp import Context, FastMCP
|
||||
|
||||
from .config import _args
|
||||
from .logging import log_structured_entry
|
||||
from .models import AppContext, SourceNamespace
|
||||
from .server import lifespan
|
||||
from .services.search import (
|
||||
filter_search_results,
|
||||
format_search_results,
|
||||
generate_query_embedding,
|
||||
)
|
||||
|
||||
# FastMCP application instance. host/port are only used by the network
# transports (sse / streamable-http); the stdio transport ignores them.
# CLI arguments are parsed once at import time in config._parse_args.
mcp = FastMCP(
    "knowledge-search",
    host=_args.host,
    port=_args.port,
    lifespan=lifespan,
)
|
||||
|
||||
|
||||
@mcp.tool()
async def knowledge_search(
    query: str,
    ctx: Context,
    source: SourceNamespace | None = None,
) -> str:
    """Search a knowledge base using a natural-language query.

    Pipeline: embed the query, consult the semantic cache (unfiltered
    queries only), run vector search, filter by similarity, format, and
    store the formatted answer back into the cache.

    Args:
        query: The text query to search for.
        ctx: MCP request context (injected automatically).
        source: Optional filter to restrict results by source.
            Allowed values: 'Educacion Financiera',
            'Productos y Servicios', 'Funcionalidades de la App Movil'.

    Returns:
        A formatted string containing matched documents with id and content,
        or a human-readable error string on failure (errors are returned,
        not raised, so the MCP client always gets text back).

    """
    app: AppContext = ctx.request_context.lifespan_context
    t0 = time.perf_counter()

    log_structured_entry(
        "knowledge_search request received",
        "INFO",
        {"query": query[:100]},  # Log first 100 chars of query
    )

    try:
        # Generate embedding for the query; the helper returns a
        # user-facing error string instead of raising.
        embedding, error = await generate_query_embedding(
            app.genai_client,
            app.settings.embedding_model,
            query,
        )
        if error:
            return error

        t_embed = time.perf_counter()
        log_structured_entry(
            "Query embedding generated successfully",
            "INFO",
            {"time_ms": round((t_embed - t0) * 1000, 1)},
        )

        # Check semantic cache before vector search. The cache is bypassed
        # when a source filter is set: cached entries are stored without
        # filter context and could return filtered-out documents.
        if app.semantic_cache is not None and source is None:
            cached = await app.semantic_cache.check(embedding)
            if cached is not None:
                t_cache = time.perf_counter()
                log_structured_entry(
                    "knowledge_search completed from cache",
                    "INFO",
                    {
                        "embedding_ms": f"{round((t_embed - t0) * 1000, 1)}ms",
                        "cache_check_ms": f"{round((t_cache - t_embed) * 1000, 1)}ms",
                        "total_ms": f"{round((t_cache - t0) * 1000, 1)}ms",
                        "cache_hit": True,
                    },
                )
                return cached

        # Perform vector search
        log_structured_entry("Performing vector search", "INFO")
        try:
            search_results = await app.vector_search.async_run_query(
                deployed_index_id=app.settings.deployed_index_id,
                query=embedding,
                limit=app.settings.search_limit,
                source=source,
            )
            t_search = time.perf_counter()
        except Exception as e:  # noqa: BLE001
            log_structured_entry(
                "Vector search failed",
                "ERROR",
                {"error": str(e), "error_type": type(e).__name__, "query": query[:100]},
            )
            return f"Error performing vector search: {e!s}"

        # Apply similarity filtering
        filtered_results = filter_search_results(search_results)

        log_structured_entry(
            "knowledge_search completed successfully",
            "INFO",
            {
                "embedding_ms": f"{round((t_embed - t0) * 1000, 1)}ms",
                "vector_search_ms": f"{round((t_search - t_embed) * 1000, 1)}ms",
                "total_ms": f"{round((t_search - t0) * 1000, 1)}ms",
                "source_filter": source.value if source is not None else None,
                "results_count": len(filtered_results),
                "chunks": [s["id"] for s in filtered_results],
                "cache_hit": False,
            },
        )

        # Format and return results
        formatted = format_search_results(filtered_results)

        if not filtered_results:
            log_structured_entry(
                "No results found for query", "INFO", {"query": query[:100]}
            )

        # Store in semantic cache (only for unfiltered queries with results)
        if app.semantic_cache is not None and source is None and filtered_results:
            await app.semantic_cache.store(query, formatted, embedding)

        return formatted

    except Exception as e:  # noqa: BLE001
        # Catch-all for any unexpected errors; returned as text so the
        # tool call never surfaces a raw exception to the MCP client.
        log_structured_entry(
            "Unexpected error in knowledge_search",
            "ERROR",
            {"error": str(e), "error_type": type(e).__name__, "query": query[:100]},
        )
        return f"Unexpected error during search: {e!s}"
|
||||
|
||||
|
||||
def main() -> None:
    """Entry point for the MCP server.

    The transport is selected by the --transport CLI flag, which is
    parsed at import time in config._parse_args (defaults to stdio).
    """
    mcp.run(transport=_args.transport)


if __name__ == "__main__":
    main()
|
||||
11
src/knowledge_search_mcp/clients/__init__.py
Normal file
11
src/knowledge_search_mcp/clients/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Client modules for Google Cloud services."""
|
||||
|
||||
from .base import BaseGoogleCloudClient
|
||||
from .storage import GoogleCloudFileStorage
|
||||
from .vector_search import GoogleCloudVectorSearch
|
||||
|
||||
__all__ = [
|
||||
"BaseGoogleCloudClient",
|
||||
"GoogleCloudFileStorage",
|
||||
"GoogleCloudVectorSearch",
|
||||
]
|
||||
30
src/knowledge_search_mcp/clients/base.py
Normal file
30
src/knowledge_search_mcp/clients/base.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Base client with shared aiohttp session management."""
|
||||
|
||||
import aiohttp
|
||||
|
||||
|
||||
class BaseGoogleCloudClient:
    """Base class with shared aiohttp session management.

    Subclasses share one pooled ClientSession, created lazily on first
    use and recreated transparently if it has been closed.
    """

    def __init__(self) -> None:
        """Start with no session; one is created on first request."""
        self._aio_session: aiohttp.ClientSession | None = None

    def _get_aio_session(self) -> aiohttp.ClientSession:
        """Return the pooled aiohttp session, creating it if needed."""
        session = self._aio_session
        if session is not None and not session.closed:
            return session
        # 60s total timeout per request; generous pool sized for bursts
        # of parallel GCS/Vector Search calls.
        self._aio_session = aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=60),
            connector=aiohttp.TCPConnector(limit=300, limit_per_host=50),
        )
        return self._aio_session

    async def close(self) -> None:
        """Close the aiohttp session if one is open."""
        session = self._aio_session
        if session and not session.closed:
            await session.close()
|
||||
150
src/knowledge_search_mcp/clients/storage.py
Normal file
150
src/knowledge_search_mcp/clients/storage.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Google Cloud Storage client with caching."""
|
||||
|
||||
import asyncio
|
||||
import io
|
||||
from typing import BinaryIO
|
||||
|
||||
import aiohttp
|
||||
from gcloud.aio.storage import Storage
|
||||
|
||||
from knowledge_search_mcp.logging import log_structured_entry
|
||||
from knowledge_search_mcp.utils.cache import LRUCache
|
||||
|
||||
from .base import BaseGoogleCloudClient
|
||||
|
||||
HTTP_TOO_MANY_REQUESTS = 429
|
||||
HTTP_SERVER_ERROR = 500
|
||||
|
||||
|
||||
class GoogleCloudFileStorage(BaseGoogleCloudClient):
    """Cache-aware helper for downloading files from Google Cloud Storage."""

    def __init__(self, bucket: str, cache_size: int = 100) -> None:
        """Initialize the storage helper with LRU cache.

        Args:
            bucket: Name of the GCS bucket to download from.
            cache_size: Maximum number of files kept in the in-process cache.

        """
        super().__init__()
        self.bucket_name = bucket
        self._aio_storage: Storage | None = None
        self._cache = LRUCache(max_size=cache_size)

    def _get_aio_storage(self) -> Storage:
        # Lazily create the async Storage client bound to the shared
        # aiohttp session from BaseGoogleCloudClient.
        if self._aio_storage is None:
            self._aio_storage = Storage(
                session=self._get_aio_session(),
            )
        return self._aio_storage

    async def async_get_file_stream(
        self,
        file_name: str,
        max_retries: int = 3,
    ) -> BinaryIO:
        """Get a file asynchronously with retry on transient errors.

        Retries on timeouts, HTTP 429, and HTTP 5xx with exponential
        backoff (0.5s, 1s, 2s, ...). Other HTTP errors are re-raised
        immediately. Successful downloads are stored in the LRU cache.

        Args:
            file_name: The blob name to retrieve.
            max_retries: Maximum number of retry attempts.

        Returns:
            A BytesIO stream with the file contents (``.name`` set to the
            blob name).

        Raises:
            TimeoutError: If all retry attempts fail. NOTE(review): raised
                even when the last failure was a retryable HTTP error, with
                the original exception chained as the cause.
            aiohttp.ClientResponseError: On non-retryable HTTP errors.

        """
        # Fast path: serve from the in-process cache without touching GCS.
        cached_content = self._cache.get(file_name)
        if cached_content is not None:
            log_structured_entry(
                "File retrieved from cache",
                "INFO",
                {"file": file_name, "bucket": self.bucket_name},
            )
            file_stream = io.BytesIO(cached_content)
            file_stream.name = file_name
            return file_stream

        log_structured_entry(
            "Starting file download from GCS",
            "INFO",
            {"file": file_name, "bucket": self.bucket_name},
        )

        storage_client = self._get_aio_storage()
        last_exception: Exception | None = None

        for attempt in range(max_retries):
            try:
                content = await storage_client.download(
                    self.bucket_name,
                    file_name,
                )
                self._cache.put(file_name, content)
                file_stream = io.BytesIO(content)
                file_stream.name = file_name
                log_structured_entry(
                    "File downloaded successfully",
                    "INFO",
                    {
                        "file": file_name,
                        "bucket": self.bucket_name,
                        "size_bytes": len(content),
                        "attempt": attempt + 1,
                    },
                )
            except TimeoutError as exc:
                # Transient: fall through to the backoff below.
                last_exception = exc
                log_structured_entry(
                    (
                        f"Timeout downloading gs://{self.bucket_name}/{file_name} "
                        f"(attempt {attempt + 1}/{max_retries})"
                    ),
                    "WARNING",
                    {"error": str(exc)},
                )
            except aiohttp.ClientResponseError as exc:
                last_exception = exc
                # 429 (rate limit) and 5xx are treated as transient; anything
                # else (e.g. 403/404) is a hard failure and re-raised.
                if (
                    exc.status == HTTP_TOO_MANY_REQUESTS
                    or exc.status >= HTTP_SERVER_ERROR
                ):
                    log_structured_entry(
                        (
                            f"HTTP {exc.status} downloading gs://{self.bucket_name}/"
                            f"{file_name} (attempt {attempt + 1}/{max_retries})"
                        ),
                        "WARNING",
                        {"status": exc.status, "message": str(exc)},
                    )
                else:
                    log_structured_entry(
                        f"Non-retryable HTTP error downloading gs://{self.bucket_name}/{file_name}",
                        "ERROR",
                        {"status": exc.status, "message": str(exc)},
                    )
                    raise
            else:
                # Success path: the try block completed without raising.
                return file_stream

            if attempt < max_retries - 1:
                # Exponential backoff before the next attempt.
                delay = 0.5 * (2**attempt)
                log_structured_entry(
                    "Retrying file download",
                    "INFO",
                    {"file": file_name, "delay_seconds": delay},
                )
                await asyncio.sleep(delay)

        msg = (
            f"Failed to download gs://{self.bucket_name}/{file_name} "
            f"after {max_retries} attempts"
        )
        log_structured_entry(
            "File download failed after all retries",
            "ERROR",
            {
                "file": file_name,
                "bucket": self.bucket_name,
                "max_retries": max_retries,
                "last_error": str(last_exception),
            },
        )
        raise TimeoutError(msg) from last_exception
||||
223
src/knowledge_search_mcp/clients/vector_search.py
Normal file
223
src/knowledge_search_mcp/clients/vector_search.py
Normal file
@@ -0,0 +1,223 @@
|
||||
"""Google Cloud Vector Search client."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Sequence
|
||||
|
||||
from gcloud.aio.auth import Token
|
||||
|
||||
from knowledge_search_mcp.logging import log_structured_entry
|
||||
from knowledge_search_mcp.models import SearchResult, SourceNamespace
|
||||
|
||||
from .base import BaseGoogleCloudClient
|
||||
from .storage import GoogleCloudFileStorage
|
||||
|
||||
|
||||
class GoogleCloudVectorSearch(BaseGoogleCloudClient):
    """Minimal async client for the Vertex AI Matching Engine REST API."""

    def __init__(
        self,
        project_id: str,
        location: str,
        bucket: str,
        index_name: str | None = None,
    ) -> None:
        """Store configuration used to issue Matching Engine queries.

        Args:
            project_id: Google Cloud project ID.
            location: Google Cloud region of the index endpoint.
            bucket: GCS bucket holding the chunk content files.
            index_name: Prefix under which chunk files live in the bucket
                (used to build ``{index_name}/contents/{id}.md`` paths).

        """
        super().__init__()
        self.project_id = project_id
        self.location = location
        # Owns its own storage helper; both are closed together in close().
        self.storage = GoogleCloudFileStorage(bucket=bucket)
        self.index_name = index_name
        self._async_token: Token | None = None
        # Endpoint metadata must be set via configure_index_endpoint
        # before any query can run.
        self._endpoint_domain: str | None = None
        self._endpoint_name: str | None = None

    async def _async_get_auth_headers(self) -> dict[str, str]:
        # Lazily create the ADC token helper; Token caches/refreshes the
        # access token internally, so get() is cheap on repeat calls.
        if self._async_token is None:
            self._async_token = Token(
                session=self._get_aio_session(),
                scopes=[
                    "https://www.googleapis.com/auth/cloud-platform",
                ],
            )
        access_token = await self._async_token.get()
        return {
            "Authorization": f"Bearer {access_token}",
            "Content-Type": "application/json",
        }

    async def close(self) -> None:
        """Close aiohttp sessions for both vector search and storage."""
        await super().close()
        await self.storage.close()

    def configure_index_endpoint(
        self,
        *,
        name: str,
        public_domain: str,
    ) -> None:
        """Persist the metadata needed to access a deployed endpoint.

        Args:
            name: Full resource name of the index endpoint; only the last
                path segment is used when building query URLs.
            public_domain: Public domain of the deployed endpoint.

        Raises:
            ValueError: If either value is empty.

        """
        if not name:
            msg = "Index endpoint name must be a non-empty string."
            raise ValueError(msg)
        if not public_domain:
            msg = "Index endpoint domain must be a non-empty public domain."
            raise ValueError(msg)
        self._endpoint_name = name
        self._endpoint_domain = public_domain

    async def async_run_query(
        self,
        deployed_index_id: str,
        query: Sequence[float],
        limit: int,
        source: SourceNamespace | None = None,
    ) -> list[SearchResult]:
        """Run an async similarity search via the REST API.

        Calls ``findNeighbors``, then downloads the content file for every
        neighbor from GCS in parallel.

        Args:
            deployed_index_id: The ID of the deployed index.
            query: The embedding vector for the search query.
            limit: Maximum number of nearest neighbors to return.
            source: Optional namespace filter to restrict results by source.

        Returns:
            A list of matched items with id, distance, and content.

        Raises:
            RuntimeError: If the endpoint is not configured or the API
                returns a non-2xx status.

        """
        if self._endpoint_domain is None or self._endpoint_name is None:
            msg = (
                "Missing endpoint metadata. Call "
                "`configure_index_endpoint` before querying."
            )
            log_structured_entry(
                "Vector search query failed - endpoint not configured",
                "ERROR",
                {"error": msg},
            )
            raise RuntimeError(msg)

        domain = self._endpoint_domain
        # Endpoint resource names end in ".../indexEndpoints/{id}".
        endpoint_id = self._endpoint_name.split("/")[-1]
        url = (
            f"https://{domain}/v1/projects/{self.project_id}"
            f"/locations/{self.location}"
            f"/indexEndpoints/{endpoint_id}:findNeighbors"
        )

        log_structured_entry(
            "Starting vector search query",
            "INFO",
            {
                "deployed_index_id": deployed_index_id,
                "neighbor_count": limit,
                "endpoint_id": endpoint_id,
                "embedding_dimension": len(query),
            },
        )

        datapoint: dict = {"feature_vector": list(query)}
        if source is not None:
            # Restrict matches to datapoints tagged with this source value.
            datapoint["restricts"] = [
                {"namespace": "source", "allow_list": [source.value]},
            ]
        payload = {
            "deployed_index_id": deployed_index_id,
            "queries": [
                {
                    "datapoint": datapoint,
                    "neighbor_count": limit,
                },
            ],
        }

        try:
            headers = await self._async_get_auth_headers()
            session = self._get_aio_session()
            async with session.post(
                url,
                json=payload,
                headers=headers,
            ) as response:
                if not response.ok:
                    body = await response.text()
                    msg = f"findNeighbors returned {response.status}: {body}"
                    log_structured_entry(
                        "Vector search API request failed",
                        "ERROR",
                        {
                            "status": response.status,
                            "response_body": body,
                            "deployed_index_id": deployed_index_id,
                        },
                    )
                    raise RuntimeError(msg)  # noqa: TRY301
                data = await response.json()

            # Single query in the payload, so take the first result set.
            neighbors = data.get("nearestNeighbors", [{}])[0].get("neighbors", [])
            log_structured_entry(
                "Vector search API request successful",
                "INFO",
                {
                    "neighbors_found": len(neighbors),
                    "deployed_index_id": deployed_index_id,
                },
            )

            if not neighbors:
                log_structured_entry(
                    "No neighbors found in vector search",
                    "WARNING",
                    {"deployed_index_id": deployed_index_id},
                )
                return []

            # Fetch content for all neighbors concurrently from GCS.
            content_tasks = []
            for neighbor in neighbors:
                datapoint_id = neighbor["datapoint"]["datapointId"]
                file_path = f"{self.index_name}/contents/{datapoint_id}.md"
                content_tasks.append(
                    self.storage.async_get_file_stream(file_path),
                )

            log_structured_entry(
                "Fetching content for search results",
                "INFO",
                {"file_count": len(content_tasks)},
            )

            file_streams = await asyncio.gather(*content_tasks)
            results: list[SearchResult] = []
            for neighbor, stream in zip(
                neighbors,
                file_streams,
                strict=True,
            ):
                results.append(
                    SearchResult(
                        id=neighbor["datapoint"]["datapointId"],
                        distance=neighbor["distance"],
                        content=stream.read().decode("utf-8"),
                    ),
                )

            log_structured_entry(
                "Vector search completed successfully",
                "INFO",
                {"results_count": len(results), "deployed_index_id": deployed_index_id},
            )
            return results  # noqa: TRY300

        except Exception as e:
            # Log with context, then re-raise for the caller to handle.
            log_structured_entry(
                "Vector search query failed with exception",
                "ERROR",
                {
                    "error": str(e),
                    "error_type": type(e).__name__,
                    "deployed_index_id": deployed_index_id,
                },
            )
            raise
||||
111
src/knowledge_search_mcp/config.py
Normal file
111
src/knowledge_search_mcp/config.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""Configuration management for the MCP server."""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pydantic_settings import (
|
||||
BaseSettings,
|
||||
PydanticBaseSettingsSource,
|
||||
YamlConfigSettingsSource,
|
||||
)
|
||||
|
||||
|
||||
def _parse_args() -> argparse.Namespace:
|
||||
"""Parse command-line arguments.
|
||||
|
||||
Returns a namespace with default values if running under pytest.
|
||||
"""
|
||||
# Don't parse args if running under pytest
|
||||
if "pytest" in sys.modules:
|
||||
parser = argparse.ArgumentParser()
|
||||
return argparse.Namespace(
|
||||
transport="stdio",
|
||||
host="0.0.0.0", # noqa: S104
|
||||
port=8080,
|
||||
config=os.environ.get("CONFIG_FILE", "config.yaml"),
|
||||
)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"--transport",
|
||||
choices=["stdio", "sse", "streamable-http"],
|
||||
default="stdio",
|
||||
)
|
||||
parser.add_argument("--host", default="0.0.0.0") # noqa: S104
|
||||
parser.add_argument("--port", type=int, default=8080)
|
||||
parser.add_argument(
|
||||
"--config",
|
||||
default=os.environ.get("CONFIG_FILE", "config.yaml"),
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
_args = _parse_args()
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Server configuration populated from env vars and a YAML config file."""

    # YAML file path comes from the --config CLI flag (or CONFIG_FILE env var).
    model_config = {"env_file": ".env", "yaml_file": _args.config}

    # Required Google Cloud settings -- no defaults, must be supplied
    # via env vars, .env, or the YAML config file.
    project_id: str
    location: str
    bucket: str
    index_name: str
    deployed_index_id: str
    endpoint_name: str
    endpoint_domain: str
    # Embedding and search behaviour.
    embedding_model: str = "gemini-embedding-001"
    search_limit: int = 10
    # Logging configuration.
    log_name: str = "va_agent_evaluation_logs"
    log_level: str = "INFO"
    cloud_logging_enabled: bool = False

    # Semantic cache (Redis); the cache is skipped entirely when
    # redis_url is None.
    redis_url: str | None = None
    cache_name: str = "knowledge_search_cache"
    cache_vector_dims: int = 3072
    cache_distance_threshold: float = 0.12
    cache_ttl: int | None = 3600  # seconds; None presumably disables expiry -- TODO confirm

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        """Customize the order of settings sources to include YAML config.

        Earlier sources take precedence; YAML sits between the .env file
        and secret files.
        """
        return (
            init_settings,
            env_settings,
            dotenv_settings,
            YamlConfigSettingsSource(settings_cls),
            file_secret_settings,
        )
|
||||
|
||||
|
||||
# Lazily created singleton Settings instance, populated by get_config().
_cfg: Settings | None = None


def get_config() -> Settings:
    """Return the singleton Settings instance, creating it on first call."""
    global _cfg  # noqa: PLW0603
    if _cfg is not None:
        return _cfg
    _cfg = Settings.model_validate({})
    return _cfg
|
||||
|
||||
|
||||
# Backwards-compatible module-level accessor: reading cfg.<attr> resolves
# lazily against the singleton Settings instance.
class _ConfigProxy:
    """Proxy object that lazily loads config on attribute access."""

    def __getattr__(self, name: str) -> object:
        """Forward attribute lookups to the lazily loaded Settings."""
        config = get_config()
        return getattr(config, name)


cfg = _ConfigProxy()
|
||||
67
src/knowledge_search_mcp/logging.py
Normal file
67
src/knowledge_search_mcp/logging.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""Centralized Cloud Logging setup.
|
||||
|
||||
Uses CloudLoggingHandler (background thread) so logging does not add latency.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Literal
|
||||
|
||||
import google.cloud.logging
|
||||
from google.cloud.logging.handlers import CloudLoggingHandler
|
||||
|
||||
from .config import get_config
|
||||
|
||||
# Singleton evaluation logger, created on first use by _get_logger().
_eval_log: logging.Logger | None = None


def _get_logger() -> logging.Logger:
    """Get or create the singleton evaluation logger.

    When cloud_logging_enabled is set, attaches a CloudLoggingHandler
    (background-thread transport); otherwise, or if Cloud Logging setup
    fails, falls back to console logging via basicConfig.
    """
    global _eval_log  # noqa: PLW0603
    if _eval_log is not None:
        return _eval_log

    cfg = get_config()
    logger = logging.getLogger(cfg.log_name)
    # Idempotency guard: if a CloudLoggingHandler is already attached
    # (e.g. after a module reload), reuse the logger without adding another.
    if any(isinstance(h, CloudLoggingHandler) for h in logger.handlers):
        _eval_log = logger
        return logger

    if cfg.cloud_logging_enabled:
        try:
            client = google.cloud.logging.Client(project=cfg.project_id)
            handler = CloudLoggingHandler(client, name=cfg.log_name)  # async transport
            logger.addHandler(handler)
            logger.setLevel(getattr(logging, cfg.log_level.upper()))
        except Exception as e:  # noqa: BLE001
            # Fallback to console if Cloud Logging is unavailable (local dev)
            logging.basicConfig(level=getattr(logging, cfg.log_level.upper()))
            logger = logging.getLogger(cfg.log_name)
            logger.warning("Cloud Logging setup failed; using console. Error: %s", e)
    else:
        # Cloud Logging disabled: plain console logging.
        logging.basicConfig(level=getattr(logging, cfg.log_level.upper()))
        logger = logging.getLogger(cfg.log_name)

    _eval_log = logger
    return logger
|
||||
|
||||
|
||||
def log_structured_entry(
    message: str,
    severity: Literal["INFO", "WARNING", "ERROR"],
    custom_log: dict | None = None,
) -> None:
    """Emit a JSON-structured log row.

    Args:
        message: Short label for the row (e.g., "Final agent turn").
        severity: "INFO" | "WARNING" | "ERROR"
        custom_log: A dict with your structured payload.

    """
    # Unknown severities degrade gracefully to INFO.
    json_fields = {"message": message, "custom": custom_log or {}}
    _get_logger().log(
        getattr(logging, severity.upper(), logging.INFO),
        message,
        extra={"json_fields": json_fields},
    )
|
||||
38
src/knowledge_search_mcp/models.py
Normal file
38
src/knowledge_search_mcp/models.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Domain models for knowledge search MCP server."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
from typing import TYPE_CHECKING, TypedDict
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from google import genai
|
||||
|
||||
from .clients.vector_search import GoogleCloudVectorSearch
|
||||
from .config import Settings
|
||||
from .services.semantic_cache import KnowledgeSemanticCache
|
||||
|
||||
|
||||
class SourceNamespace(StrEnum):
    """Allowed values for the 'source' namespace filter.

    Values are sent verbatim in the Vector Search restrict allow_list,
    so they must match the index's 'source' namespace tokens exactly.
    """

    EDUCACION_FINANCIERA = "Educacion Financiera"
    PRODUCTOS_Y_SERVICIOS = "Productos y Servicios"
    FUNCIONALIDADES_APP_MOVIL = "Funcionalidades de la App Movil"
|
||||
|
||||
|
||||
class SearchResult(TypedDict):
    """Structured response item returned by the vector search API."""

    # Datapoint ID from the Matching Engine neighbor result.
    id: str
    # Distance reported by the index for this neighbor
    # (metric/direction depends on index config -- TODO confirm).
    distance: float
    # UTF-8 decoded document content fetched from GCS for this datapoint.
    content: str
|
||||
|
||||
|
||||
@dataclass
class AppContext:
    """Shared resources initialised once at server startup.

    Built by the server lifespan and exposed to tools through the MCP
    request context's lifespan_context.
    """

    # Configured Matching Engine client (also owns the GCS storage helper).
    vector_search: "GoogleCloudVectorSearch"
    # Vertex AI GenAI client used for query embeddings.
    genai_client: "genai.Client"
    # Resolved application settings.
    settings: "Settings"
    # Redis-backed semantic cache; None when Redis is not configured
    # or its initialisation failed.
    semantic_cache: "KnowledgeSemanticCache | None" = None
|
||||
168
src/knowledge_search_mcp/server.py
Normal file
168
src/knowledge_search_mcp/server.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""MCP server lifecycle management."""
|
||||
|
||||
from collections.abc import AsyncIterator
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from google import genai
|
||||
from mcp.server.fastmcp import FastMCP
|
||||
|
||||
from .clients.vector_search import GoogleCloudVectorSearch
|
||||
from .config import get_config
|
||||
from .logging import log_structured_entry
|
||||
from .models import AppContext
|
||||
from .services.semantic_cache import KnowledgeSemanticCache
|
||||
from .services.validation import (
|
||||
validate_gcs_access,
|
||||
validate_genai_access,
|
||||
validate_vector_search_access,
|
||||
)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(_server: FastMCP) -> AsyncIterator[AppContext]:
    """Create and configure the vector-search client for the server lifetime.

    Startup order: build the Vector Search client, configure its endpoint,
    create the GenAI client, run non-blocking validations, optionally set
    up the Redis semantic cache, then yield the shared AppContext. All
    clients are closed on shutdown.
    """
    # Get config with proper types for initialization
    config_for_init = get_config()

    log_structured_entry(
        "Initializing MCP server",
        "INFO",
        {
            "project_id": config_for_init.project_id,
            "location": config_for_init.location,
            "bucket": config_for_init.bucket,
            "index_name": config_for_init.index_name,
        },
    )

    # Kept outside the try so the finally block can close it even when
    # a later initialization step fails.
    vs: GoogleCloudVectorSearch | None = None
    try:
        # Initialize vector search client
        log_structured_entry("Creating GoogleCloudVectorSearch client", "INFO")
        vs = GoogleCloudVectorSearch(
            project_id=config_for_init.project_id,
            location=config_for_init.location,
            bucket=config_for_init.bucket,
            index_name=config_for_init.index_name,
        )

        # Configure endpoint
        log_structured_entry(
            "Configuring index endpoint",
            "INFO",
            {
                "endpoint_name": config_for_init.endpoint_name,
                "endpoint_domain": config_for_init.endpoint_domain,
            },
        )
        vs.configure_index_endpoint(
            name=config_for_init.endpoint_name,
            public_domain=config_for_init.endpoint_domain,
        )

        # Initialize GenAI client
        log_structured_entry(
            "Creating GenAI client",
            "INFO",
            {
                "project_id": config_for_init.project_id,
                "location": config_for_init.location,
            },
        )
        genai_client = genai.Client(
            vertexai=True,
            project=config_for_init.project_id,
            location=config_for_init.location,
        )

        # Validate credentials and configuration by testing actual resources
        # These validations are non-blocking - errors are logged but won't stop startup
        log_structured_entry("Starting validation of credentials and resources", "INFO")

        validation_errors = []

        # Run all validations
        # NOTE: get_config() returns the same singleton as config_for_init.
        config = get_config()
        genai_error = await validate_genai_access(genai_client, config)
        if genai_error:
            validation_errors.append(genai_error)

        gcs_error = await validate_gcs_access(vs, config)
        if gcs_error:
            validation_errors.append(gcs_error)

        vs_error = await validate_vector_search_access(vs, config)
        if vs_error:
            validation_errors.append(vs_error)

        # Summary of validations
        if validation_errors:
            log_structured_entry(
                (
                    "MCP server started with validation errors - "
                    "service may not work correctly"
                ),
                "WARNING",
                {
                    "validation_errors": validation_errors,
                    "error_count": len(validation_errors),
                },
            )
        else:
            log_structured_entry(
                "All validations passed - MCP server initialization complete", "INFO"
            )

        # Initialize semantic cache if Redis is configured.
        # A cache failure is non-fatal: the server runs without caching.
        semantic_cache = None
        if config_for_init.redis_url:
            try:
                semantic_cache = KnowledgeSemanticCache(
                    redis_url=config_for_init.redis_url,
                    name=config_for_init.cache_name,
                    vector_dims=config_for_init.cache_vector_dims,
                    distance_threshold=config_for_init.cache_distance_threshold,
                    ttl=config_for_init.cache_ttl,
                )
                # NOTE(review): redis_url may embed credentials -- confirm it
                # is safe to include in logs.
                log_structured_entry(
                    "Semantic cache initialized",
                    "INFO",
                    {"redis_url": config_for_init.redis_url, "cache_name": config_for_init.cache_name},
                )
            except Exception as e:
                log_structured_entry(
                    "Semantic cache initialization failed, continuing without cache",
                    "WARNING",
                    {"error": str(e), "error_type": type(e).__name__},
                )

        # Hand the shared resources to request handlers for the server's
        # lifetime. NOTE(review): exceptions raised while the server is
        # running propagate back through this yield and are logged below
        # as initialization failures -- confirm that labelling is intended.
        yield AppContext(
            vector_search=vs,
            genai_client=genai_client,
            settings=config,
            semantic_cache=semantic_cache,
        )

    except Exception as e:
        log_structured_entry(
            "Failed to initialize MCP server",
            "ERROR",
            {
                "error": str(e),
                "error_type": type(e).__name__,
            },
        )
        raise
    finally:
        log_structured_entry("MCP server lifespan ending", "INFO")
        # Clean up resources
        if vs is not None:
            try:
                await vs.close()
                log_structured_entry("Closed aiohttp sessions", "INFO")
            except Exception as e:  # noqa: BLE001
                log_structured_entry(
                    "Error closing aiohttp sessions",
                    "WARNING",
                    {"error": str(e), "error_type": type(e).__name__},
                )
|
||||
21
src/knowledge_search_mcp/services/__init__.py
Normal file
21
src/knowledge_search_mcp/services/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Service modules for business logic."""
|
||||
|
||||
from .search import (
|
||||
filter_search_results,
|
||||
format_search_results,
|
||||
generate_query_embedding,
|
||||
)
|
||||
from .validation import (
|
||||
validate_gcs_access,
|
||||
validate_genai_access,
|
||||
validate_vector_search_access,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"filter_search_results",
|
||||
"format_search_results",
|
||||
"generate_query_embedding",
|
||||
"validate_gcs_access",
|
||||
"validate_genai_access",
|
||||
"validate_vector_search_access",
|
||||
]
|
||||
101
src/knowledge_search_mcp/services/search.py
Normal file
101
src/knowledge_search_mcp/services/search.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Search helper functions."""
|
||||
|
||||
from google import genai
|
||||
from google.genai import types as genai_types
|
||||
|
||||
from knowledge_search_mcp.logging import log_structured_entry
|
||||
from knowledge_search_mcp.models import SearchResult
|
||||
|
||||
|
||||
async def generate_query_embedding(
    genai_client: genai.Client,
    embedding_model: str,
    query: str,
) -> tuple[list[float], str | None]:
    """Embed a search query with the configured GenAI embedding model.

    Returns:
        Tuple of (embedding vector, error message). Error message is None on success.

    """
    # Reject blank queries before spending an API call.
    if not query or not query.strip():
        return ([], "Error: Query cannot be empty")

    log_structured_entry("Generating query embedding", "INFO")
    try:
        response = await genai_client.aio.models.embed_content(
            model=embedding_model,
            contents=query,
            config=genai_types.EmbedContentConfig(
                task_type="RETRIEVAL_QUERY",
            ),
        )
        if not response.embeddings or not response.embeddings[0].values:
            return ([], "Error: Failed to generate embedding - empty response")
        return (response.embeddings[0].values, None)  # noqa: TRY300
    except Exception as e:  # noqa: BLE001
        details = {
            "error": str(e),
            "error_type": type(e).__name__,
            # Truncate the query so log entries stay small.
            "query": query[:100],
        }
        # Rate-limit failures get a friendlier, retryable message.
        if "429" in details["error"] or "RESOURCE_EXHAUSTED" in details["error"]:
            log_structured_entry(
                "Rate limit exceeded while generating embedding",
                "WARNING",
                details,
            )
            return ([], "Error: API rate limit exceeded. Please try again later.")
        log_structured_entry(
            "Failed to generate query embedding",
            "ERROR",
            details,
        )
        return ([], f"Error generating embedding: {details['error']}")
|
||||
|
||||
|
||||
def filter_search_results(
    results: list[SearchResult],
    min_similarity: float = 0.6,
    top_percent: float = 0.9,
) -> list[SearchResult]:
    """Filter search results by similarity thresholds.

    Args:
        results: Raw search results from vector search.
        min_similarity: Minimum similarity score (distance) to include.
        top_percent: Keep results within this percentage of the top score.

    Returns:
        Filtered list of search results.

    """
    if not results:
        return []

    # Relative cutoff: only results close enough to the best score survive.
    best_score = max(item["distance"] for item in results)
    relative_floor = best_score * top_percent

    kept: list[SearchResult] = []
    for item in results:
        if item["distance"] > relative_floor and item["distance"] > min_similarity:
            kept.append(item)
    return kept
|
||||
|
||||
|
||||
def format_search_results(results: list[SearchResult]) -> str:
    """Format search results as XML-like documents.

    Args:
        results: List of search results to format.

    Returns:
        Formatted string with document tags, one block per result,
        or a fixed "no results" message when the list is empty.

    """
    if not results:
        return "No relevant documents found for your query."

    blocks: list[str] = []
    for index, entry in enumerate(results, start=1):
        blocks.append(
            f"<document {index} name={entry['id']}>\n"
            f"{entry['content']}\n"
            f"</document {index}>"
        )
    return "\n".join(blocks)
|
||||
97
src/knowledge_search_mcp/services/semantic_cache.py
Normal file
97
src/knowledge_search_mcp/services/semantic_cache.py
Normal file
@@ -0,0 +1,97 @@
|
||||
# ruff: noqa: INP001
|
||||
"""Semantic cache backed by Redis for knowledge search results."""
|
||||
|
||||
from redisvl.extensions.cache.llm.semantic import SemanticCache
|
||||
from redisvl.utils.vectorize.custom import CustomVectorizer
|
||||
|
||||
from ..logging import log_structured_entry
|
||||
|
||||
|
||||
def _stub_embed(content: object) -> list[float]:
|
||||
"""Stub vectorizer so SemanticCache creates an index with the right dims.
|
||||
|
||||
Never called at runtime — we always pass pre-computed vectors to
|
||||
``acheck`` and ``astore``. Only invoked once by ``CustomVectorizer``
|
||||
at init time to discover the dimensionality.
|
||||
"""
|
||||
return [0.0] * _stub_embed.dims # type: ignore[attr-defined]
|
||||
|
||||
|
||||
class KnowledgeSemanticCache:
    """Thin wrapper around RedisVL SemanticCache with FLAT indexing."""

    def __init__(
        self,
        redis_url: str,
        name: str = "knowledge_search_cache",
        vector_dims: int = 3072,
        distance_threshold: float = 0.12,
        ttl: int | None = 3600,
    ) -> None:
        """Create the underlying RedisVL cache.

        Args:
            redis_url: Connection URL for the Redis instance.
            name: Name of the cache index.
            vector_dims: Dimensionality of the query embeddings.
            distance_threshold: Maximum vector distance counted as a hit.
            ttl: Entry time-to-live in seconds, or None for no expiry.
        """
        # The stub vectorizer only exists so the index is created with the
        # right dimensionality; real vectors are always supplied explicitly.
        _stub_embed.dims = vector_dims  # type: ignore[attr-defined]

        self._cache = SemanticCache(
            name=name,
            distance_threshold=distance_threshold,
            ttl=ttl,
            redis_url=redis_url,
            vectorizer=CustomVectorizer(embed=_stub_embed),
            overwrite=False,
        )
        self._name = name

    async def check(
        self,
        embedding: list[float],
    ) -> str | None:
        """Return cached response for a semantically similar query, or None."""
        try:
            matches = await self._cache.acheck(
                vector=embedding,
                num_results=1,
                return_fields=["response", "prompt", "vector_distance"],
            )
        except Exception as e:
            # Cache failures must never break the search path.
            log_structured_entry(
                "Semantic cache check failed, skipping cache",
                "WARNING",
                {"error": str(e), "error_type": type(e).__name__},
            )
            return None

        if not matches:
            return None

        best = matches[0]
        log_structured_entry(
            "Semantic cache hit",
            "INFO",
            {
                "vector_distance": best.get("vector_distance"),
                "original_prompt": best.get("prompt", "")[:100],
            },
        )
        return best.get("response")

    async def store(
        self,
        query: str,
        response: str,
        embedding: list[float],
        metadata: dict | None = None,
    ) -> None:
        """Store a query/response pair in the cache."""
        try:
            await self._cache.astore(
                prompt=query,
                response=response,
                vector=embedding,
                metadata=metadata,
            )
        except Exception as e:
            # Best-effort write: log the failure and carry on.
            log_structured_entry(
                "Semantic cache store failed",
                "WARNING",
                {"error": str(e), "error_type": type(e).__name__},
            )
|
||||
214
src/knowledge_search_mcp/services/validation.py
Normal file
214
src/knowledge_search_mcp/services/validation.py
Normal file
@@ -0,0 +1,214 @@
|
||||
"""Validation functions for Google Cloud services."""
|
||||
|
||||
from gcloud.aio.auth import Token
|
||||
from google import genai
|
||||
from google.genai import types as genai_types
|
||||
|
||||
from knowledge_search_mcp.clients.vector_search import GoogleCloudVectorSearch
|
||||
from knowledge_search_mcp.config import Settings
|
||||
from knowledge_search_mcp.logging import log_structured_entry
|
||||
|
||||
# HTTP status codes
|
||||
HTTP_FORBIDDEN = 403
|
||||
HTTP_NOT_FOUND = 404
|
||||
|
||||
|
||||
async def validate_genai_access(
    genai_client: genai.Client, cfg: Settings
) -> str | None:
    """Validate GenAI embedding access.

    Returns:
        Error message if validation fails, None if successful.

    """
    log_structured_entry("Validating GenAI embedding access", "INFO")
    try:
        # Probe the embedding endpoint with a trivial request.
        probe = await genai_client.aio.models.embed_content(
            model=cfg.embedding_model,
            contents="test",
            config=genai_types.EmbedContentConfig(
                task_type="RETRIEVAL_QUERY",
            ),
        )
    except Exception as e:  # noqa: BLE001
        log_structured_entry(
            (
                "Failed to validate GenAI embedding access - "
                "service may not work correctly"
            ),
            "WARNING",
            {"error": str(e), "error_type": type(e).__name__},
        )
        return f"GenAI: {e!s}"

    if not (probe and probe.embeddings):
        msg = "Embedding validation returned empty response"
        log_structured_entry(msg, "WARNING")
        return msg

    values = probe.embeddings[0].values
    log_structured_entry(
        "GenAI embedding validation successful",
        "INFO",
        {"embedding_dimension": len(values) if values else 0},
    )
    return None
|
||||
|
||||
|
||||
async def validate_gcs_access(vs: GoogleCloudVectorSearch, cfg: Settings) -> str | None:
    """Validate GCS bucket access.

    Returns:
        Error message if validation fails, None if successful.

    """
    log_structured_entry("Validating GCS bucket access", "INFO", {"bucket": cfg.bucket})
    # Cheapest possible probe: list at most one object from the bucket.
    list_url = (
        f"https://storage.googleapis.com/storage/v1/b/{cfg.bucket}/o?maxResults=1"
    )
    try:
        session = vs.storage._get_aio_session()  # noqa: SLF001
        # Obtain an OAuth bearer token using the client's aiohttp session.
        access_token = await Token(
            session=session,
            scopes=["https://www.googleapis.com/auth/cloud-platform"],
        ).get()

        async with session.get(
            list_url,
            headers={"Authorization": f"Bearer {access_token}"},
        ) as response:
            status = response.status
            if status == HTTP_FORBIDDEN:
                log_structured_entry(
                    (
                        "GCS bucket validation failed - access denied - "
                        "service may not work correctly"
                    ),
                    "WARNING",
                    {"bucket": cfg.bucket, "status": status},
                )
                return f"Access denied to bucket '{cfg.bucket}'. Check permissions."
            if status == HTTP_NOT_FOUND:
                log_structured_entry(
                    (
                        "GCS bucket validation failed - not found - "
                        "service may not work correctly"
                    ),
                    "WARNING",
                    {"bucket": cfg.bucket, "status": status},
                )
                return f"Bucket '{cfg.bucket}' not found. Check bucket name and project."
            if not response.ok:
                log_structured_entry(
                    "GCS bucket validation failed - service may not work correctly",
                    "WARNING",
                    {
                        "bucket": cfg.bucket,
                        "status": status,
                        "response": await response.text(),
                    },
                )
                return f"Failed to access bucket '{cfg.bucket}': {status}"
            log_structured_entry(
                "GCS bucket validation successful", "INFO", {"bucket": cfg.bucket}
            )
            return None
    except Exception as e:  # noqa: BLE001
        log_structured_entry(
            "Failed to validate GCS bucket access - service may not work correctly",
            "WARNING",
            {"error": str(e), "error_type": type(e).__name__, "bucket": cfg.bucket},
        )
        return f"GCS: {e!s}"
|
||||
|
||||
|
||||
async def validate_vector_search_access(
    vs: GoogleCloudVectorSearch, cfg: Settings
) -> str | None:
    """Validate vector search endpoint access.

    Returns:
        Error message if validation fails, None if successful.

    """
    log_structured_entry(
        "Validating vector search endpoint access",
        "INFO",
        {"endpoint_name": cfg.endpoint_name},
    )
    # GET on the endpoint resource itself is enough to prove access.
    endpoint_url = (
        f"https://{cfg.location}-aiplatform.googleapis.com/v1/{cfg.endpoint_name}"
    )
    try:
        auth_headers = await vs._async_get_auth_headers()  # noqa: SLF001
        session = vs._get_aio_session()  # noqa: SLF001

        async with session.get(endpoint_url, headers=auth_headers) as response:
            status = response.status
            if status == HTTP_FORBIDDEN:
                log_structured_entry(
                    (
                        "Vector search endpoint validation failed - "
                        "access denied - service may not work correctly"
                    ),
                    "WARNING",
                    {"endpoint": cfg.endpoint_name, "status": status},
                )
                return (
                    f"Access denied to endpoint '{cfg.endpoint_name}'. "
                    "Check permissions."
                )
            if status == HTTP_NOT_FOUND:
                log_structured_entry(
                    (
                        "Vector search endpoint validation failed - "
                        "not found - service may not work correctly"
                    ),
                    "WARNING",
                    {"endpoint": cfg.endpoint_name, "status": status},
                )
                return (
                    f"Endpoint '{cfg.endpoint_name}' not found. "
                    "Check endpoint name and project."
                )
            if not response.ok:
                log_structured_entry(
                    (
                        "Vector search endpoint validation failed - "
                        "service may not work correctly"
                    ),
                    "WARNING",
                    {
                        "endpoint": cfg.endpoint_name,
                        "status": status,
                        "response": await response.text(),
                    },
                )
                return (
                    f"Failed to access endpoint '{cfg.endpoint_name}': "
                    f"{status}"
                )
            log_structured_entry(
                "Vector search endpoint validation successful",
                "INFO",
                {"endpoint": cfg.endpoint_name},
            )
            return None
    except Exception as e:  # noqa: BLE001
        log_structured_entry(
            (
                "Failed to validate vector search endpoint access - "
                "service may not work correctly"
            ),
            "WARNING",
            {
                "error": str(e),
                "error_type": type(e).__name__,
                "endpoint": cfg.endpoint_name,
            },
        )
        return f"Vector Search: {e!s}"
|
||||
5
src/knowledge_search_mcp/utils/__init__.py
Normal file
5
src/knowledge_search_mcp/utils/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Utility modules for knowledge search MCP server."""
|
||||
|
||||
from .cache import LRUCache
|
||||
|
||||
__all__ = ["LRUCache"]
|
||||
32
src/knowledge_search_mcp/utils/cache.py
Normal file
32
src/knowledge_search_mcp/utils/cache.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""LRU cache implementation."""
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class LRUCache:
|
||||
"""Simple LRU cache with size limit."""
|
||||
|
||||
def __init__(self, max_size: int = 100) -> None:
|
||||
"""Initialize cache with maximum size."""
|
||||
self.cache: OrderedDict[str, bytes] = OrderedDict()
|
||||
self.max_size = max_size
|
||||
|
||||
def get(self, key: str) -> bytes | None:
|
||||
"""Get item from cache, returning None if not found."""
|
||||
if key not in self.cache:
|
||||
return None
|
||||
# Move to end to mark as recently used
|
||||
self.cache.move_to_end(key)
|
||||
return self.cache[key]
|
||||
|
||||
def put(self, key: str, value: bytes) -> None:
|
||||
"""Put item in cache, evicting oldest if at capacity."""
|
||||
if key in self.cache:
|
||||
self.cache.move_to_end(key)
|
||||
self.cache[key] = value
|
||||
if len(self.cache) > self.max_size:
|
||||
self.cache.popitem(last=False)
|
||||
|
||||
def __contains__(self, key: str) -> bool:
|
||||
"""Check if key exists in cache."""
|
||||
return key in self.cache
|
||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Tests for knowledge-search-mcp."""
|
||||
36
tests/conftest.py
Normal file
36
tests/conftest.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Pytest configuration and shared fixtures."""
|
||||
|
||||
import os
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def mock_env_vars(monkeypatch):
    """Set required environment variables for testing."""
    monkeypatch.setenv("PROJECT_ID", "test-project")
    monkeypatch.setenv("LOCATION", "us-central1")
    monkeypatch.setenv("BUCKET", "test-bucket")
    monkeypatch.setenv("INDEX_NAME", "test-index")
    monkeypatch.setenv("DEPLOYED_INDEX_ID", "test-deployed-index")
    monkeypatch.setenv(
        "ENDPOINT_NAME", "projects/test/locations/us-central1/indexEndpoints/test"
    )
    monkeypatch.setenv(
        "ENDPOINT_DOMAIN", "test.us-central1-aiplatform.googleapis.com"
    )
|
||||
|
||||
|
||||
@pytest.fixture
def mock_gcs_storage():
    """Mock Google Cloud Storage client."""
    return MagicMock()
|
||||
|
||||
|
||||
@pytest.fixture
def mock_vector_search():
    """Mock vector search client."""
    return MagicMock()
|
||||
56
tests/test_config.py
Normal file
56
tests/test_config.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""Tests for configuration management."""
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from knowledge_search_mcp.config import Settings
|
||||
|
||||
|
||||
def test_settings_from_env():
    """Test that Settings can be loaded from environment variables."""
    # The autouse fixture in conftest.py supplies the required env vars.
    settings = Settings.model_validate({})

    expected = {
        "project_id": "test-project",
        "location": "us-central1",
        "bucket": "test-bucket",
        "index_name": "test-index",
        "deployed_index_id": "test-deployed-index",
    }
    for field_name, value in expected.items():
        assert getattr(settings, field_name) == value
|
||||
|
||||
|
||||
def test_settings_defaults():
    """Test that Settings has correct default values."""
    settings = Settings.model_validate({})

    # Defaults that should hold when no overriding env vars are set.
    assert settings.embedding_model == "gemini-embedding-001"
    assert settings.search_limit == 10
    assert settings.log_name == "va_agent_evaluation_logs"
    assert settings.log_level == "INFO"
|
||||
|
||||
|
||||
def test_settings_custom_values(monkeypatch):
    """Test that Settings can be customized via environment."""
    overrides = {
        "EMBEDDING_MODEL": "custom-embedding-model",
        "SEARCH_LIMIT": "20",
        "LOG_LEVEL": "DEBUG",
    }
    for name, value in overrides.items():
        monkeypatch.setenv(name, value)

    settings = Settings.model_validate({})

    assert settings.embedding_model == "custom-embedding-model"
    assert settings.search_limit == 20
    assert settings.log_level == "DEBUG"
|
||||
|
||||
|
||||
def test_settings_validation_error(monkeypatch):
    """Test that Settings raises ValidationError when required fields are missing.

    The original test built ``required_vars`` but never used it and only
    re-asserted the happy path; it never exercised the validation failure
    its name and docstring promise. Here we actually remove the variables
    set by the autouse conftest fixture and assert the failure.
    """
    required_vars = [
        "PROJECT_ID", "LOCATION", "BUCKET", "INDEX_NAME",
        "DEPLOYED_INDEX_ID", "ENDPOINT_NAME", "ENDPOINT_DOMAIN",
    ]
    # Strip every required variable so validation genuinely fails.
    for var in required_vars:
        monkeypatch.delenv(var, raising=False)

    with pytest.raises(ValidationError):
        Settings.model_validate({})
|
||||
411
tests/test_main_tool.py
Normal file
411
tests/test_main_tool.py
Normal file
@@ -0,0 +1,411 @@
|
||||
"""Tests for the main knowledge_search tool."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from knowledge_search_mcp.__main__ import knowledge_search
|
||||
from knowledge_search_mcp.models import AppContext, SourceNamespace, SearchResult
|
||||
|
||||
|
||||
class TestKnowledgeSearch:
|
||||
"""Tests for knowledge_search tool function."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_app_context(self):
|
||||
"""Create a mock AppContext."""
|
||||
app = MagicMock(spec=AppContext)
|
||||
|
||||
# Mock genai_client
|
||||
app.genai_client = MagicMock()
|
||||
|
||||
# Mock vector_search
|
||||
app.vector_search = MagicMock()
|
||||
app.vector_search.async_run_query = AsyncMock()
|
||||
|
||||
# Mock settings
|
||||
app.settings = MagicMock()
|
||||
app.settings.embedding_model = "models/text-embedding-004"
|
||||
app.settings.deployed_index_id = "test-deployed-index"
|
||||
app.settings.search_limit = 10
|
||||
|
||||
# No semantic cache by default
|
||||
app.semantic_cache = None
|
||||
|
||||
return app
|
||||
|
||||
@pytest.fixture
|
||||
def mock_context(self, mock_app_context):
|
||||
"""Create a mock MCP Context."""
|
||||
ctx = MagicMock()
|
||||
ctx.request_context = MagicMock()
|
||||
ctx.request_context.lifespan_context = mock_app_context
|
||||
return ctx
|
||||
|
||||
@pytest.fixture
|
||||
def sample_embedding(self):
|
||||
"""Create a sample embedding vector."""
|
||||
return [0.1, 0.2, 0.3, 0.4, 0.5]
|
||||
|
||||
@pytest.fixture
|
||||
def sample_search_results(self):
|
||||
"""Create sample search results."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1.txt", "distance": 0.95, "content": "First document content"},
|
||||
{"id": "doc2.txt", "distance": 0.85, "content": "Second document content"},
|
||||
{"id": "doc3.txt", "distance": 0.75, "content": "Third document content"},
|
||||
]
|
||||
return results
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_successful_search(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test successful search workflow."""
|
||||
# Setup mocks
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = sample_search_results
|
||||
mock_format.return_value = "<document 1 name=doc1.txt>\nFirst document content\n</document 1>"
|
||||
|
||||
# Execute
|
||||
result = await knowledge_search("What is financial education?", mock_context)
|
||||
|
||||
# Assert
|
||||
assert result == "<document 1 name=doc1.txt>\nFirst document content\n</document 1>"
|
||||
mock_generate.assert_called_once()
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_called_once_with(
|
||||
deployed_index_id="test-deployed-index",
|
||||
query=sample_embedding,
|
||||
limit=10,
|
||||
source=None,
|
||||
)
|
||||
mock_filter.assert_called_once_with(sample_search_results)
|
||||
mock_format.assert_called_once_with(sample_search_results)
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_embedding_generation_error(self, mock_generate, mock_context):
|
||||
"""Test handling of embedding generation error."""
|
||||
# Setup mock to return error
|
||||
mock_generate.return_value = ([], "Error: API rate limit exceeded. Please try again later.")
|
||||
|
||||
# Execute
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
# Assert
|
||||
assert result == "Error: API rate limit exceeded. Please try again later."
|
||||
mock_generate.assert_called_once()
|
||||
# Vector search should not be called
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_not_called()
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_empty_query_error(self, mock_generate, mock_context):
|
||||
"""Test handling of empty query."""
|
||||
# Setup mock to return error for empty query
|
||||
mock_generate.return_value = ([], "Error: Query cannot be empty")
|
||||
|
||||
# Execute
|
||||
result = await knowledge_search("", mock_context)
|
||||
|
||||
# Assert
|
||||
assert result == "Error: Query cannot be empty"
|
||||
mock_generate.assert_called_once()
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_vector_search_error(self, mock_generate, mock_context, sample_embedding):
|
||||
"""Test handling of vector search error."""
|
||||
# Setup mocks
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.side_effect = Exception(
|
||||
"Vector search service unavailable"
|
||||
)
|
||||
|
||||
# Execute
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
# Assert
|
||||
assert "Error performing vector search:" in result
|
||||
assert "Vector search service unavailable" in result
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_empty_search_results(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding
|
||||
):
|
||||
"""Test handling of empty search results."""
|
||||
# Setup mocks
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = []
|
||||
mock_filter.return_value = []
|
||||
mock_format.return_value = "No relevant documents found for your query."
|
||||
|
||||
# Execute
|
||||
result = await knowledge_search("obscure query", mock_context)
|
||||
|
||||
# Assert
|
||||
assert result == "No relevant documents found for your query."
|
||||
mock_format.assert_called_once_with([])
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_filtered_results_empty(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test when filtering removes all results."""
|
||||
# Setup mocks - results exist but get filtered out
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = [] # All filtered out
|
||||
mock_format.return_value = "No relevant documents found for your query."
|
||||
|
||||
# Execute
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
# Assert
|
||||
assert result == "No relevant documents found for your query."
|
||||
mock_filter.assert_called_once_with(sample_search_results)
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_source_filter_parameter(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test that source filter is passed correctly to vector search."""
|
||||
# Setup mocks
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = sample_search_results
|
||||
mock_format.return_value = "formatted results"
|
||||
|
||||
# Execute with source filter
|
||||
source_filter = SourceNamespace.EDUCACION_FINANCIERA
|
||||
result = await knowledge_search("test query", mock_context, source=source_filter)
|
||||
|
||||
# Assert
|
||||
assert result == "formatted results"
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_called_once_with(
|
||||
deployed_index_id="test-deployed-index",
|
||||
query=sample_embedding,
|
||||
limit=10,
|
||||
source=source_filter,
|
||||
)
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_all_source_filters(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test all available source filter values."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = sample_search_results
|
||||
mock_format.return_value = "results"
|
||||
|
||||
# Test each source filter
|
||||
for source in SourceNamespace:
|
||||
result = await knowledge_search("test query", mock_context, source=source)
|
||||
assert result == "results"
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_vector_search_timeout(self, mock_generate, mock_context, sample_embedding):
|
||||
"""Test handling of vector search timeout."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.side_effect = TimeoutError(
|
||||
"Request timed out"
|
||||
)
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert "Error performing vector search:" in result
|
||||
assert "Request timed out" in result
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_vector_search_connection_error(self, mock_generate, mock_context, sample_embedding):
|
||||
"""Test handling of vector search connection error."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.side_effect = ConnectionError(
|
||||
"Connection refused"
|
||||
)
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert "Error performing vector search:" in result
|
||||
assert "Connection refused" in result
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
async def test_format_results_unexpected_error(
|
||||
self,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test handling of unexpected error in format_search_results."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = sample_search_results
|
||||
|
||||
# Mock format_search_results to raise an error
|
||||
with patch('knowledge_search_mcp.__main__.format_search_results', side_effect=ValueError("Format error")):
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert "Unexpected error during search:" in result
|
||||
assert "Format error" in result
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_filter_results_unexpected_error(self, mock_generate, mock_context, sample_embedding):
|
||||
"""Test handling of unexpected error in filter_search_results."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = [
|
||||
{"id": "doc1", "distance": 0.9, "content": "test"}
|
||||
]
|
||||
|
||||
# Mock filter_search_results to raise an error
|
||||
with patch('knowledge_search_mcp.__main__.filter_search_results', side_effect=TypeError("Filter error")):
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert "Unexpected error during search:" in result
|
||||
assert "Filter error" in result
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_long_query_truncation_in_logs(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test that long queries are handled correctly."""
|
||||
# Setup mocks
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = sample_search_results
|
||||
mock_format.return_value = "results"
|
||||
|
||||
# Execute with very long query
|
||||
long_query = "a" * 500
|
||||
result = await knowledge_search(long_query, mock_context)
|
||||
|
||||
# Assert - should succeed
|
||||
assert result == "results"
|
||||
# Verify generate_query_embedding was called with full query
|
||||
assert mock_generate.call_args[0][2] == long_query
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_multiple_results_returned(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding
|
||||
):
|
||||
"""Test handling of multiple search results."""
|
||||
# Create larger result set
|
||||
large_results: list[SearchResult] = [
|
||||
{"id": f"doc{i}.txt", "distance": 0.9 - (i * 0.05), "content": f"Content {i}"}
|
||||
for i in range(10)
|
||||
]
|
||||
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = large_results
|
||||
mock_filter.return_value = large_results[:5] # Filter to top 5
|
||||
mock_format.return_value = "formatted 5 results"
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert result == "formatted 5 results"
|
||||
mock_filter.assert_called_once_with(large_results)
|
||||
mock_format.assert_called_once_with(large_results[:5])
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
@patch('knowledge_search_mcp.__main__.filter_search_results')
|
||||
@patch('knowledge_search_mcp.__main__.format_search_results')
|
||||
async def test_settings_values_used_correctly(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_search_results
|
||||
):
|
||||
"""Test that settings values are used correctly."""
|
||||
# Customize settings
|
||||
mock_context.request_context.lifespan_context.settings.embedding_model = "custom-model"
|
||||
mock_context.request_context.lifespan_context.settings.deployed_index_id = "custom-index"
|
||||
mock_context.request_context.lifespan_context.settings.search_limit = 20
|
||||
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
|
||||
mock_filter.return_value = sample_search_results
|
||||
mock_format.return_value = "results"
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
# Verify embedding model
|
||||
assert mock_generate.call_args[0][1] == "custom-model"
|
||||
|
||||
# Verify vector search parameters
|
||||
call_kwargs = mock_context.request_context.lifespan_context.vector_search.async_run_query.call_args.kwargs
|
||||
assert call_kwargs["deployed_index_id"] == "custom-index"
|
||||
assert call_kwargs["limit"] == 20
|
||||
|
||||
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
|
||||
async def test_graceful_degradation_on_partial_failure(
|
||||
self, mock_generate, mock_context, sample_embedding
|
||||
):
|
||||
"""Test that errors are caught and returned as strings, not raised."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.side_effect = RuntimeError(
|
||||
"Critical failure"
|
||||
)
|
||||
|
||||
# Should not raise, should return error message
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Error performing vector search:" in result
|
||||
assert "Critical failure" in result
|
||||
110
tests/test_search.py
Normal file
110
tests/test_search.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""Tests for vector search functionality."""
|
||||
|
||||
import io
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from knowledge_search_mcp import (
|
||||
GoogleCloudFileStorage,
|
||||
GoogleCloudVectorSearch,
|
||||
LRUCache,
|
||||
SourceNamespace,
|
||||
)
|
||||
|
||||
|
||||
class TestGoogleCloudFileStorage:
|
||||
"""Tests for GoogleCloudFileStorage."""
|
||||
|
||||
def test_init(self):
|
||||
"""Test storage initialization."""
|
||||
storage = GoogleCloudFileStorage(bucket="test-bucket")
|
||||
assert storage.bucket_name == "test-bucket"
|
||||
assert isinstance(storage._cache, LRUCache)
|
||||
assert storage._cache.max_size == 100
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cache_hit(self):
|
||||
"""Test that cached files are returned without fetching."""
|
||||
storage = GoogleCloudFileStorage(bucket="test-bucket")
|
||||
test_content = b"cached content"
|
||||
storage._cache.put("test.md", test_content)
|
||||
|
||||
result = await storage.async_get_file_stream("test.md")
|
||||
|
||||
assert result.read() == test_content
|
||||
assert result.name == "test.md"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cache_miss(self):
|
||||
"""Test that uncached files are fetched from GCS."""
|
||||
storage = GoogleCloudFileStorage(bucket="test-bucket")
|
||||
test_content = b"fetched content"
|
||||
|
||||
# Mock the storage download
|
||||
with patch.object(storage, '_get_aio_storage') as mock_storage_getter:
|
||||
mock_storage = AsyncMock()
|
||||
mock_storage.download = AsyncMock(return_value=test_content)
|
||||
mock_storage_getter.return_value = mock_storage
|
||||
|
||||
result = await storage.async_get_file_stream("test.md")
|
||||
|
||||
assert result.read() == test_content
|
||||
assert storage._cache.get("test.md") == test_content
|
||||
|
||||
|
||||
class TestGoogleCloudVectorSearch:
|
||||
"""Tests for GoogleCloudVectorSearch."""
|
||||
|
||||
def test_init(self):
|
||||
"""Test vector search client initialization."""
|
||||
vs = GoogleCloudVectorSearch(
|
||||
project_id="test-project",
|
||||
location="us-central1",
|
||||
bucket="test-bucket",
|
||||
index_name="test-index",
|
||||
)
|
||||
|
||||
assert vs.project_id == "test-project"
|
||||
assert vs.location == "us-central1"
|
||||
assert vs.index_name == "test-index"
|
||||
|
||||
def test_configure_index_endpoint(self):
|
||||
"""Test endpoint configuration."""
|
||||
vs = GoogleCloudVectorSearch(
|
||||
project_id="test-project",
|
||||
location="us-central1",
|
||||
bucket="test-bucket",
|
||||
)
|
||||
|
||||
vs.configure_index_endpoint(
|
||||
name="test-endpoint",
|
||||
public_domain="test.domain.com",
|
||||
)
|
||||
|
||||
assert vs._endpoint_name == "test-endpoint"
|
||||
assert vs._endpoint_domain == "test.domain.com"
|
||||
|
||||
def test_configure_index_endpoint_validation(self):
|
||||
"""Test that endpoint configuration validates inputs."""
|
||||
vs = GoogleCloudVectorSearch(
|
||||
project_id="test-project",
|
||||
location="us-central1",
|
||||
bucket="test-bucket",
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="endpoint name"):
|
||||
vs.configure_index_endpoint(name="", public_domain="test.com")
|
||||
|
||||
with pytest.raises(ValueError, match="endpoint domain"):
|
||||
vs.configure_index_endpoint(name="test", public_domain="")
|
||||
|
||||
|
||||
class TestSourceNamespace:
|
||||
"""Tests for SourceNamespace enum."""
|
||||
|
||||
def test_source_namespace_values(self):
|
||||
"""Test that SourceNamespace has expected values."""
|
||||
assert SourceNamespace.EDUCACION_FINANCIERA.value == "Educacion Financiera"
|
||||
assert SourceNamespace.PRODUCTOS_Y_SERVICIOS.value == "Productos y Servicios"
|
||||
assert SourceNamespace.FUNCIONALIDADES_APP_MOVIL.value == "Funcionalidades de la App Movil"
|
||||
381
tests/test_search_services.py
Normal file
381
tests/test_search_services.py
Normal file
@@ -0,0 +1,381 @@
|
||||
"""Tests for search service functions."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from knowledge_search_mcp.services.search import (
|
||||
generate_query_embedding,
|
||||
filter_search_results,
|
||||
format_search_results,
|
||||
)
|
||||
from knowledge_search_mcp.models import SearchResult
|
||||
|
||||
|
||||
class TestGenerateQueryEmbedding:
|
||||
"""Tests for generate_query_embedding function."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_genai_client(self):
|
||||
"""Create a mock genai client."""
|
||||
client = MagicMock()
|
||||
client.aio = MagicMock()
|
||||
client.aio.models = MagicMock()
|
||||
return client
|
||||
|
||||
async def test_successful_embedding_generation(self, mock_genai_client):
|
||||
"""Test successful embedding generation."""
|
||||
# Setup mock response
|
||||
mock_response = MagicMock()
|
||||
mock_embedding = MagicMock()
|
||||
mock_embedding.values = [0.1, 0.2, 0.3, 0.4, 0.5]
|
||||
mock_response.embeddings = [mock_embedding]
|
||||
|
||||
mock_genai_client.aio.models.embed_content = AsyncMock(return_value=mock_response)
|
||||
|
||||
# Execute
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
"What is financial education?"
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert error is None
|
||||
assert embedding == [0.1, 0.2, 0.3, 0.4, 0.5]
|
||||
mock_genai_client.aio.models.embed_content.assert_called_once()
|
||||
call_kwargs = mock_genai_client.aio.models.embed_content.call_args.kwargs
|
||||
assert call_kwargs["model"] == "models/text-embedding-004"
|
||||
assert call_kwargs["contents"] == "What is financial education?"
|
||||
assert call_kwargs["config"].task_type == "RETRIEVAL_QUERY"
|
||||
|
||||
async def test_empty_query_string(self, mock_genai_client):
|
||||
"""Test handling of empty query string."""
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
""
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert error == "Error: Query cannot be empty"
|
||||
mock_genai_client.aio.models.embed_content.assert_not_called()
|
||||
|
||||
async def test_whitespace_only_query(self, mock_genai_client):
|
||||
"""Test handling of whitespace-only query."""
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
" \t\n "
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert error == "Error: Query cannot be empty"
|
||||
mock_genai_client.aio.models.embed_content.assert_not_called()
|
||||
|
||||
async def test_rate_limit_error_429(self, mock_genai_client):
|
||||
"""Test handling of 429 rate limit error."""
|
||||
mock_genai_client.aio.models.embed_content = AsyncMock(
|
||||
side_effect=Exception("429 Too Many Requests")
|
||||
)
|
||||
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
"test query"
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert error == "Error: API rate limit exceeded. Please try again later."
|
||||
|
||||
async def test_rate_limit_error_resource_exhausted(self, mock_genai_client):
|
||||
"""Test handling of RESOURCE_EXHAUSTED error."""
|
||||
mock_genai_client.aio.models.embed_content = AsyncMock(
|
||||
side_effect=Exception("RESOURCE_EXHAUSTED: Quota exceeded")
|
||||
)
|
||||
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
"test query"
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert error == "Error: API rate limit exceeded. Please try again later."
|
||||
|
||||
async def test_generic_api_error(self, mock_genai_client):
|
||||
"""Test handling of generic API error."""
|
||||
mock_genai_client.aio.models.embed_content = AsyncMock(
|
||||
side_effect=ValueError("Invalid model name")
|
||||
)
|
||||
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"invalid-model",
|
||||
"test query"
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert "Error generating embedding: Invalid model name" in error
|
||||
|
||||
async def test_network_error(self, mock_genai_client):
|
||||
"""Test handling of network error."""
|
||||
mock_genai_client.aio.models.embed_content = AsyncMock(
|
||||
side_effect=ConnectionError("Network unreachable")
|
||||
)
|
||||
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
"test query"
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert "Error generating embedding: Network unreachable" in error
|
||||
|
||||
async def test_long_query_truncation_in_logging(self, mock_genai_client):
|
||||
"""Test that long queries are truncated in error logging."""
|
||||
long_query = "a" * 200
|
||||
mock_genai_client.aio.models.embed_content = AsyncMock(
|
||||
side_effect=Exception("API error")
|
||||
)
|
||||
|
||||
embedding, error = await generate_query_embedding(
|
||||
mock_genai_client,
|
||||
"models/text-embedding-004",
|
||||
long_query
|
||||
)
|
||||
|
||||
assert embedding == []
|
||||
assert error is not None
|
||||
|
||||
|
||||
class TestFilterSearchResults:
|
||||
"""Tests for filter_search_results function."""
|
||||
|
||||
def test_empty_results(self):
|
||||
"""Test filtering empty results list."""
|
||||
filtered = filter_search_results([])
|
||||
assert filtered == []
|
||||
|
||||
def test_single_result_above_thresholds(self):
|
||||
"""Test single result above both thresholds."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.85, "content": "test content"}
|
||||
]
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.9)
|
||||
assert len(filtered) == 1
|
||||
assert filtered[0]["id"] == "doc1"
|
||||
|
||||
def test_single_result_below_min_similarity(self):
|
||||
"""Test single result below minimum similarity threshold."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.5, "content": "test content"}
|
||||
]
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.9)
|
||||
assert filtered == []
|
||||
|
||||
def test_multiple_results_all_above_thresholds(self):
|
||||
"""Test multiple results all above thresholds."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.95, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.90, "content": "content 2"},
|
||||
{"id": "doc3", "distance": 0.85, "content": "content 3"},
|
||||
]
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.8)
|
||||
# max_sim = 0.95, cutoff = 0.95 * 0.8 = 0.76
|
||||
# Results with distance > 0.76 and > 0.6: all three
|
||||
assert len(filtered) == 3
|
||||
|
||||
def test_top_percent_filtering(self):
|
||||
"""Test filtering by top_percent threshold."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 1.0, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.95, "content": "content 2"},
|
||||
{"id": "doc3", "distance": 0.85, "content": "content 3"},
|
||||
{"id": "doc4", "distance": 0.70, "content": "content 4"},
|
||||
]
|
||||
# max_sim = 1.0, cutoff = 1.0 * 0.9 = 0.9
|
||||
# Results with distance > 0.9: doc1 (1.0), doc2 (0.95)
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.9)
|
||||
assert len(filtered) == 2
|
||||
assert filtered[0]["id"] == "doc1"
|
||||
assert filtered[1]["id"] == "doc2"
|
||||
|
||||
def test_min_similarity_filtering(self):
|
||||
"""Test filtering by minimum similarity threshold."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.95, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.75, "content": "content 2"},
|
||||
{"id": "doc3", "distance": 0.55, "content": "content 3"},
|
||||
]
|
||||
# max_sim = 0.95, cutoff = 0.95 * 0.9 = 0.855
|
||||
# doc1 > 0.855 and > 0.7: included
|
||||
# doc2 < 0.855: excluded by top_percent
|
||||
# doc3 < 0.7: excluded by min_similarity
|
||||
filtered = filter_search_results(results, min_similarity=0.7, top_percent=0.9)
|
||||
assert len(filtered) == 1
|
||||
assert filtered[0]["id"] == "doc1"
|
||||
|
||||
def test_default_parameters(self):
|
||||
"""Test filtering with default parameters."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.95, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.85, "content": "content 2"},
|
||||
{"id": "doc3", "distance": 0.50, "content": "content 3"},
|
||||
]
|
||||
# Default: min_similarity=0.6, top_percent=0.9
|
||||
# max_sim = 0.95, cutoff = 0.95 * 0.9 = 0.855
|
||||
# doc1 > 0.855 and > 0.6: included
|
||||
# doc2 < 0.855: excluded
|
||||
# doc3 < 0.6: excluded
|
||||
filtered = filter_search_results(results)
|
||||
assert len(filtered) == 1
|
||||
assert filtered[0]["id"] == "doc1"
|
||||
|
||||
def test_all_results_filtered_out(self):
|
||||
"""Test when all results are filtered out."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.55, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.45, "content": "content 2"},
|
||||
{"id": "doc3", "distance": 0.35, "content": "content 3"},
|
||||
]
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.9)
|
||||
assert filtered == []
|
||||
|
||||
def test_exact_threshold_boundaries(self):
|
||||
"""Test behavior at exact threshold boundaries."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.9, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.6, "content": "content 2"},
|
||||
]
|
||||
# max_sim = 0.9, cutoff = 0.9 * 0.9 = 0.81
|
||||
# doc1: 0.9 > 0.81 and 0.9 > 0.6: included
|
||||
# doc2: 0.6 < 0.81: excluded
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.9)
|
||||
assert len(filtered) == 1
|
||||
assert filtered[0]["id"] == "doc1"
|
||||
|
||||
def test_identical_distances(self):
|
||||
"""Test filtering with identical distance values."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1", "distance": 0.8, "content": "content 1"},
|
||||
{"id": "doc2", "distance": 0.8, "content": "content 2"},
|
||||
{"id": "doc3", "distance": 0.8, "content": "content 3"},
|
||||
]
|
||||
# max_sim = 0.8, cutoff = 0.8 * 0.9 = 0.72
|
||||
# All have distance 0.8 > 0.72 and > 0.6: all included
|
||||
filtered = filter_search_results(results, min_similarity=0.6, top_percent=0.9)
|
||||
assert len(filtered) == 3
|
||||
|
||||
|
||||
class TestFormatSearchResults:
|
||||
"""Tests for format_search_results function."""
|
||||
|
||||
def test_empty_results(self):
|
||||
"""Test formatting empty results list."""
|
||||
formatted = format_search_results([])
|
||||
assert formatted == "No relevant documents found for your query."
|
||||
|
||||
def test_single_result(self):
|
||||
"""Test formatting single result."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1.txt", "distance": 0.95, "content": "This is the content."}
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
expected = "<document 1 name=doc1.txt>\nThis is the content.\n</document 1>"
|
||||
assert formatted == expected
|
||||
|
||||
def test_multiple_results(self):
|
||||
"""Test formatting multiple results."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1.txt", "distance": 0.95, "content": "First document content."},
|
||||
{"id": "doc2.txt", "distance": 0.85, "content": "Second document content."},
|
||||
{"id": "doc3.txt", "distance": 0.75, "content": "Third document content."},
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
expected = (
|
||||
"<document 1 name=doc1.txt>\nFirst document content.\n</document 1>\n"
|
||||
"<document 2 name=doc2.txt>\nSecond document content.\n</document 2>\n"
|
||||
"<document 3 name=doc3.txt>\nThird document content.\n</document 3>"
|
||||
)
|
||||
assert formatted == expected
|
||||
|
||||
def test_multiline_content(self):
|
||||
"""Test formatting results with multiline content."""
|
||||
results: list[SearchResult] = [
|
||||
{
|
||||
"id": "doc1.txt",
|
||||
"distance": 0.95,
|
||||
"content": "Line 1\nLine 2\nLine 3"
|
||||
}
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
expected = "<document 1 name=doc1.txt>\nLine 1\nLine 2\nLine 3\n</document 1>"
|
||||
assert formatted == expected
|
||||
|
||||
def test_special_characters_in_content(self):
|
||||
"""Test formatting with special characters in content."""
|
||||
results: list[SearchResult] = [
|
||||
{
|
||||
"id": "doc1.txt",
|
||||
"distance": 0.95,
|
||||
"content": "Content with <special> & \"characters\""
|
||||
}
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
expected = '<document 1 name=doc1.txt>\nContent with <special> & "characters"\n</document 1>'
|
||||
assert formatted == expected
|
||||
|
||||
def test_special_characters_in_document_id(self):
|
||||
"""Test formatting with special characters in document ID."""
|
||||
results: list[SearchResult] = [
|
||||
{
|
||||
"id": "path/to/doc-name_v2.txt",
|
||||
"distance": 0.95,
|
||||
"content": "Some content"
|
||||
}
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
expected = "<document 1 name=path/to/doc-name_v2.txt>\nSome content\n</document 1>"
|
||||
assert formatted == expected
|
||||
|
||||
def test_empty_content(self):
|
||||
"""Test formatting result with empty content."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1.txt", "distance": 0.95, "content": ""}
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
expected = "<document 1 name=doc1.txt>\n\n</document 1>"
|
||||
assert formatted == expected
|
||||
|
||||
def test_document_numbering(self):
|
||||
"""Test that document numbering starts at 1 and increments correctly."""
|
||||
results: list[SearchResult] = [
|
||||
{"id": "a.txt", "distance": 0.9, "content": "A"},
|
||||
{"id": "b.txt", "distance": 0.8, "content": "B"},
|
||||
{"id": "c.txt", "distance": 0.7, "content": "C"},
|
||||
{"id": "d.txt", "distance": 0.6, "content": "D"},
|
||||
{"id": "e.txt", "distance": 0.5, "content": "E"},
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
|
||||
assert "<document 1 name=a.txt>" in formatted
|
||||
assert "</document 1>" in formatted
|
||||
assert "<document 2 name=b.txt>" in formatted
|
||||
assert "</document 2>" in formatted
|
||||
assert "<document 3 name=c.txt>" in formatted
|
||||
assert "</document 3>" in formatted
|
||||
assert "<document 4 name=d.txt>" in formatted
|
||||
assert "</document 4>" in formatted
|
||||
assert "<document 5 name=e.txt>" in formatted
|
||||
assert "</document 5>" in formatted
|
||||
|
||||
def test_very_long_content(self):
|
||||
"""Test formatting with very long content."""
|
||||
long_content = "A" * 10000
|
||||
results: list[SearchResult] = [
|
||||
{"id": "doc1.txt", "distance": 0.95, "content": long_content}
|
||||
]
|
||||
formatted = format_search_results(results)
|
||||
assert f"<document 1 name=doc1.txt>\n{long_content}\n</document 1>" == formatted
|
||||
assert len(formatted) > 10000
|
||||
272
tests/test_semantic_cache.py
Normal file
272
tests/test_semantic_cache.py
Normal file
@@ -0,0 +1,272 @@
|
||||
"""Tests for the semantic cache service and its integration."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from knowledge_search_mcp.__main__ import knowledge_search
|
||||
from knowledge_search_mcp.models import AppContext, SearchResult, SourceNamespace
|
||||
from knowledge_search_mcp.services.semantic_cache import KnowledgeSemanticCache
|
||||
|
||||
|
||||
class TestKnowledgeSemanticCache:
|
||||
"""Unit tests for the KnowledgeSemanticCache wrapper."""
|
||||
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.CustomVectorizer")
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.SemanticCache")
|
||||
def test_init_creates_cache(self, mock_sc_cls, mock_vec_cls):
|
||||
"""Test that __init__ creates the SemanticCache with correct params."""
|
||||
mock_vectorizer = MagicMock()
|
||||
mock_vec_cls.return_value = mock_vectorizer
|
||||
|
||||
KnowledgeSemanticCache(
|
||||
redis_url="redis://localhost:6379",
|
||||
name="test_cache",
|
||||
vector_dims=3072,
|
||||
distance_threshold=0.12,
|
||||
ttl=3600,
|
||||
)
|
||||
|
||||
mock_vec_cls.assert_called_once()
|
||||
mock_sc_cls.assert_called_once_with(
|
||||
name="test_cache",
|
||||
distance_threshold=0.12,
|
||||
ttl=3600,
|
||||
redis_url="redis://localhost:6379",
|
||||
vectorizer=mock_vectorizer,
|
||||
overwrite=False,
|
||||
)
|
||||
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.CustomVectorizer")
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.SemanticCache")
|
||||
async def test_check_returns_response_on_hit(self, mock_sc_cls, _mock_vec_cls):
|
||||
"""Test cache check returns response when a similar vector is found."""
|
||||
mock_inner = MagicMock()
|
||||
mock_inner.acheck = AsyncMock(return_value=[
|
||||
{"response": "cached answer", "prompt": "original q", "vector_distance": 0.05},
|
||||
])
|
||||
mock_sc_cls.return_value = mock_inner
|
||||
|
||||
cache = KnowledgeSemanticCache(redis_url="redis://localhost:6379")
|
||||
result = await cache.check([0.1] * 3072)
|
||||
|
||||
assert result == "cached answer"
|
||||
mock_inner.acheck.assert_awaited_once_with(
|
||||
vector=[0.1] * 3072,
|
||||
num_results=1,
|
||||
)
|
||||
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.CustomVectorizer")
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.SemanticCache")
|
||||
async def test_check_returns_none_on_miss(self, mock_sc_cls, _mock_vec_cls):
|
||||
"""Test cache check returns None when no similar vector is found."""
|
||||
mock_inner = MagicMock()
|
||||
mock_inner.acheck = AsyncMock(return_value=[])
|
||||
mock_sc_cls.return_value = mock_inner
|
||||
|
||||
cache = KnowledgeSemanticCache(redis_url="redis://localhost:6379")
|
||||
result = await cache.check([0.1] * 3072)
|
||||
|
||||
assert result is None
|
||||
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.CustomVectorizer")
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.SemanticCache")
|
||||
async def test_check_returns_none_on_error(self, mock_sc_cls, _mock_vec_cls):
|
||||
"""Test cache check degrades gracefully on Redis errors."""
|
||||
mock_inner = MagicMock()
|
||||
mock_inner.acheck = AsyncMock(side_effect=ConnectionError("Redis down"))
|
||||
mock_sc_cls.return_value = mock_inner
|
||||
|
||||
cache = KnowledgeSemanticCache(redis_url="redis://localhost:6379")
|
||||
result = await cache.check([0.1] * 3072)
|
||||
|
||||
assert result is None
|
||||
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.CustomVectorizer")
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.SemanticCache")
|
||||
async def test_store_calls_astore(self, mock_sc_cls, _mock_vec_cls):
|
||||
"""Test store delegates to SemanticCache.astore."""
|
||||
mock_inner = MagicMock()
|
||||
mock_inner.astore = AsyncMock()
|
||||
mock_sc_cls.return_value = mock_inner
|
||||
|
||||
cache = KnowledgeSemanticCache(redis_url="redis://localhost:6379")
|
||||
await cache.store("query", "response", [0.1] * 3072, {"key": "val"})
|
||||
|
||||
mock_inner.astore.assert_awaited_once_with(
|
||||
prompt="query",
|
||||
response="response",
|
||||
vector=[0.1] * 3072,
|
||||
metadata={"key": "val"},
|
||||
)
|
||||
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.CustomVectorizer")
|
||||
@patch("knowledge_search_mcp.services.semantic_cache.SemanticCache")
|
||||
async def test_store_does_not_raise_on_error(self, mock_sc_cls, _mock_vec_cls):
|
||||
"""Test store degrades gracefully on Redis errors."""
|
||||
mock_inner = MagicMock()
|
||||
mock_inner.astore = AsyncMock(side_effect=ConnectionError("Redis down"))
|
||||
mock_sc_cls.return_value = mock_inner
|
||||
|
||||
cache = KnowledgeSemanticCache(redis_url="redis://localhost:6379")
|
||||
await cache.store("query", "response", [0.1] * 3072)
|
||||
|
||||
|
||||
class TestKnowledgeSearchCacheIntegration:
|
||||
"""Tests for cache integration in the knowledge_search tool."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_cache(self):
|
||||
"""Create a mock KnowledgeSemanticCache."""
|
||||
cache = MagicMock(spec=KnowledgeSemanticCache)
|
||||
cache.check = AsyncMock(return_value=None)
|
||||
cache.store = AsyncMock()
|
||||
return cache
|
||||
|
||||
@pytest.fixture
|
||||
def mock_app_context(self, mock_cache):
|
||||
"""Create a mock AppContext with semantic cache."""
|
||||
app = MagicMock(spec=AppContext)
|
||||
app.genai_client = MagicMock()
|
||||
app.vector_search = MagicMock()
|
||||
app.vector_search.async_run_query = AsyncMock()
|
||||
app.settings = MagicMock()
|
||||
app.settings.embedding_model = "gemini-embedding-001"
|
||||
app.settings.deployed_index_id = "test-deployed-index"
|
||||
app.settings.search_limit = 10
|
||||
app.semantic_cache = mock_cache
|
||||
return app
|
||||
|
||||
@pytest.fixture
|
||||
def mock_context(self, mock_app_context):
|
||||
"""Create a mock MCP Context."""
|
||||
ctx = MagicMock()
|
||||
ctx.request_context.lifespan_context = mock_app_context
|
||||
return ctx
|
||||
|
||||
@pytest.fixture
|
||||
def sample_embedding(self):
|
||||
return [0.1] * 3072
|
||||
|
||||
@pytest.fixture
|
||||
def sample_results(self) -> list[SearchResult]:
|
||||
return [
|
||||
{"id": "doc1", "distance": 0.95, "content": "Content 1"},
|
||||
{"id": "doc2", "distance": 0.90, "content": "Content 2"},
|
||||
]
|
||||
|
||||
@patch("knowledge_search_mcp.__main__.generate_query_embedding")
|
||||
async def test_cache_hit_skips_vector_search(
|
||||
self, mock_generate, mock_context, sample_embedding, mock_cache
|
||||
):
|
||||
"""On cache hit, vector search is never called."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_cache.check.return_value = "cached result"
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert result == "cached result"
|
||||
mock_cache.check.assert_awaited_once_with(sample_embedding)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_not_called()
|
||||
mock_cache.store.assert_not_awaited()
|
||||
|
||||
@patch("knowledge_search_mcp.__main__.generate_query_embedding")
|
||||
@patch("knowledge_search_mcp.__main__.filter_search_results")
|
||||
@patch("knowledge_search_mcp.__main__.format_search_results")
|
||||
async def test_cache_miss_stores_result(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_results,
|
||||
mock_cache,
|
||||
):
|
||||
"""On cache miss, results are fetched and stored in cache."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_cache.check.return_value = None
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_results
|
||||
mock_filter.return_value = sample_results
|
||||
mock_format.return_value = "formatted results"
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert result == "formatted results"
|
||||
mock_cache.check.assert_awaited_once_with(sample_embedding)
|
||||
mock_cache.store.assert_awaited_once_with(
|
||||
"test query", "formatted results", sample_embedding,
|
||||
)
|
||||
|
||||
@patch("knowledge_search_mcp.__main__.generate_query_embedding")
|
||||
@patch("knowledge_search_mcp.__main__.filter_search_results")
|
||||
@patch("knowledge_search_mcp.__main__.format_search_results")
|
||||
async def test_cache_skipped_when_source_filter_set(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_results,
|
||||
mock_cache,
|
||||
):
|
||||
"""Cache is bypassed when a source filter is specified."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_results
|
||||
mock_filter.return_value = sample_results
|
||||
mock_format.return_value = "formatted results"
|
||||
|
||||
result = await knowledge_search(
|
||||
"test query", mock_context, source=SourceNamespace.EDUCACION_FINANCIERA,
|
||||
)
|
||||
|
||||
assert result == "formatted results"
|
||||
mock_cache.check.assert_not_awaited()
|
||||
mock_cache.store.assert_not_awaited()
|
||||
|
||||
@patch("knowledge_search_mcp.__main__.generate_query_embedding")
|
||||
@patch("knowledge_search_mcp.__main__.filter_search_results")
|
||||
@patch("knowledge_search_mcp.__main__.format_search_results")
|
||||
async def test_cache_not_stored_when_no_results(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
mock_cache,
|
||||
):
|
||||
"""Empty results are not stored in the cache."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_cache.check.return_value = None
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = []
|
||||
mock_filter.return_value = []
|
||||
mock_format.return_value = "No relevant documents found for your query."
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert result == "No relevant documents found for your query."
|
||||
mock_cache.store.assert_not_awaited()
|
||||
|
||||
@patch("knowledge_search_mcp.__main__.generate_query_embedding")
|
||||
@patch("knowledge_search_mcp.__main__.filter_search_results")
|
||||
@patch("knowledge_search_mcp.__main__.format_search_results")
|
||||
async def test_works_without_cache(
|
||||
self,
|
||||
mock_format,
|
||||
mock_filter,
|
||||
mock_generate,
|
||||
mock_context,
|
||||
sample_embedding,
|
||||
sample_results,
|
||||
):
|
||||
"""Tool works normally when semantic_cache is None."""
|
||||
mock_generate.return_value = (sample_embedding, None)
|
||||
mock_context.request_context.lifespan_context.semantic_cache = None
|
||||
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_results
|
||||
mock_filter.return_value = sample_results
|
||||
mock_format.return_value = "formatted results"
|
||||
|
||||
result = await knowledge_search("test query", mock_context)
|
||||
|
||||
assert result == "formatted results"
|
||||
436
tests/test_validation_services.py
Normal file
436
tests/test_validation_services.py
Normal file
@@ -0,0 +1,436 @@
|
||||
"""Tests for validation service functions."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
from aiohttp import ClientResponse
|
||||
|
||||
from knowledge_search_mcp.services.validation import (
|
||||
validate_genai_access,
|
||||
validate_gcs_access,
|
||||
validate_vector_search_access,
|
||||
)
|
||||
from knowledge_search_mcp.config import Settings
|
||||
|
||||
|
||||
class TestValidateGenAIAccess:
    """Unit tests covering validate_genai_access."""

    @staticmethod
    def _embed_response(embeddings):
        """Build a fake embed_content response carrying *embeddings*."""
        response = MagicMock()
        response.embeddings = embeddings
        return response

    async def _error_from(self, client, settings, exc):
        """Make embed_content raise *exc* and return the validator's verdict."""
        client.aio.models.embed_content = AsyncMock(side_effect=exc)
        return await validate_genai_access(client, settings)

    @pytest.fixture
    def mock_settings(self):
        """Settings double carrying the fields the validator reads."""
        settings = MagicMock(spec=Settings)
        settings.embedding_model = "models/text-embedding-004"
        settings.project_id = "test-project"
        settings.location = "us-central1"
        return settings

    @pytest.fixture
    def mock_genai_client(self):
        """GenAI client double exposing the aio.models surface."""
        client = MagicMock()
        client.aio = MagicMock()
        client.aio.models = MagicMock()
        return client

    async def test_successful_validation(self, mock_genai_client, mock_settings):
        """A non-empty embedding response validates cleanly with the right call."""
        embedding = MagicMock()
        embedding.values = [0.1] * 768  # typical embedding dimension
        mock_genai_client.aio.models.embed_content = AsyncMock(
            return_value=self._embed_response([embedding])
        )

        error = await validate_genai_access(mock_genai_client, mock_settings)

        assert error is None
        mock_genai_client.aio.models.embed_content.assert_called_once()
        kwargs = mock_genai_client.aio.models.embed_content.call_args.kwargs
        assert kwargs["model"] == "models/text-embedding-004"
        assert kwargs["contents"] == "test"
        assert kwargs["config"].task_type == "RETRIEVAL_QUERY"

    async def test_empty_response(self, mock_genai_client, mock_settings):
        """An embeddings-free response is reported as an error."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            return_value=self._embed_response([])
        )

        error = await validate_genai_access(mock_genai_client, mock_settings)

        assert error == "Embedding validation returned empty response"

    async def test_none_response(self, mock_genai_client, mock_settings):
        """A None response is reported the same way as an empty one."""
        mock_genai_client.aio.models.embed_content = AsyncMock(return_value=None)

        error = await validate_genai_access(mock_genai_client, mock_settings)

        assert error == "Embedding validation returned empty response"

    async def test_api_permission_error(self, mock_genai_client, mock_settings):
        """PermissionError surfaces as a GenAI-prefixed message."""
        error = await self._error_from(
            mock_genai_client,
            mock_settings,
            PermissionError("Permission denied for GenAI API"),
        )

        assert error is not None
        assert "GenAI:" in error
        assert "Permission denied for GenAI API" in error

    async def test_api_quota_error(self, mock_genai_client, mock_settings):
        """A generic quota failure surfaces as a GenAI-prefixed message."""
        error = await self._error_from(
            mock_genai_client, mock_settings, Exception("Quota exceeded")
        )

        assert error is not None
        assert "GenAI:" in error
        assert "Quota exceeded" in error

    async def test_network_error(self, mock_genai_client, mock_settings):
        """A ConnectionError surfaces as a GenAI-prefixed message."""
        error = await self._error_from(
            mock_genai_client, mock_settings, ConnectionError("Network unreachable")
        )

        assert error is not None
        assert "GenAI:" in error
        assert "Network unreachable" in error

    async def test_invalid_model_error(self, mock_genai_client, mock_settings):
        """A ValueError for a bad model surfaces as a GenAI-prefixed message."""
        error = await self._error_from(
            mock_genai_client, mock_settings, ValueError("Invalid model name")
        )

        assert error is not None
        assert "GenAI:" in error
        assert "Invalid model name" in error

    async def test_embeddings_with_zero_values(self, mock_genai_client, mock_settings):
        """An embedding with an empty values list still counts as success."""
        embedding = MagicMock()
        embedding.values = []
        mock_genai_client.aio.models.embed_content = AsyncMock(
            return_value=self._embed_response([embedding])
        )

        error = await validate_genai_access(mock_genai_client, mock_settings)

        # Presence of an embeddings entry is what matters, not its contents.
        assert error is None
|
||||
class TestValidateGCSAccess:
    """Unit tests covering validate_gcs_access."""

    @staticmethod
    def _wire(vector_search, session, response):
        """Route the storage session's async GET context manager to *response*."""
        session.get = MagicMock()
        session.get.return_value.__aenter__ = AsyncMock(return_value=response)
        session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        vector_search.storage._get_aio_session.return_value = session

    @pytest.fixture
    def mock_settings(self):
        """Settings double carrying the bucket/project fields the validator reads."""
        settings = MagicMock(spec=Settings)
        settings.bucket = "test-bucket"
        settings.project_id = "test-project"
        return settings

    @pytest.fixture
    def mock_vector_search(self):
        """Vector-search double exposing a storage attribute."""
        vs = MagicMock()
        vs.storage = MagicMock()
        return vs

    @pytest.fixture
    def mock_session(self):
        """Bare aiohttp-session double."""
        return MagicMock()

    @pytest.fixture
    def mock_response(self):
        """HTTP response double with a JSON listing body."""
        response = MagicMock()
        response.text = AsyncMock(return_value='{"items": []}')
        return response

    async def test_successful_validation(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 200 listing response validates cleanly and sends the bearer token."""
        mock_response.status = 200
        mock_response.ok = True
        self._wire(mock_vector_search, mock_session, mock_response)

        with patch('knowledge_search_mcp.services.validation.Token') as token_cls:
            token_cls.return_value.get = AsyncMock(return_value="fake-access-token")

            error = await validate_gcs_access(mock_vector_search, mock_settings)

            assert error is None
            mock_session.get.assert_called_once()
            call_args = mock_session.get.call_args
            assert "test-bucket" in call_args[0][0]
            assert call_args[1]["headers"]["Authorization"] == "Bearer fake-access-token"

    async def test_access_denied_403(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 403 maps to an access-denied message mentioning permissions."""
        mock_response.status = 403
        mock_response.ok = False
        self._wire(mock_vector_search, mock_session, mock_response)

        with patch('knowledge_search_mcp.services.validation.Token') as token_cls:
            token_cls.return_value.get = AsyncMock(return_value="fake-access-token")

            error = await validate_gcs_access(mock_vector_search, mock_settings)

            assert error is not None
            assert "Access denied to bucket 'test-bucket'" in error
            assert "permissions" in error.lower()

    async def test_bucket_not_found_404(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 404 maps to a bucket-not-found message."""
        mock_response.status = 404
        mock_response.ok = False
        self._wire(mock_vector_search, mock_session, mock_response)

        with patch('knowledge_search_mcp.services.validation.Token') as token_cls:
            token_cls.return_value.get = AsyncMock(return_value="fake-access-token")

            error = await validate_gcs_access(mock_vector_search, mock_settings)

            assert error is not None
            assert "Bucket 'test-bucket' not found" in error
            assert "bucket name" in error.lower()

    async def test_server_error_500(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 500 maps to a generic failed-access message with the status code."""
        mock_response.status = 500
        mock_response.ok = False
        mock_response.text = AsyncMock(return_value='{"error": "Internal server error"}')
        self._wire(mock_vector_search, mock_session, mock_response)

        with patch('knowledge_search_mcp.services.validation.Token') as token_cls:
            token_cls.return_value.get = AsyncMock(return_value="fake-access-token")

            error = await validate_gcs_access(mock_vector_search, mock_settings)

            assert error is not None
            assert "Failed to access bucket 'test-bucket': 500" in error

    async def test_token_acquisition_error(self, mock_vector_search, mock_settings, mock_session):
        """A failure to obtain the access token surfaces as a GCS error."""
        mock_vector_search.storage._get_aio_session.return_value = mock_session

        with patch('knowledge_search_mcp.services.validation.Token') as token_cls:
            token_cls.return_value.get = AsyncMock(
                side_effect=Exception("Failed to get access token")
            )

            error = await validate_gcs_access(mock_vector_search, mock_settings)

            assert error is not None
            assert "GCS:" in error
            assert "Failed to get access token" in error

    async def test_network_error(self, mock_vector_search, mock_settings, mock_session):
        """A connection failure during the GET surfaces as a GCS error."""
        mock_session.get = MagicMock(side_effect=ConnectionError("Network unreachable"))
        mock_vector_search.storage._get_aio_session.return_value = mock_session

        with patch('knowledge_search_mcp.services.validation.Token') as token_cls:
            token_cls.return_value.get = AsyncMock(return_value="fake-access-token")

            error = await validate_gcs_access(mock_vector_search, mock_settings)

            assert error is not None
            assert "GCS:" in error
            assert "Network unreachable" in error
||||
class TestValidateVectorSearchAccess:
    """Unit tests covering validate_vector_search_access."""

    @staticmethod
    def _wire(vector_search, session, response):
        """Route the client session's async GET context manager to *response*."""
        session.get = MagicMock()
        session.get.return_value.__aenter__ = AsyncMock(return_value=response)
        session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        vector_search._get_aio_session.return_value = session

    @pytest.fixture
    def mock_settings(self):
        """Settings double carrying the endpoint/location fields the validator reads."""
        settings = MagicMock(spec=Settings)
        settings.endpoint_name = "projects/test/locations/us-central1/indexEndpoints/test-endpoint"
        settings.location = "us-central1"
        return settings

    @pytest.fixture
    def mock_vector_search(self):
        """Vector-search double with a stubbed auth-header coroutine."""
        vs = MagicMock()
        vs._async_get_auth_headers = AsyncMock(return_value={"Authorization": "Bearer fake-token"})
        return vs

    @pytest.fixture
    def mock_session(self):
        """Bare aiohttp-session double."""
        return MagicMock()

    @pytest.fixture
    def mock_response(self):
        """HTTP response double with a JSON endpoint body."""
        response = MagicMock()
        response.text = AsyncMock(return_value='{"name": "test-endpoint"}')
        return response

    async def test_successful_validation(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 200 endpoint GET validates cleanly against the regional API host."""
        mock_response.status = 200
        mock_response.ok = True
        self._wire(mock_vector_search, mock_session, mock_response)

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is None
        mock_vector_search._async_get_auth_headers.assert_called_once()
        mock_session.get.assert_called_once()
        call_args = mock_session.get.call_args
        assert "us-central1-aiplatform.googleapis.com" in call_args[0][0]
        assert "test-endpoint" in call_args[0][0]
        assert call_args[1]["headers"]["Authorization"] == "Bearer fake-token"

    async def test_access_denied_403(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 403 maps to an access-denied message naming the endpoint."""
        mock_response.status = 403
        mock_response.ok = False
        self._wire(mock_vector_search, mock_session, mock_response)

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is not None
        assert "Access denied to endpoint" in error
        assert "test-endpoint" in error
        assert "permissions" in error.lower()

    async def test_endpoint_not_found_404(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 404 maps to an endpoint-not-found message."""
        mock_response.status = 404
        mock_response.ok = False
        self._wire(mock_vector_search, mock_session, mock_response)

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is not None
        assert "not found" in error.lower()
        assert "test-endpoint" in error

    async def test_server_error_503(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """A 503 maps to a generic failed-access message with the status code."""
        mock_response.status = 503
        mock_response.ok = False
        mock_response.text = AsyncMock(return_value='{"error": "Service unavailable"}')
        self._wire(mock_vector_search, mock_session, mock_response)

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is not None
        assert "Failed to access endpoint" in error
        assert "503" in error

    async def test_auth_header_error(self, mock_vector_search, mock_settings):
        """A failure to build auth headers surfaces as a Vector Search error."""
        mock_vector_search._async_get_auth_headers = AsyncMock(
            side_effect=Exception("Failed to get auth headers")
        )

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is not None
        assert "Vector Search:" in error
        assert "Failed to get auth headers" in error

    async def test_network_timeout(self, mock_vector_search, mock_settings, mock_session):
        """A timeout during the GET surfaces as a Vector Search error."""
        mock_session.get = MagicMock(side_effect=TimeoutError("Request timed out"))
        mock_vector_search._get_aio_session.return_value = mock_session

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is not None
        assert "Vector Search:" in error
        assert "Request timed out" in error

    async def test_connection_error(self, mock_vector_search, mock_settings, mock_session):
        """A refused connection surfaces as a Vector Search error."""
        mock_session.get = MagicMock(side_effect=ConnectionError("Connection refused"))
        mock_vector_search._get_aio_session.return_value = mock_session

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is not None
        assert "Vector Search:" in error
        assert "Connection refused" in error

    async def test_endpoint_url_construction(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """The request URL is derived from the configured location and endpoint."""
        mock_response.status = 200
        mock_response.ok = True
        self._wire(mock_vector_search, mock_session, mock_response)

        # Override with a non-default region and endpoint path.
        mock_settings.location = "europe-west1"
        mock_settings.endpoint_name = "projects/my-project/locations/europe-west1/indexEndpoints/my-endpoint"

        error = await validate_vector_search_access(mock_vector_search, mock_settings)

        assert error is None
        url = mock_session.get.call_args[0][0]
        assert "europe-west1-aiplatform.googleapis.com" in url
        assert "my-endpoint" in url
||||
@@ -1,4 +0,0 @@
|
||||
# Package public surface: configuration objects plus the structured-logging helper.
from .config import Settings, _args, cfg
from .logging_setup import log_structured_entry

__all__ = ['Settings', '_args', 'cfg', 'log_structured_entry']
||||
@@ -1,60 +0,0 @@
|
||||
import os
|
||||
import argparse
|
||||
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, YamlConfigSettingsSource
|
||||
|
||||
|
||||
def _parse_args() -> argparse.Namespace:
    """Parse the server's command-line flags (transport, host, port, config)."""
    parser = argparse.ArgumentParser()
    transports = ["stdio", "sse", "streamable-http"]
    parser.add_argument("--transport", choices=transports, default="stdio")
    parser.add_argument("--host", default="0.0.0.0")
    parser.add_argument("--port", type=int, default=8080)
    # The config path can also come from the CONFIG_FILE environment variable.
    default_config = os.environ.get("CONFIG_FILE", "config.yaml")
    parser.add_argument("--config", default=default_config)
    return parser.parse_args()


# Parsed once at import time so Settings can pick up --config below.
_args = _parse_args()
|
||||
class Settings(BaseSettings):
    """Server configuration populated from env vars and a YAML config file."""

    # YAML file path comes from the --config CLI flag parsed above.
    model_config = {"env_file": ".env", "yaml_file": _args.config}

    # Required GCP / Vector Search identifiers (no defaults).
    project_id: str
    location: str
    bucket: str
    index_name: str
    deployed_index_id: str
    endpoint_name: str
    endpoint_domain: str
    # Optional knobs with sensible defaults.
    embedding_model: str = "gemini-embedding-001"
    search_limit: int = 10
    log_name: str = "va_agent_evaluation_logs"
    log_level: str = "INFO"

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        # Insert the YAML source after dotenv; in pydantic-settings earlier
        # sources take precedence, so YAML values act as overridable defaults.
        return (
            init_settings,
            env_settings,
            dotenv_settings,
            YamlConfigSettingsSource(settings_cls),
            file_secret_settings,
        )


# Singleton instance of Settings, validated from the configured sources at import.
cfg = Settings.model_validate({})
||||
@@ -1,48 +0,0 @@
|
||||
"""
|
||||
Centralized Cloud Logging setup.
|
||||
Uses CloudLoggingHandler (background thread) so logging does not add latency
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional, Dict, Literal
|
||||
|
||||
import google.cloud.logging
|
||||
from google.cloud.logging.handlers import CloudLoggingHandler
|
||||
|
||||
from .config import cfg
|
||||
|
||||
|
||||
def _setup_logger() -> logging.Logger:
    """Create (at most once) and return the shared evaluation logger."""
    logger = logging.getLogger(cfg.log_name)
    # Idempotent: a second call must not attach a duplicate Cloud handler.
    has_cloud_handler = any(
        isinstance(handler, CloudLoggingHandler) for handler in logger.handlers
    )
    if has_cloud_handler:
        return logger

    try:
        client = google.cloud.logging.Client(project=cfg.project_id)
        # CloudLoggingHandler ships entries via a background transport thread.
        logger.addHandler(CloudLoggingHandler(client, name=cfg.log_name))
        logger.setLevel(getattr(logging, cfg.log_level.upper()))
    except Exception as err:
        # Fallback to console if Cloud Logging is unavailable (local dev)
        logging.basicConfig(level=getattr(logging, cfg.log_level.upper()))
        logger = logging.getLogger(cfg.log_name)
        logger.warning("Cloud Logging setup failed; using console. Error: %s", err)

    return logger


_eval_log = _setup_logger()
||||
|
||||
|
||||
def log_structured_entry(message: str, severity: Literal["INFO", "WARNING", "ERROR"], custom_log: Optional[Dict] = None) -> None:
    """
    Emit a JSON-structured log row.

    Args:
        message: Short label for the row (e.g., "Final agent turn").
        severity: "INFO" | "WARNING" | "ERROR"
        custom_log: A dict with your structured payload.
    """
    # Unknown severities degrade to INFO rather than raising.
    level = getattr(logging, severity.upper(), logging.INFO)
    payload = {"message": message, "custom": custom_log or {}}
    _eval_log.log(level, message, extra={"json_fields": payload})
348
uv.lock
generated
348
uv.lock
generated
@@ -369,6 +369,90 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.13.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.5"
|
||||
@@ -1123,7 +1207,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" },
|
||||
@@ -1132,7 +1215,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" },
|
||||
@@ -1141,7 +1223,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" },
|
||||
@@ -1150,7 +1231,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" },
|
||||
@@ -1247,6 +1327,66 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hiredis"
|
||||
version = "3.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048, upload-time = "2025-10-14T16:33:34.263Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026, upload-time = "2025-10-14T16:32:12.004Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217, upload-time = "2025-10-14T16:32:13.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858, upload-time = "2025-10-14T16:32:13.98Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/98/b2a42878b82130a535c7aa20bc937ba2d07d72e9af3ad1ad93e837c419b5/hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8", size = 170195, upload-time = "2025-10-14T16:32:14.728Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/1d/9dcde7a75115d3601b016113d9b90300726fa8e48aacdd11bf01a453c145/hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3", size = 181808, upload-time = "2025-10-14T16:32:15.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/a1/60f6bda9b20b4e73c85f7f5f046bc2c154a5194fc94eb6861e1fd97ced52/hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7", size = 180578, upload-time = "2025-10-14T16:32:16.514Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/01/859d21de65085f323a701824e23ea3330a0ac05f8e184544d7aa5c26128d/hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc", size = 172508, upload-time = "2025-10-14T16:32:17.411Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/a8/28fd526e554c80853d0fbf57ef2a3235f00e4ed34ce0e622e05d27d0f788/hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c", size = 166341, upload-time = "2025-10-14T16:32:18.561Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/91/ded746b7d2914f557fbbf77be55e90d21f34ba758ae10db6591927c642c8/hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071", size = 176765, upload-time = "2025-10-14T16:32:19.491Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/4c/04aa46ff386532cb5f08ee495c2bf07303e93c0acf2fa13850e031347372/hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542", size = 170312, upload-time = "2025-10-14T16:32:20.404Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/6e/67f9d481c63f542a9cf4c9f0ea4e5717db0312fb6f37fb1f78f3a66de93c/hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8", size = 167965, upload-time = "2025-10-14T16:32:21.259Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/df/dde65144d59c3c0d85e43255798f1fa0c48d413e668cfd92b3d9f87924ef/hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068", size = 20533, upload-time = "2025-10-14T16:32:22.192Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/a9/55a4ac9c16fdf32e92e9e22c49f61affe5135e177ca19b014484e28950f7/hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0", size = 22379, upload-time = "2025-10-14T16:32:22.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/39/2b789ebadd1548ccb04a2c18fbc123746ad1a7e248b7f3f3cac618ca10a6/hiredis-3.3.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:b7048b4ec0d5dddc8ddd03da603de0c4b43ef2540bf6e4c54f47d23e3480a4fa", size = 82035, upload-time = "2025-10-14T16:32:23.715Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/74/4066d9c1093be744158ede277f2a0a4e4cd0fefeaa525c79e2876e9e5c72/hiredis-3.3.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:e5f86ce5a779319c15567b79e0be806e8e92c18bb2ea9153e136312fafa4b7d6", size = 46219, upload-time = "2025-10-14T16:32:24.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/3f/f9e0f6d632f399d95b3635703e1558ffaa2de3aea4cfcbc2d7832606ba43/hiredis-3.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fbdb97a942e66016fff034df48a7a184e2b7dc69f14c4acd20772e156f20d04b", size = 41860, upload-time = "2025-10-14T16:32:25.356Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/c5/b7dde5ec390dabd1cabe7b364a509c66d4e26de783b0b64cf1618f7149fc/hiredis-3.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0fb4bea72fe45ff13e93ddd1352b43ff0749f9866263b5cca759a4c960c776f", size = 170094, upload-time = "2025-10-14T16:32:26.148Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/d6/7f05c08ee74d41613be466935688068e07f7b6c55266784b5ace7b35b766/hiredis-3.3.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:85b9baf98050e8f43c2826ab46aaf775090d608217baf7af7882596aef74e7f9", size = 181746, upload-time = "2025-10-14T16:32:27.844Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/d2/aaf9f8edab06fbf5b766e0cae3996324297c0516a91eb2ca3bd1959a0308/hiredis-3.3.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69079fb0f0ebb61ba63340b9c4bce9388ad016092ca157e5772eb2818209d930", size = 180465, upload-time = "2025-10-14T16:32:29.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/1e/93ded8b9b484519b211fc71746a231af98c98928e3ebebb9086ed20bb1ad/hiredis-3.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17f77b79031ea4b0967d30255d2ae6e7df0603ee2426ad3274067f406938236", size = 172419, upload-time = "2025-10-14T16:32:30.059Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/13/02880458e02bbfcedcaabb8f7510f9dda1c89d7c1921b1bb28c22bb38cbf/hiredis-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d14f745fc177bc05fc24bdf20e2b515e9a068d3d4cce90a0fb78d04c9c9d9a", size = 166400, upload-time = "2025-10-14T16:32:31.173Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/60/896e03267670570f19f61dc65a2137fcb2b06e83ab0911d58eeec9f3cb88/hiredis-3.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ba063fdf1eff6377a0c409609cbe890389aefddfec109c2d20fcc19cfdafe9da", size = 176845, upload-time = "2025-10-14T16:32:32.12Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/90/a1d4bd0cdcf251fda72ac0bd932f547b48ad3420f89bb2ef91bf6a494534/hiredis-3.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1799cc66353ad066bfdd410135c951959da9f16bcb757c845aab2f21fc4ef099", size = 170365, upload-time = "2025-10-14T16:32:33.035Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/9a/7c98f7bb76bdb4a6a6003cf8209721f083e65d2eed2b514f4a5514bda665/hiredis-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2cbf71a121996ffac82436b6153290815b746afb010cac19b3290a1644381b07", size = 168022, upload-time = "2025-10-14T16:32:34.81Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/ca/672ee658ffe9525558615d955b554ecd36aa185acd4431ccc9701c655c9b/hiredis-3.3.0-cp313-cp313-win32.whl", hash = "sha256:a7cbbc6026bf03659f0b25e94bbf6e64f6c8c22f7b4bc52fe569d041de274194", size = 20533, upload-time = "2025-10-14T16:32:35.7Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/93/511fd94f6a7b6d72a4cf9c2b159bf3d780585a9a1dca52715dd463825299/hiredis-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:a8def89dd19d4e2e4482b7412d453dec4a5898954d9a210d7d05f60576cedef6", size = 22387, upload-time = "2025-10-14T16:32:36.441Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/b3/b948ee76a6b2bc7e45249861646f91f29704f743b52565cf64cee9c4658b/hiredis-3.3.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c135bda87211f7af9e2fd4e046ab433c576cd17b69e639a0f5bb2eed5e0e71a9", size = 82105, upload-time = "2025-10-14T16:32:37.204Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/9b/4210f4ebfb3ab4ada964b8de08190f54cbac147198fb463cd3c111cc13e0/hiredis-3.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2f855c678230aed6fc29b962ce1cc67e5858a785ef3a3fd6b15dece0487a2e60", size = 46237, upload-time = "2025-10-14T16:32:38.07Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/7a/e38bfd7d04c05036b4ccc6f42b86b1032185cf6ae426e112a97551fece14/hiredis-3.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4059c78a930cbb33c391452ccce75b137d6f89e2eebf6273d75dafc5c2143c03", size = 41894, upload-time = "2025-10-14T16:32:38.929Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/d3/eae43d9609c5d9a6effef0586ee47e13a0d84b44264b688d97a75cd17ee5/hiredis-3.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:334a3f1d14c253bb092e187736c3384203bd486b244e726319bbb3f7dffa4a20", size = 170486, upload-time = "2025-10-14T16:32:40.147Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/fd/34d664554880b27741ab2916d66207357563b1639e2648685f4c84cfb755/hiredis-3.3.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd137b147235447b3d067ec952c5b9b95ca54b71837e1b38dbb2ec03b89f24fc", size = 182031, upload-time = "2025-10-14T16:32:41.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/a3/0c69fdde3f4155b9f7acc64ccffde46f312781469260061b3bbaa487fd34/hiredis-3.3.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f88f4f2aceb73329ece86a1cb0794fdbc8e6d614cb5ca2d1023c9b7eb432db8", size = 180542, upload-time = "2025-10-14T16:32:42.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/7a/ad5da4d7bc241e57c5b0c4fe95aa75d1f2116e6e6c51577394d773216e01/hiredis-3.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:550f4d1538822fc75ebf8cf63adc396b23d4958bdbbad424521f2c0e3dfcb169", size = 172353, upload-time = "2025-10-14T16:32:43.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/dc/c46eace64eb047a5b31acd5e4b0dc6d2f0390a4a3f6d507442d9efa570ad/hiredis-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54b14211fbd5930fc696f6fcd1f1f364c660970d61af065a80e48a1fa5464dd6", size = 166435, upload-time = "2025-10-14T16:32:44.97Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/ac/ad13a714e27883a2e4113c980c94caf46b801b810de5622c40f8d3e8335f/hiredis-3.3.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9e96f63dbc489fc86f69951e9f83dadb9582271f64f6822c47dcffa6fac7e4a", size = 177218, upload-time = "2025-10-14T16:32:45.936Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/38/268fabd85b225271fe1ba82cb4a484fcc1bf922493ff2c74b400f1a6f339/hiredis-3.3.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:106e99885d46684d62ab3ec1d6b01573cc0e0083ac295b11aaa56870b536c7ec", size = 170477, upload-time = "2025-10-14T16:32:46.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/6b/02bb8af810ea04247334ab7148acff7a61c08a8832830c6703f464be83a9/hiredis-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:087e2ef3206361281b1a658b5b4263572b6ba99465253e827796964208680459", size = 167915, upload-time = "2025-10-14T16:32:47.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/94/901fa817e667b2e69957626395e6dee416e31609dca738f28e6b545ca6c2/hiredis-3.3.0-cp314-cp314-win32.whl", hash = "sha256:80638ebeab1cefda9420e9fedc7920e1ec7b4f0513a6b23d58c9d13c882f8065", size = 21165, upload-time = "2025-10-14T16:32:50.753Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/7e/4881b9c1d0b4cdaba11bd10e600e97863f977ea9d67c5988f7ec8cd363e5/hiredis-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a68aaf9ba024f4e28cf23df9196ff4e897bd7085872f3a30644dca07fa787816", size = 22996, upload-time = "2025-10-14T16:32:51.543Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/b6/d7e6c17da032665a954a89c1e6ee3bd12cb51cd78c37527842b03519981d/hiredis-3.3.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:f7f80442a32ce51ee5d89aeb5a84ee56189a0e0e875f1a57bbf8d462555ae48f", size = 83034, upload-time = "2025-10-14T16:32:52.395Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/6c/6751b698060cdd1b2d8427702cff367c9ed7a1705bcf3792eb5b896f149b/hiredis-3.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a1a67530da714954ed50579f4fe1ab0ddbac9c43643b1721c2cb226a50dde263", size = 46701, upload-time = "2025-10-14T16:32:53.572Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/8e/20a5cf2c83c7a7e08c76b9abab113f99f71cd57468a9c7909737ce6e9bf8/hiredis-3.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:616868352e47ab355559adca30f4f3859f9db895b4e7bc71e2323409a2add751", size = 42381, upload-time = "2025-10-14T16:32:54.762Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/0a/547c29c06e8c9c337d0df3eec39da0cf1aad701daf8a9658dd37f25aca66/hiredis-3.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e799b79f3150083e9702fc37e6243c0bd47a443d6eae3f3077b0b3f510d6a145", size = 180313, upload-time = "2025-10-14T16:32:55.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/8a/488de5469e3d0921a1c425045bf00e983d48b2111a90e47cf5769eaa536c/hiredis-3.3.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ef1dfb0d2c92c3701655e2927e6bbe10c499aba632c7ea57b6392516df3864b", size = 190488, upload-time = "2025-10-14T16:32:56.649Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/59/8493edc3eb9ae0dbea2b2230c2041a52bc03e390b02ffa3ac0bca2af9aea/hiredis-3.3.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c290da6bc2a57e854c7da9956cd65013483ede935677e84560da3b848f253596", size = 189210, upload-time = "2025-10-14T16:32:57.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/de/8c9a653922057b32fb1e2546ecd43ef44c9aa1a7cf460c87cae507eb2bc7/hiredis-3.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd8c438d9e1728f0085bf9b3c9484d19ec31f41002311464e75b69550c32ffa8", size = 180972, upload-time = "2025-10-14T16:32:58.737Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/a3/51e6e6afaef2990986d685ca6e254ffbd191f1635a59b2d06c9e5d10c8a2/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1bbc6b8a88bbe331e3ebf6685452cebca6dfe6d38a6d4efc5651d7e363ba28bd", size = 175315, upload-time = "2025-10-14T16:32:59.774Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/54/e436312feb97601f70f8b39263b8da5ac4a5d18305ebdfb08ad7621f6119/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:55d8c18fe9a05496c5c04e6eccc695169d89bf358dff964bcad95696958ec05f", size = 185653, upload-time = "2025-10-14T16:33:00.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/a3/88e66030d066337c6c0f883a912c6d4b2d6d7173490fbbc113a6cbe414ff/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:4ddc79afa76b805d364e202a754666cb3c4d9c85153cbfed522871ff55827838", size = 179032, upload-time = "2025-10-14T16:33:01.711Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/1f/fb7375467e9adaa371cd617c2984fefe44bdce73add4c70b8dd8cab1b33a/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e8a4b8540581dcd1b2b25827a54cfd538e0afeaa1a0e3ca87ad7126965981cc", size = 176127, upload-time = "2025-10-14T16:33:02.793Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/14/0dc2b99209c400f3b8f24067273e9c3cb383d894e155830879108fb19e98/hiredis-3.3.0-cp314-cp314t-win32.whl", hash = "sha256:298593bb08487753b3afe6dc38bac2532e9bac8dcee8d992ef9977d539cc6776", size = 22024, upload-time = "2025-10-14T16:33:03.812Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
@@ -1317,6 +1457,24 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonpath-ng"
|
||||
version = "1.8.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/32/58/250751940d75c8019659e15482d548a4aa3b6ce122c515102a4bfdac50e3/jsonpath_ng-1.8.0.tar.gz", hash = "sha256:54252968134b5e549ea5b872f1df1168bd7defe1a52fed5a358c194e1943ddc3", size = 74513, upload-time = "2026-02-24T14:42:06.182Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/03/99/33c7d78a3fb70d545fd5411ac67a651c81602cc09c9cf0df383733f068c5/jsonpath_ng-1.8.0-py3-none-any.whl", hash = "sha256:b8dde192f8af58d646fc031fac9c99fe4d00326afc4148f1f043c601a8cfe138", size = 67844, upload-time = "2026-02-28T00:53:19.637Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.26.0"
|
||||
@@ -1347,7 +1505,7 @@ wheels = [
|
||||
[[package]]
|
||||
name = "knowledge-search-mcp"
|
||||
version = "0.1.0"
|
||||
source = { virtual = "." }
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "aiohttp" },
|
||||
{ name = "gcloud-aio-auth" },
|
||||
@@ -1357,11 +1515,16 @@ dependencies = [
|
||||
{ name = "mcp", extra = ["cli"] },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "redis", extra = ["hiredis"] },
|
||||
{ name = "redisvl" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "google-adk" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-cov" },
|
||||
{ name = "ruff" },
|
||||
{ name = "ty" },
|
||||
]
|
||||
@@ -1376,11 +1539,16 @@ requires-dist = [
|
||||
{ name = "mcp", extras = ["cli"], specifier = ">=1.26.0" },
|
||||
{ name = "pydantic-settings", specifier = ">=2.9.1" },
|
||||
{ name = "pyyaml", specifier = ">=6.0" },
|
||||
{ name = "redis", extras = ["hiredis"], specifier = ">=5.0.0,<7" },
|
||||
{ name = "redisvl", specifier = ">=0.6.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "google-adk", specifier = ">=1.25.1" },
|
||||
{ name = "pytest", specifier = ">=8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.24.0" },
|
||||
{ name = "pytest-cov", specifier = ">=6.0.0" },
|
||||
{ name = "ruff", specifier = ">=0.15.2" },
|
||||
{ name = "ty", specifier = ">=0.0.18" },
|
||||
]
|
||||
@@ -1512,6 +1680,21 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ml-dtypes"
|
||||
version = "0.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "numpy" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fd/15/76f86faa0902836cc133939732f7611ace68cf54148487a99c539c272dc8/ml_dtypes-0.4.1.tar.gz", hash = "sha256:fad5f2de464fd09127e49b7fd1252b9006fb43d2edc1ff112d390c324af5ca7a", size = 692594, upload-time = "2024-09-13T19:07:11.624Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/1a/99e924f12e4b62139fbac87419698c65f956d58de0dbfa7c028fa5b096aa/ml_dtypes-0.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:827d3ca2097085cf0355f8fdf092b888890bb1b1455f52801a2d7756f056f54b", size = 405077, upload-time = "2024-09-13T19:06:57.538Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/8c/7b610bd500617854c8cc6ed7c8cfb9d48d6a5c21a1437a36a4b9bc8a3598/ml_dtypes-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:772426b08a6172a891274d581ce58ea2789cc8abc1c002a27223f314aaf894e7", size = 2181554, upload-time = "2024-09-13T19:06:59.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/c6/f89620cecc0581dc1839e218c4315171312e46c62a62da6ace204bda91c0/ml_dtypes-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:126e7d679b8676d1a958f2651949fbfa182832c3cd08020d8facd94e4114f3e9", size = 2160488, upload-time = "2024-09-13T19:07:03.131Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/11/a742d3c31b2cc8557a48efdde53427fd5f9caa2fa3c9c27d826e78a66f51/ml_dtypes-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:df0fb650d5c582a9e72bb5bd96cfebb2cdb889d89daff621c8fbc60295eba66c", size = 127462, upload-time = "2024-09-13T19:07:04.916Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mmh3"
|
||||
version = "5.2.0"
|
||||
@@ -1691,6 +1874,67 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.4.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae", size = 20723651, upload-time = "2026-01-31T23:13:10.135Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/51/6e/6f394c9c77668153e14d4da83bcc247beb5952f6ead7699a1a2992613bea/numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a", size = 16667963, upload-time = "2026-01-31T23:10:52.147Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/f8/55483431f2b2fd015ae6ed4fe62288823ce908437ed49db5a03d15151678/numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1", size = 14693571, upload-time = "2026-01-31T23:10:54.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/20/18026832b1845cdc82248208dd929ca14c9d8f2bac391f67440707fff27c/numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e", size = 5203469, upload-time = "2026-01-31T23:10:57.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/33/2eb97c8a77daaba34eaa3fa7241a14ac5f51c46a6bd5911361b644c4a1e2/numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27", size = 6550820, upload-time = "2026-01-31T23:10:59.429Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/91/b97fdfd12dc75b02c44e26c6638241cc004d4079a0321a69c62f51470c4c/numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548", size = 15663067, upload-time = "2026-01-31T23:11:01.291Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/c6/a18e59f3f0b8071cc85cbc8d80cd02d68aa9710170b2553a117203d46936/numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f", size = 16619782, upload-time = "2026-01-31T23:11:03.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/83/9751502164601a79e18847309f5ceec0b1446d7b6aa12305759b72cf98b2/numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460", size = 17013128, upload-time = "2026-01-31T23:11:05.913Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/c4/c4066322256ec740acc1c8923a10047818691d2f8aec254798f3dd90f5f2/numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba", size = 18345324, upload-time = "2026-01-31T23:11:08.248Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/af/6157aa6da728fa4525a755bfad486ae7e3f76d4c1864138003eb84328497/numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f", size = 5960282, upload-time = "2026-01-31T23:11:10.497Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/0f/7ceaaeaacb40567071e94dbf2c9480c0ae453d5bb4f52bea3892c39dc83c/numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85", size = 12314210, upload-time = "2026-01-31T23:11:12.176Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/a3/56c5c604fae6dd40fa2ed3040d005fca97e91bd320d232ac9931d77ba13c/numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa", size = 10220171, upload-time = "2026-01-31T23:11:14.684Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/22/815b9fe25d1d7ae7d492152adbc7226d3eff731dffc38fe970589fcaaa38/numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c", size = 16663696, upload-time = "2026-01-31T23:11:17.516Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/f0/817d03a03f93ba9c6c8993de509277d84e69f9453601915e4a69554102a1/numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979", size = 14688322, upload-time = "2026-01-31T23:11:19.883Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/b4/f805ab79293c728b9a99438775ce51885fd4f31b76178767cfc718701a39/numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98", size = 5198157, upload-time = "2026-01-31T23:11:22.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/09/826e4289844eccdcd64aac27d13b0fd3f32039915dd5b9ba01baae1f436c/numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef", size = 6546330, upload-time = "2026-01-31T23:11:23.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/fb/cbfdbfa3057a10aea5422c558ac57538e6acc87ec1669e666d32ac198da7/numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7", size = 15660968, upload-time = "2026-01-31T23:11:25.713Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/dc/46066ce18d01645541f0186877377b9371b8fa8017fa8262002b4ef22612/numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499", size = 16607311, upload-time = "2026-01-31T23:11:28.117Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/d9/4b5adfc39a43fa6bf918c6d544bc60c05236cc2f6339847fc5b35e6cb5b0/numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb", size = 17012850, upload-time = "2026-01-31T23:11:30.888Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/20/adb6e6adde6d0130046e6fdfb7675cc62bc2f6b7b02239a09eb58435753d/numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7", size = 18334210, upload-time = "2026-01-31T23:11:33.214Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/0e/0a73b3dff26803a8c02baa76398015ea2a5434d9b8265a7898a6028c1591/numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110", size = 5958199, upload-time = "2026-01-31T23:11:35.385Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/bc/6352f343522fcb2c04dbaf94cb30cca6fd32c1a750c06ad6231b4293708c/numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622", size = 12310848, upload-time = "2026-01-31T23:11:38.001Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/8d/6da186483e308da5da1cc6918ce913dcfe14ffde98e710bfeff2a6158d4e/numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71", size = 10221082, upload-time = "2026-01-31T23:11:40.392Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/a1/9510aa43555b44781968935c7548a8926274f815de42ad3997e9e83680dd/numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262", size = 14815866, upload-time = "2026-01-31T23:11:42.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/30/6bbb5e76631a5ae46e7923dd16ca9d3f1c93cfa8d4ed79a129814a9d8db3/numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913", size = 5325631, upload-time = "2026-01-31T23:11:44.7Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/00/3a490938800c1923b567b3a15cd17896e68052e2145d8662aaf3e1ffc58f/numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab", size = 6646254, upload-time = "2026-01-31T23:11:46.341Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/e9/fac0890149898a9b609caa5af7455a948b544746e4b8fe7c212c8edd71f8/numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82", size = 15720138, upload-time = "2026-01-31T23:11:48.082Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/5c/08887c54e68e1e28df53709f1893ce92932cc6f01f7c3d4dc952f61ffd4e/numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f", size = 16655398, upload-time = "2026-01-31T23:11:50.293Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/89/253db0fa0e66e9129c745e4ef25631dc37d5f1314dad2b53e907b8538e6d/numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554", size = 17079064, upload-time = "2026-01-31T23:11:52.927Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/d5/cbade46ce97c59c6c3da525e8d95b7abe8a42974a1dc5c1d489c10433e88/numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257", size = 18379680, upload-time = "2026-01-31T23:11:55.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/62/48f99ae172a4b63d981babe683685030e8a3df4f246c893ea5c6ef99f018/numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657", size = 6082433, upload-time = "2026-01-31T23:11:58.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/38/e054a61cfe48ad9f1ed0d188e78b7e26859d0b60ef21cd9de4897cdb5326/numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b", size = 12451181, upload-time = "2026-01-31T23:11:59.782Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/a4/a05c3a6418575e185dd84d0b9680b6bb2e2dc3e4202f036b7b4e22d6e9dc/numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1", size = 10290756, upload-time = "2026-01-31T23:12:02.438Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b", size = 16663092, upload-time = "2026-01-31T23:12:04.521Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000", size = 14698770, upload-time = "2026-01-31T23:12:06.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/0b/f9e49ba6c923678ad5bc38181c08ac5e53b7a5754dbca8e581aa1a56b1ff/numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1", size = 5208562, upload-time = "2026-01-31T23:12:09.632Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/12/d7de8f6f53f9bb76997e5e4c069eda2051e3fe134e9181671c4391677bb2/numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74", size = 6543710, upload-time = "2026-01-31T23:12:11.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/63/c66418c2e0268a31a4cf8a8b512685748200f8e8e8ec6c507ce14e773529/numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a", size = 15677205, upload-time = "2026-01-31T23:12:14.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325", size = 16611738, upload-time = "2026-01-31T23:12:16.525Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/a7/39c4cdda9f019b609b5c473899d87abff092fc908cfe4d1ecb2fcff453b0/numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909", size = 17028888, upload-time = "2026-01-31T23:12:19.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/b3/e84bb64bdfea967cc10950d71090ec2d84b49bc691df0025dddb7c26e8e3/numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a", size = 18339556, upload-time = "2026-01-31T23:12:21.816Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/f5/954a291bc1192a27081706862ac62bb5920fbecfbaa302f64682aa90beed/numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a", size = 6006899, upload-time = "2026-01-31T23:12:24.14Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/cb/eff72a91b2efdd1bc98b3b8759f6a1654aa87612fc86e3d87d6fe4f948c4/numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75", size = 12443072, upload-time = "2026-01-31T23:12:26.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/75/62726948db36a56428fce4ba80a115716dc4fad6a3a4352487f8bb950966/numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05", size = 10494886, upload-time = "2026-01-31T23:12:28.488Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/2f/ee93744f1e0661dc267e4b21940870cabfae187c092e1433b77b09b50ac4/numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308", size = 14818567, upload-time = "2026-01-31T23:12:30.709Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/24/6535212add7d76ff938d8bdc654f53f88d35cddedf807a599e180dcb8e66/numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef", size = 5328372, upload-time = "2026-01-31T23:12:32.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/9d/c48f0a035725f925634bf6b8994253b43f2047f6778a54147d7e213bc5a7/numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d", size = 6649306, upload-time = "2026-01-31T23:12:34.797Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/05/7c73a9574cd4a53a25907bad38b59ac83919c0ddc8234ec157f344d57d9a/numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8", size = 15722394, upload-time = "2026-01-31T23:12:36.565Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/fa/4de10089f21fc7d18442c4a767ab156b25c2a6eaf187c0db6d9ecdaeb43f/numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5", size = 16653343, upload-time = "2026-01-31T23:12:39.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/f9/d33e4ffc857f3763a57aa85650f2e82486832d7492280ac21ba9efda80da/numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e", size = 17078045, upload-time = "2026-01-31T23:12:42.041Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/b8/54bdb43b6225badbea6389fa038c4ef868c44f5890f95dd530a218706da3/numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a", size = 18380024, upload-time = "2026-01-31T23:12:44.331Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/55/6e1a61ded7af8df04016d81b5b02daa59f2ea9252ee0397cb9f631efe9e5/numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443", size = 6153937, upload-time = "2026-01-31T23:12:47.229Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/aa/fa6118d1ed6d776b0983f3ceac9b1a5558e80df9365b1c3aa6d42bf9eee4/numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236", size = 12631844, upload-time = "2026-01-31T23:12:48.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/0a/2ec5deea6dcd158f254a7b372fb09cfba5719419c8d66343bab35237b3fb/numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181", size = 10565379, upload-time = "2026-01-31T23:12:51.345Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-api"
|
||||
version = "1.38.0"
|
||||
@@ -1842,6 +2086,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "propcache"
|
||||
version = "0.4.1"
|
||||
@@ -2171,6 +2424,49 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "9.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "iniconfig" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "1.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-cov"
|
||||
version = "7.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "coverage" },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-dateutil"
|
||||
version = "2.9.0.post0"
|
||||
@@ -2201,6 +2497,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-ulid"
|
||||
version = "3.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/40/7e/0d6c82b5ccc71e7c833aed43d9e8468e1f2ff0be1b3f657a6fcafbb8433d/python_ulid-3.1.0.tar.gz", hash = "sha256:ff0410a598bc5f6b01b602851a3296ede6f91389f913a5d5f8c496003836f636", size = 93175, upload-time = "2025-08-18T16:09:26.305Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/a0/4ed6632b70a52de845df056654162acdebaf97c20e3212c559ac43e7216e/python_ulid-3.1.0-py3-none-any.whl", hash = "sha256:e2cdc979c8c877029b4b7a38a6fba3bc4578e4f109a308419ff4d3ccf0a46619", size = 11577, upload-time = "2025-08-18T16:09:25.047Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pywin32"
|
||||
version = "311"
|
||||
@@ -2263,6 +2568,39 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redis"
|
||||
version = "6.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0d/d6/e8b92798a5bd67d659d51a18170e91c16ac3b59738d91894651ee255ed49/redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010", size = 4647399, upload-time = "2025-08-07T08:10:11.441Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
hiredis = [
|
||||
{ name = "hiredis" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redisvl"
|
||||
version = "0.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "jsonpath-ng" },
|
||||
{ name = "ml-dtypes" },
|
||||
{ name = "numpy" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-ulid" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "redis" },
|
||||
{ name = "tenacity" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/1a/f1f0ff963622c34a9e9a9f2a0c6ad82bfbd05c082ecc89e38e092e3e9069/redisvl-0.15.0.tar.gz", hash = "sha256:0e382e9b6cd8378dfe1515b18f92d125cfba905f6f3c5fe9b8904b3ca840d1ca", size = 861480, upload-time = "2026-02-27T14:02:33.366Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/23/5c5263a3cfc66957fa3bb154ef9441fbbcfb2f4eae910eb18e316db168b1/redisvl-0.15.0-py3-none-any.whl", hash = "sha256:aff716b9a9c4aef9c81de9a12d9939a0170ff3b3a1fe9d4164e94b131a754290", size = 197935, upload-time = "2026-02-27T14:02:31.262Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "referencing"
|
||||
version = "0.37.0"
|
||||
|
||||
Reference in New Issue
Block a user