Compare commits

..

5 Commits

Author SHA1 Message Date
d69c4e4f4a Add more tests 2026-03-04 05:13:50 +00:00
f6e122b5a9 Rename entrypoint 2026-03-03 18:53:55 +00:00
dba94107a5 Split out main module 2026-03-03 18:34:57 +00:00
d3cd8d5291 Refactor duplicated code 2026-03-03 18:00:31 +00:00
8dfd2048a5 Migrate to package 2026-03-03 16:52:46 +00:00
29 changed files with 2738 additions and 835 deletions

View File

@@ -7,9 +7,8 @@ WORKDIR /app
COPY pyproject.toml uv.lock ./ COPY pyproject.toml uv.lock ./
RUN uv sync --no-dev --frozen RUN uv sync --no-dev --frozen
COPY main.py . COPY src/ src/
COPY utils/ utils/
ENV PATH="/app/.venv/bin:$PATH" ENV PATH="/app/.venv/bin:$PATH"
CMD ["uv", "run", "python", "main.py", "--transport", "streamable-http", "--port", "8000"] CMD ["uv", "run", "python", "-m", "knowledge_search_mcp", "--transport", "streamable-http", "--port", "8000"]

View File

@@ -19,7 +19,7 @@ An MCP (Model Context Protocol) server that exposes a `knowledge_search` tool fo
## Configuration ## Configuration
Create a `.env` file (see `Settings` in `main.py` for all options): Create a `config.yaml` file or `.env` file (see `Settings` in `src/knowledge_search_mcp/config.py` for all options):
```env ```env
PROJECT_ID=my-gcp-project PROJECT_ID=my-gcp-project
@@ -42,16 +42,25 @@ SEARCH_LIMIT=10
uv sync uv sync
``` ```
### Run the MCP server (stdio) ### Run the MCP server
**Using the installed command (recommended):**
```bash ```bash
uv run python main.py # stdio transport (default)
uv run knowledge-search-mcp
# SSE transport for remote clients
uv run knowledge-search-mcp --transport sse --port 8080
# streamable-http transport
uv run knowledge-search-mcp --transport streamable-http --port 8080
``` ```
### Run the MCP server (SSE, e.g. for remote clients) **Or run directly:**
```bash ```bash
uv run python main.py --transport sse --port 8080 uv run python -m knowledge_search_mcp.main
``` ```
### Run the interactive agent (ADK) ### Run the interactive agent (ADK)
@@ -68,6 +77,12 @@ Or connect to an already-running SSE server:
uv run python agent.py --remote http://localhost:8080/sse uv run python agent.py --remote http://localhost:8080/sse
``` ```
### Run tests
```bash
uv run pytest
```
## Docker ## Docker
```bash ```bash
@@ -80,8 +95,12 @@ The container starts the server in SSE mode on the port specified by `PORT` (def
## Project structure ## Project structure
``` ```
main.py MCP server, vector search client, and GCS storage helper src/knowledge_search_mcp/
agent.py Interactive ADK agent that consumes the MCP server ├── __init__.py Package initialization
Dockerfile Multi-stage build for Cloud Run / containerized deployment ├── config.py Configuration management (Settings, args parsing)
pyproject.toml Project metadata and dependencies ├── logging.py Cloud Logging setup
└── main.py MCP server, vector search client, and GCS storage helper
agent.py Interactive ADK agent that consumes the MCP server
tests/ Test suite
pyproject.toml Project metadata, dependencies, and entry points
``` ```

View File

@@ -23,7 +23,7 @@ if project := os.environ.get("PROJECT_ID"):
if location := os.environ.get("LOCATION"): if location := os.environ.get("LOCATION"):
os.environ.setdefault("GOOGLE_CLOUD_LOCATION", location) os.environ.setdefault("GOOGLE_CLOUD_LOCATION", location)
SERVER_SCRIPT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "main.py") SERVER_SCRIPT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "src", "knowledge_search_mcp", "main.py")
def _parse_args() -> argparse.Namespace: def _parse_args() -> argparse.Namespace:

803
main.py
View File

@@ -1,803 +0,0 @@
# ruff: noqa: INP001
"""Async helpers for querying Vertex AI vector search via MCP."""
import asyncio
import io
from collections.abc import AsyncIterator, Sequence
from contextlib import asynccontextmanager
from dataclasses import dataclass
from enum import Enum
from typing import BinaryIO, TypedDict
import aiohttp
from gcloud.aio.auth import Token
from gcloud.aio.storage import Storage
from google import genai
from google.genai import types as genai_types
from mcp.server.fastmcp import Context, FastMCP
from utils import Settings, _args, cfg, log_structured_entry
HTTP_TOO_MANY_REQUESTS = 429
HTTP_SERVER_ERROR = 500
class SourceNamespace(str, Enum):
    """Valid values for the 'source' restrict namespace in vector search.

    Mixes in ``str`` so members compare equal to their raw string values
    and serialize directly into REST request payloads.
    """

    # Financial-education knowledge-base documents.
    EDUCACION_FINANCIERA = "Educacion Financiera"
    # Product and service documentation.
    PRODUCTOS_Y_SERVICIOS = "Productos y Servicios"
    # Mobile-app feature documentation.
    FUNCIONALIDADES_APP_MOVIL = "Funcionalidades de la App Movil"
class GoogleCloudFileStorage:
    """Cache-aware helper for downloading files from Google Cloud Storage."""

    def __init__(self, bucket: str) -> None:
        """Initialize the storage helper.

        Args:
            bucket: Name of the GCS bucket blobs are downloaded from.
        """
        self.bucket_name = bucket
        # Lazily-created pooled aiohttp session shared by all downloads.
        self._aio_session: aiohttp.ClientSession | None = None
        self._aio_storage: Storage | None = None
        # In-memory blob cache keyed by blob name.
        # NOTE(review): this cache is unbounded — every downloaded blob stays in
        # memory for the process lifetime. Confirm that is acceptable for the
        # expected corpus size.
        self._cache: dict[str, bytes] = {}

    def _get_aio_session(self) -> aiohttp.ClientSession:
        # Recreate the session if it was never built or has been closed.
        if self._aio_session is None or self._aio_session.closed:
            connector = aiohttp.TCPConnector(
                limit=300,
                limit_per_host=50,
            )
            timeout = aiohttp.ClientTimeout(total=60)
            self._aio_session = aiohttp.ClientSession(
                timeout=timeout,
                connector=connector,
            )
        return self._aio_session

    def _get_aio_storage(self) -> Storage:
        # Lazily build the async GCS client on the shared session.
        if self._aio_storage is None:
            self._aio_storage = Storage(
                session=self._get_aio_session(),
            )
        return self._aio_storage

    async def async_get_file_stream(
        self,
        file_name: str,
        max_retries: int = 3,
    ) -> BinaryIO:
        """Get a file asynchronously with retry on transient errors.

        Args:
            file_name: The blob name to retrieve.
            max_retries: Maximum number of retry attempts.

        Returns:
            A BytesIO stream with the file contents.

        Raises:
            TimeoutError: If all retry attempts fail.
            aiohttp.ClientResponseError: On a non-retryable HTTP error.
        """
        # Serve from the in-memory cache when possible.
        if file_name in self._cache:
            log_structured_entry(
                "File retrieved from cache",
                "INFO",
                {"file": file_name, "bucket": self.bucket_name}
            )
            file_stream = io.BytesIO(self._cache[file_name])
            file_stream.name = file_name
            return file_stream
        log_structured_entry(
            "Starting file download from GCS",
            "INFO",
            {"file": file_name, "bucket": self.bucket_name}
        )
        storage_client = self._get_aio_storage()
        last_exception: Exception | None = None
        for attempt in range(max_retries):
            try:
                # Store the raw bytes in the cache before wrapping them.
                self._cache[file_name] = await storage_client.download(
                    self.bucket_name,
                    file_name,
                )
                file_stream = io.BytesIO(self._cache[file_name])
                file_stream.name = file_name
                log_structured_entry(
                    "File downloaded successfully",
                    "INFO",
                    {
                        "file": file_name,
                        "bucket": self.bucket_name,
                        "size_bytes": len(self._cache[file_name]),
                        "attempt": attempt + 1
                    }
                )
            except TimeoutError as exc:
                # Transient: remember the error and fall through to retry.
                last_exception = exc
                log_structured_entry(
                    f"Timeout downloading gs://{self.bucket_name}/{file_name} (attempt {attempt + 1}/{max_retries})",
                    "WARNING",
                    {"error": str(exc)}
                )
            except aiohttp.ClientResponseError as exc:
                last_exception = exc
                # 429 and 5xx are retryable; anything else is re-raised.
                if (
                    exc.status == HTTP_TOO_MANY_REQUESTS
                    or exc.status >= HTTP_SERVER_ERROR
                ):
                    log_structured_entry(
                        f"HTTP {exc.status} downloading gs://{self.bucket_name}/{file_name} (attempt {attempt + 1}/{max_retries})",
                        "WARNING",
                        {"status": exc.status, "message": str(exc)}
                    )
                else:
                    log_structured_entry(
                        f"Non-retryable HTTP error downloading gs://{self.bucket_name}/{file_name}",
                        "ERROR",
                        {"status": exc.status, "message": str(exc)}
                    )
                    raise
            else:
                # Success path: return the stream built above.
                return file_stream
            if attempt < max_retries - 1:
                # Exponential backoff: 0.5s, 1s, 2s, ...
                delay = 0.5 * (2**attempt)
                log_structured_entry(
                    "Retrying file download",
                    "INFO",
                    {"file": file_name, "delay_seconds": delay}
                )
                await asyncio.sleep(delay)
        msg = (
            f"Failed to download gs://{self.bucket_name}/{file_name} "
            f"after {max_retries} attempts"
        )
        log_structured_entry(
            "File download failed after all retries",
            "ERROR",
            {
                "file": file_name,
                "bucket": self.bucket_name,
                "max_retries": max_retries,
                "last_error": str(last_exception)
            }
        )
        raise TimeoutError(msg) from last_exception
class SearchResult(TypedDict):
    """One vector-search match as returned to callers."""

    # Datapoint identifier of the matched chunk.
    id: str
    # Score reported by the Matching Engine for this neighbor.
    distance: float
    # Text content of the matched document chunk.
    content: str
class GoogleCloudVectorSearch:
    """Minimal async client for the Vertex AI Matching Engine REST API."""

    def __init__(
        self,
        project_id: str,
        location: str,
        bucket: str,
        index_name: str | None = None,
    ) -> None:
        """Store configuration used to issue Matching Engine queries.

        Args:
            project_id: GCP project that owns the index endpoint.
            location: GCP region of the index endpoint.
            bucket: GCS bucket holding the document contents.
            index_name: Prefix under which neighbor contents are stored in GCS.
        """
        self.project_id = project_id
        self.location = location
        # GCS helper used to fetch the document text for each neighbor.
        self.storage = GoogleCloudFileStorage(bucket=bucket)
        self.index_name = index_name
        self._aio_session: aiohttp.ClientSession | None = None
        self._async_token: Token | None = None
        # Endpoint metadata; must be set via configure_index_endpoint().
        self._endpoint_domain: str | None = None
        self._endpoint_name: str | None = None

    async def _async_get_auth_headers(self) -> dict[str, str]:
        # Lazily create a token provider bound to the shared session, then
        # fetch an access token for this request.
        if self._async_token is None:
            self._async_token = Token(
                session=self._get_aio_session(),
                scopes=[
                    "https://www.googleapis.com/auth/cloud-platform",
                ],
            )
        access_token = await self._async_token.get()
        return {
            "Authorization": f"Bearer {access_token}",
            "Content-Type": "application/json",
        }

    def _get_aio_session(self) -> aiohttp.ClientSession:
        # Recreate the pooled session if it was never built or has been closed.
        if self._aio_session is None or self._aio_session.closed:
            connector = aiohttp.TCPConnector(
                limit=300,
                limit_per_host=50,
            )
            timeout = aiohttp.ClientTimeout(total=60)
            self._aio_session = aiohttp.ClientSession(
                timeout=timeout,
                connector=connector,
            )
        return self._aio_session

    def configure_index_endpoint(
        self,
        *,
        name: str,
        public_domain: str,
    ) -> None:
        """Persist the metadata needed to access a deployed endpoint.

        Args:
            name: Full resource name of the index endpoint.
            public_domain: Public domain used to build request URLs.

        Raises:
            ValueError: If either value is empty.
        """
        if not name:
            msg = "Index endpoint name must be a non-empty string."
            raise ValueError(msg)
        if not public_domain:
            msg = "Index endpoint domain must be a non-empty public domain."
            raise ValueError(msg)
        self._endpoint_name = name
        self._endpoint_domain = public_domain

    async def async_run_query(
        self,
        deployed_index_id: str,
        query: Sequence[float],
        limit: int,
        source: SourceNamespace | None = None,
    ) -> list[SearchResult]:
        """Run an async similarity search via the REST API.

        Args:
            deployed_index_id: The ID of the deployed index.
            query: The embedding vector for the search query.
            limit: Maximum number of nearest neighbors to return.
            source: Optional namespace filter to restrict results by source.

        Returns:
            A list of matched items with id, distance, and content.

        Raises:
            RuntimeError: If the endpoint is unconfigured or the API call fails.
        """
        if self._endpoint_domain is None or self._endpoint_name is None:
            msg = (
                "Missing endpoint metadata. Call "
                "`configure_index_endpoint` before querying."
            )
            log_structured_entry(
                "Vector search query failed - endpoint not configured",
                "ERROR",
                {"error": msg}
            )
            raise RuntimeError(msg)
        domain = self._endpoint_domain
        # The REST path needs only the trailing endpoint ID, not the full name.
        endpoint_id = self._endpoint_name.split("/")[-1]
        url = (
            f"https://{domain}/v1/projects/{self.project_id}"
            f"/locations/{self.location}"
            f"/indexEndpoints/{endpoint_id}:findNeighbors"
        )
        log_structured_entry(
            "Starting vector search query",
            "INFO",
            {
                "deployed_index_id": deployed_index_id,
                "neighbor_count": limit,
                "endpoint_id": endpoint_id,
                "embedding_dimension": len(query)
            }
        )
        datapoint: dict = {"feature_vector": list(query)}
        if source is not None:
            # Restrict matches to datapoints tagged with this source namespace.
            datapoint["restricts"] = [
                {"namespace": "source", "allow_list": [source.value]},
            ]
        payload = {
            "deployed_index_id": deployed_index_id,
            "queries": [
                {
                    "datapoint": datapoint,
                    "neighbor_count": limit,
                },
            ],
        }
        try:
            headers = await self._async_get_auth_headers()
            session = self._get_aio_session()
            async with session.post(
                url,
                json=payload,
                headers=headers,
            ) as response:
                if not response.ok:
                    body = await response.text()
                    msg = f"findNeighbors returned {response.status}: {body}"
                    log_structured_entry(
                        "Vector search API request failed",
                        "ERROR",
                        {
                            "status": response.status,
                            "response_body": body,
                            "deployed_index_id": deployed_index_id
                        }
                    )
                    raise RuntimeError(msg)
                data = await response.json()
                # Only one query was sent, so read the first result set.
                neighbors = data.get("nearestNeighbors", [{}])[0].get("neighbors", [])
                log_structured_entry(
                    "Vector search API request successful",
                    "INFO",
                    {
                        "neighbors_found": len(neighbors),
                        "deployed_index_id": deployed_index_id
                    }
                )
                if not neighbors:
                    log_structured_entry(
                        "No neighbors found in vector search",
                        "WARNING",
                        {"deployed_index_id": deployed_index_id}
                    )
                    return []
                # Fetch content for all neighbors
                content_tasks = []
                for neighbor in neighbors:
                    datapoint_id = neighbor["datapoint"]["datapointId"]
                    # Contents live at "<index_name>/contents/<datapoint_id>.md".
                    file_path = f"{self.index_name}/contents/{datapoint_id}.md"
                    content_tasks.append(
                        self.storage.async_get_file_stream(file_path),
                    )
                log_structured_entry(
                    "Fetching content for search results",
                    "INFO",
                    {"file_count": len(content_tasks)}
                )
                # Download all neighbor documents concurrently.
                file_streams = await asyncio.gather(*content_tasks)
                results: list[SearchResult] = []
                for neighbor, stream in zip(
                    neighbors,
                    file_streams,
                    strict=True,
                ):
                    results.append(
                        SearchResult(
                            id=neighbor["datapoint"]["datapointId"],
                            distance=neighbor["distance"],
                            content=stream.read().decode("utf-8"),
                        ),
                    )
                log_structured_entry(
                    "Vector search completed successfully",
                    "INFO",
                    {
                        "results_count": len(results),
                        "deployed_index_id": deployed_index_id
                    }
                )
                return results
        except Exception as e:
            # Log with context, then propagate to the caller.
            log_structured_entry(
                "Vector search query failed with exception",
                "ERROR",
                {
                    "error": str(e),
                    "error_type": type(e).__name__,
                    "deployed_index_id": deployed_index_id
                }
            )
            raise
# ---------------------------------------------------------------------------
# MCP Server
# ---------------------------------------------------------------------------
@dataclass
class AppContext:
    """Shared resources initialised once at server startup."""

    # Configured Matching Engine client (wraps GCS-backed content storage).
    vector_search: GoogleCloudVectorSearch
    # Vertex AI GenAI client used to embed incoming queries.
    genai_client: genai.Client
    # Runtime settings loaded by the utils module.
    settings: Settings
@asynccontextmanager
async def lifespan(_server: FastMCP) -> AsyncIterator[AppContext]:
    """Create and configure the vector-search client for the server lifetime.

    Builds the vector-search and GenAI clients from the global ``cfg``,
    runs non-blocking startup validations (embedding access, GCS bucket,
    index endpoint), then yields the shared AppContext to request handlers.

    Yields:
        AppContext: Clients and settings shared by all tool invocations.
    """
    log_structured_entry(
        "Initializing MCP server",
        "INFO",
        {
            "project_id": cfg.project_id,
            "location": cfg.location,
            "bucket": cfg.bucket,
            "index_name": cfg.index_name,
        }
    )
    try:
        # Initialize vector search client
        log_structured_entry("Creating GoogleCloudVectorSearch client", "INFO")
        vs = GoogleCloudVectorSearch(
            project_id=cfg.project_id,
            location=cfg.location,
            bucket=cfg.bucket,
            index_name=cfg.index_name,
        )
        # Configure endpoint
        log_structured_entry(
            "Configuring index endpoint",
            "INFO",
            {
                "endpoint_name": cfg.endpoint_name,
                "endpoint_domain": cfg.endpoint_domain,
            }
        )
        vs.configure_index_endpoint(
            name=cfg.endpoint_name,
            public_domain=cfg.endpoint_domain,
        )
        # Initialize GenAI client
        log_structured_entry(
            "Creating GenAI client",
            "INFO",
            {"project_id": cfg.project_id, "location": cfg.location}
        )
        genai_client = genai.Client(
            vertexai=True,
            project=cfg.project_id,
            location=cfg.location,
        )
        # Validate credentials and configuration by testing actual resources
        # These validations are non-blocking - errors are logged but won't stop startup
        log_structured_entry("Starting validation of credentials and resources", "INFO")
        validation_errors = []
        # 1. Validate GenAI embedding access
        log_structured_entry("Validating GenAI embedding access", "INFO")
        try:
            # Embed a throwaway string to prove model access works.
            test_response = await genai_client.aio.models.embed_content(
                model=cfg.embedding_model,
                contents="test",
                config=genai_types.EmbedContentConfig(
                    task_type="RETRIEVAL_QUERY",
                ),
            )
            if test_response and test_response.embeddings:
                embedding_values = test_response.embeddings[0].values
                log_structured_entry(
                    "GenAI embedding validation successful",
                    "INFO",
                    {"embedding_dimension": len(embedding_values) if embedding_values else 0}
                )
            else:
                msg = "Embedding validation returned empty response"
                log_structured_entry(msg, "WARNING")
                validation_errors.append(msg)
        except Exception as e:
            log_structured_entry(
                "Failed to validate GenAI embedding access - service may not work correctly",
                "WARNING",
                {"error": str(e), "error_type": type(e).__name__}
            )
            validation_errors.append(f"GenAI: {str(e)}")
        # 2. Validate GCS bucket access
        log_structured_entry(
            "Validating GCS bucket access",
            "INFO",
            {"bucket": cfg.bucket}
        )
        try:
            # NOTE(review): reaches into the storage helper's private session
            # and token plumbing; consider exposing a public health check.
            session = vs.storage._get_aio_session()
            token_obj = Token(
                session=session,
                scopes=["https://www.googleapis.com/auth/cloud-platform"],
            )
            access_token = await token_obj.get()
            headers = {"Authorization": f"Bearer {access_token}"}
            # List at most one object purely to prove read access to the bucket.
            async with session.get(
                f"https://storage.googleapis.com/storage/v1/b/{cfg.bucket}/o?maxResults=1",
                headers=headers,
            ) as response:
                if response.status == 403:
                    msg = f"Access denied to bucket '{cfg.bucket}'. Check permissions."
                    log_structured_entry(
                        "GCS bucket validation failed - access denied - service may not work correctly",
                        "WARNING",
                        {"bucket": cfg.bucket, "status": response.status}
                    )
                    validation_errors.append(msg)
                elif response.status == 404:
                    msg = f"Bucket '{cfg.bucket}' not found. Check bucket name and project."
                    log_structured_entry(
                        "GCS bucket validation failed - not found - service may not work correctly",
                        "WARNING",
                        {"bucket": cfg.bucket, "status": response.status}
                    )
                    validation_errors.append(msg)
                elif not response.ok:
                    body = await response.text()
                    msg = f"Failed to access bucket '{cfg.bucket}': {response.status}"
                    log_structured_entry(
                        "GCS bucket validation failed - service may not work correctly",
                        "WARNING",
                        {"bucket": cfg.bucket, "status": response.status, "response": body}
                    )
                    validation_errors.append(msg)
                else:
                    log_structured_entry(
                        "GCS bucket validation successful",
                        "INFO",
                        {"bucket": cfg.bucket}
                    )
        except Exception as e:
            log_structured_entry(
                "Failed to validate GCS bucket access - service may not work correctly",
                "WARNING",
                {"error": str(e), "error_type": type(e).__name__, "bucket": cfg.bucket}
            )
            validation_errors.append(f"GCS: {str(e)}")
        # 3. Validate vector search endpoint access
        log_structured_entry(
            "Validating vector search endpoint access",
            "INFO",
            {"endpoint_name": cfg.endpoint_name}
        )
        try:
            # Try to get endpoint info
            headers = await vs._async_get_auth_headers()
            session = vs._get_aio_session()
            endpoint_url = (
                f"https://{cfg.location}-aiplatform.googleapis.com/v1/{cfg.endpoint_name}"
            )
            async with session.get(endpoint_url, headers=headers) as response:
                if response.status == 403:
                    msg = f"Access denied to endpoint '{cfg.endpoint_name}'. Check permissions."
                    log_structured_entry(
                        "Vector search endpoint validation failed - access denied - service may not work correctly",
                        "WARNING",
                        {"endpoint": cfg.endpoint_name, "status": response.status}
                    )
                    validation_errors.append(msg)
                elif response.status == 404:
                    msg = f"Endpoint '{cfg.endpoint_name}' not found. Check endpoint name and project."
                    log_structured_entry(
                        "Vector search endpoint validation failed - not found - service may not work correctly",
                        "WARNING",
                        {"endpoint": cfg.endpoint_name, "status": response.status}
                    )
                    validation_errors.append(msg)
                elif not response.ok:
                    body = await response.text()
                    msg = f"Failed to access endpoint '{cfg.endpoint_name}': {response.status}"
                    log_structured_entry(
                        "Vector search endpoint validation failed - service may not work correctly",
                        "WARNING",
                        {"endpoint": cfg.endpoint_name, "status": response.status, "response": body}
                    )
                    validation_errors.append(msg)
                else:
                    log_structured_entry(
                        "Vector search endpoint validation successful",
                        "INFO",
                        {"endpoint": cfg.endpoint_name}
                    )
        except Exception as e:
            log_structured_entry(
                "Failed to validate vector search endpoint access - service may not work correctly",
                "WARNING",
                {"error": str(e), "error_type": type(e).__name__, "endpoint": cfg.endpoint_name}
            )
            validation_errors.append(f"Vector Search: {str(e)}")
        # Summary of validations
        if validation_errors:
            log_structured_entry(
                "MCP server started with validation errors - service may not work correctly",
                "WARNING",
                {"validation_errors": validation_errors, "error_count": len(validation_errors)}
            )
        else:
            log_structured_entry("All validations passed - MCP server initialization complete", "INFO")
        # Hand the shared resources to the server for the rest of its lifetime.
        yield AppContext(
            vector_search=vs,
            genai_client=genai_client,
            settings=cfg,
        )
    except Exception as e:
        log_structured_entry(
            "Failed to initialize MCP server",
            "ERROR",
            {
                "error": str(e),
                "error_type": type(e).__name__,
            }
        )
        raise
    finally:
        log_structured_entry("MCP server lifespan ending", "INFO")
# FastMCP server instance; host/port come from the CLI args parsed in utils.
mcp = FastMCP(
    "knowledge-search",
    host=_args.host,
    port=_args.port,
    lifespan=lifespan,
)
@mcp.tool()
async def knowledge_search(
    query: str,
    ctx: Context,
    source: SourceNamespace | None = None,
) -> str:
    """Search a knowledge base using a natural-language query.

    Args:
        query: The text query to search for.
        ctx: MCP request context (injected automatically).
        source: Optional filter to restrict results by source.
            Allowed values: 'Educacion Financiera',
            'Productos y Servicios', 'Funcionalidades de la App Movil'.

    Returns:
        A formatted string containing matched documents with id and content.
    """
    import time
    app: AppContext = ctx.request_context.lifespan_context
    t0 = time.perf_counter()
    # Minimum score a result must exceed to be kept after filtering.
    min_sim = 0.6
    log_structured_entry(
        "knowledge_search request received",
        "INFO",
        {"query": query[:100]}  # Log first 100 chars of query
    )
    try:
        # Generate embedding for the query
        log_structured_entry("Generating query embedding", "INFO")
        try:
            response = await app.genai_client.aio.models.embed_content(
                model=app.settings.embedding_model,
                contents=query,
                config=genai_types.EmbedContentConfig(
                    task_type="RETRIEVAL_QUERY",
                ),
            )
            embedding = response.embeddings[0].values
            t_embed = time.perf_counter()
            log_structured_entry(
                "Query embedding generated successfully",
                "INFO",
                {"time_ms": round((t_embed - t0) * 1000, 1)}
            )
        except Exception as e:
            error_type = type(e).__name__
            error_msg = str(e)
            # Check if it's a rate limit error
            if "429" in error_msg or "RESOURCE_EXHAUSTED" in error_msg:
                log_structured_entry(
                    "Rate limit exceeded while generating embedding",
                    "WARNING",
                    {
                        "error": error_msg,
                        "error_type": error_type,
                        "query": query[:100]
                    }
                )
                return "Error: API rate limit exceeded. Please try again later."
            else:
                log_structured_entry(
                    "Failed to generate query embedding",
                    "ERROR",
                    {
                        "error": error_msg,
                        "error_type": error_type,
                        "query": query[:100]
                    }
                )
                return f"Error generating embedding: {error_msg}"
        # Perform vector search
        log_structured_entry("Performing vector search", "INFO")
        try:
            search_results = await app.vector_search.async_run_query(
                deployed_index_id=app.settings.deployed_index_id,
                query=embedding,
                limit=app.settings.search_limit,
                source=source,
            )
            t_search = time.perf_counter()
        except Exception as e:
            log_structured_entry(
                "Vector search failed",
                "ERROR",
                {
                    "error": str(e),
                    "error_type": type(e).__name__,
                    "query": query[:100]
                }
            )
            return f"Error performing vector search: {str(e)}"
        # Apply similarity filtering
        if search_results:
            # Keep only results within 10% of the best score and above min_sim.
            max_sim = max(r["distance"] for r in search_results)
            cutoff = max_sim * 0.9
            search_results = [
                s
                for s in search_results
                if s["distance"] > cutoff and s["distance"] > min_sim
            ]
        log_structured_entry(
            "knowledge_search completed successfully",
            "INFO",
            {
                "embedding_ms": f"{round((t_embed - t0) * 1000, 1)}ms",
                "vector_search_ms": f"{round((t_search - t_embed) * 1000, 1)}ms",
                "total_ms": f"{round((t_search - t0) * 1000, 1)}ms",
                "source_filter": source.value if source is not None else None,
                "results_count": len(search_results),
                "chunks": [s["id"] for s in search_results]
            }
        )
        # Format results as XML-like documents
        if not search_results:
            log_structured_entry(
                "No results found for query",
                "INFO",
                {"query": query[:100]}
            )
            return "No relevant documents found for your query."
        formatted_results = [
            f"<document {i} name={result['id']}>\n{result['content']}\n</document {i}>"
            for i, result in enumerate(search_results, start=1)
        ]
        return "\n".join(formatted_results)
    except Exception as e:
        # Catch-all for any unexpected errors
        log_structured_entry(
            "Unexpected error in knowledge_search",
            "ERROR",
            {
                "error": str(e),
                "error_type": type(e).__name__,
                "query": query[:100]
            }
        )
        return f"Unexpected error during search: {str(e)}"
if __name__ == "__main__":
    # Run the server on the transport selected via the command line.
    mcp.run(transport=_args.transport)

View File

@@ -1,7 +1,7 @@
[project] [project]
name = "knowledge-search-mcp" name = "knowledge-search-mcp"
version = "0.1.0" version = "0.1.0"
description = "Add your description here" description = "MCP server for semantic search over Vertex AI Vector Search"
readme = "README.md" readme = "README.md"
requires-python = ">=3.12" requires-python = ">=3.12"
dependencies = [ dependencies = [
@@ -15,9 +15,24 @@ dependencies = [
"pyyaml>=6.0", "pyyaml>=6.0",
] ]
[project.scripts]
knowledge-search-mcp = "knowledge_search_mcp.__main__:main"
[dependency-groups] [dependency-groups]
dev = [ dev = [
"google-adk>=1.25.1", "google-adk>=1.25.1",
"pytest>=8.0.0",
"pytest-asyncio>=0.24.0",
"pytest-cov>=6.0.0",
"ruff>=0.15.2", "ruff>=0.15.2",
"ty>=0.0.18", "ty>=0.0.18",
] ]
[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
pythonpath = ["."]
[build-system]
requires = ["uv_build>=0.8.3,<0.9.0"]
build-backend = "uv_build"

View File

@@ -0,0 +1,15 @@
"""MCP server for semantic search over Vertex AI Vector Search."""
from .clients.storage import GoogleCloudFileStorage
from .clients.vector_search import GoogleCloudVectorSearch
from .models import AppContext, SearchResult, SourceNamespace
from .utils.cache import LRUCache
__all__ = [
"GoogleCloudFileStorage",
"GoogleCloudVectorSearch",
"SourceNamespace",
"SearchResult",
"AppContext",
"LRUCache",
]

View File

@@ -0,0 +1,135 @@
# ruff: noqa: INP001
"""MCP server for semantic search over Vertex AI Vector Search."""
import time
from mcp.server.fastmcp import Context, FastMCP
from .config import _args
from .logging import log_structured_entry
from .models import AppContext, SourceNamespace
from .server import lifespan
from .services.search import filter_search_results, format_search_results, generate_query_embedding
# FastMCP server instance; host/port come from the parsed CLI arguments.
mcp = FastMCP(
    "knowledge-search",
    host=_args.host,
    port=_args.port,
    lifespan=lifespan,
)
@mcp.tool()
async def knowledge_search(
    query: str,
    ctx: Context,
    source: SourceNamespace | None = None,
) -> str:
    """Search a knowledge base using a natural-language query.

    Args:
        query: The text query to search for.
        ctx: MCP request context (injected automatically).
        source: Optional filter to restrict results by source.
            Allowed values: 'Educacion Financiera',
            'Productos y Servicios', 'Funcionalidades de la App Movil'.

    Returns:
        A formatted string containing matched documents with id and content.
    """
    app: AppContext = ctx.request_context.lifespan_context
    t0 = time.perf_counter()
    log_structured_entry(
        "knowledge_search request received",
        "INFO",
        {"query": query[:100]}  # Log first 100 chars of query
    )
    try:
        # Generate embedding for the query
        embedding, error = await generate_query_embedding(
            app.genai_client,
            app.settings.embedding_model,
            query,
        )
        if error:
            # `error` is a user-facing message returned directly to the caller.
            return error
        t_embed = time.perf_counter()
        log_structured_entry(
            "Query embedding generated successfully",
            "INFO",
            {"time_ms": round((t_embed - t0) * 1000, 1)}
        )
        # Perform vector search
        log_structured_entry("Performing vector search", "INFO")
        try:
            search_results = await app.vector_search.async_run_query(
                deployed_index_id=app.settings.deployed_index_id,
                query=embedding,
                limit=app.settings.search_limit,
                source=source,
            )
            t_search = time.perf_counter()
        except Exception as e:
            log_structured_entry(
                "Vector search failed",
                "ERROR",
                {
                    "error": str(e),
                    "error_type": type(e).__name__,
                    "query": query[:100]
                }
            )
            return f"Error performing vector search: {str(e)}"
        # Apply similarity filtering
        filtered_results = filter_search_results(search_results)
        log_structured_entry(
            "knowledge_search completed successfully",
            "INFO",
            {
                "embedding_ms": f"{round((t_embed - t0) * 1000, 1)}ms",
                "vector_search_ms": f"{round((t_search - t_embed) * 1000, 1)}ms",
                "total_ms": f"{round((t_search - t0) * 1000, 1)}ms",
                "source_filter": source.value if source is not None else None,
                "results_count": len(filtered_results),
                "chunks": [s["id"] for s in filtered_results]
            }
        )
        # Format and return results
        if not filtered_results:
            log_structured_entry(
                "No results found for query",
                "INFO",
                {"query": query[:100]}
            )
        # NOTE(review): presumably format_search_results renders the
        # "no results" message for an empty list — confirm in services.search.
        return format_search_results(filtered_results)
    except Exception as e:
        # Catch-all for any unexpected errors
        log_structured_entry(
            "Unexpected error in knowledge_search",
            "ERROR",
            {
                "error": str(e),
                "error_type": type(e).__name__,
                "query": query[:100]
            }
        )
        return f"Unexpected error during search: {str(e)}"
def main() -> None:
    """Launch the MCP server on the transport chosen via CLI arguments."""
    transport = _args.transport
    mcp.run(transport=transport)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,11 @@
"""Client modules for Google Cloud services."""
from .base import BaseGoogleCloudClient
from .storage import GoogleCloudFileStorage
from .vector_search import GoogleCloudVectorSearch
__all__ = [
"BaseGoogleCloudClient",
"GoogleCloudFileStorage",
"GoogleCloudVectorSearch",
]

View File

@@ -0,0 +1,31 @@
# ruff: noqa: INP001
"""Base client with shared aiohttp session management."""
import aiohttp
class BaseGoogleCloudClient:
    """Base class with shared aiohttp session management.

    Subclasses call ``_get_aio_session()`` to obtain a lazily-created,
    connection-pooled session, and ``close()`` to release it.
    """

    def __init__(self) -> None:
        """Start with no session; one is created on first use."""
        self._aio_session: aiohttp.ClientSession | None = None

    def _get_aio_session(self) -> aiohttp.ClientSession:
        """Return the live session, building a fresh one if absent or closed."""
        session = self._aio_session
        if session is not None and not session.closed:
            return session
        # Pooling parameters: 300 connections total, 50 per host,
        # 60-second total timeout per request.
        self._aio_session = aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=60),
            connector=aiohttp.TCPConnector(limit=300, limit_per_host=50),
        )
        return self._aio_session

    async def close(self) -> None:
        """Close the session when one exists and is still open."""
        session = self._aio_session
        if session and not session.closed:
            await session.close()

View File

@@ -0,0 +1,144 @@
# ruff: noqa: INP001
"""Google Cloud Storage client with caching."""
import asyncio
import io
from typing import BinaryIO
import aiohttp
from gcloud.aio.storage import Storage
from ..logging import log_structured_entry
from ..utils.cache import LRUCache
from .base import BaseGoogleCloudClient
HTTP_TOO_MANY_REQUESTS = 429
HTTP_SERVER_ERROR = 500
class GoogleCloudFileStorage(BaseGoogleCloudClient):
    """Cache-aware helper for downloading files from Google Cloud Storage."""

    def __init__(self, bucket: str, cache_size: int = 100) -> None:
        """Initialize the storage helper with LRU cache.

        Args:
            bucket: Name of the GCS bucket blobs are downloaded from.
            cache_size: Maximum number of blobs kept in the LRU cache.
        """
        super().__init__()
        self.bucket_name = bucket
        self._aio_storage: Storage | None = None
        # Bounded LRU cache of blob bytes keyed by blob name.
        self._cache = LRUCache(max_size=cache_size)

    def _get_aio_storage(self) -> Storage:
        # Lazily build the async GCS client on the shared session.
        if self._aio_storage is None:
            self._aio_storage = Storage(
                session=self._get_aio_session(),
            )
        return self._aio_storage

    async def async_get_file_stream(
        self,
        file_name: str,
        max_retries: int = 3,
    ) -> BinaryIO:
        """Get a file asynchronously with retry on transient errors.

        Args:
            file_name: The blob name to retrieve.
            max_retries: Maximum number of retry attempts.

        Returns:
            A BytesIO stream with the file contents.

        Raises:
            TimeoutError: If all retry attempts fail.
            aiohttp.ClientResponseError: On a non-retryable HTTP error.
        """
        # Serve from the LRU cache when possible.
        cached_content = self._cache.get(file_name)
        if cached_content is not None:
            log_structured_entry(
                "File retrieved from cache",
                "INFO",
                {"file": file_name, "bucket": self.bucket_name}
            )
            file_stream = io.BytesIO(cached_content)
            file_stream.name = file_name
            return file_stream
        log_structured_entry(
            "Starting file download from GCS",
            "INFO",
            {"file": file_name, "bucket": self.bucket_name}
        )
        storage_client = self._get_aio_storage()
        last_exception: Exception | None = None
        for attempt in range(max_retries):
            try:
                content = await storage_client.download(
                    self.bucket_name,
                    file_name,
                )
                # Cache the raw bytes before wrapping them in a stream.
                self._cache.put(file_name, content)
                file_stream = io.BytesIO(content)
                file_stream.name = file_name
                log_structured_entry(
                    "File downloaded successfully",
                    "INFO",
                    {
                        "file": file_name,
                        "bucket": self.bucket_name,
                        "size_bytes": len(content),
                        "attempt": attempt + 1
                    }
                )
            except TimeoutError as exc:
                # Transient: remember the error and fall through to retry.
                last_exception = exc
                log_structured_entry(
                    f"Timeout downloading gs://{self.bucket_name}/{file_name} (attempt {attempt + 1}/{max_retries})",
                    "WARNING",
                    {"error": str(exc)}
                )
            except aiohttp.ClientResponseError as exc:
                last_exception = exc
                # 429 and 5xx are retryable; anything else is re-raised.
                if (
                    exc.status == HTTP_TOO_MANY_REQUESTS
                    or exc.status >= HTTP_SERVER_ERROR
                ):
                    log_structured_entry(
                        f"HTTP {exc.status} downloading gs://{self.bucket_name}/{file_name} (attempt {attempt + 1}/{max_retries})",
                        "WARNING",
                        {"status": exc.status, "message": str(exc)}
                    )
                else:
                    log_structured_entry(
                        f"Non-retryable HTTP error downloading gs://{self.bucket_name}/{file_name}",
                        "ERROR",
                        {"status": exc.status, "message": str(exc)}
                    )
                    raise
            else:
                # Success path: return the stream built above.
                return file_stream
            if attempt < max_retries - 1:
                # Exponential backoff: 0.5s, 1s, 2s, ...
                delay = 0.5 * (2**attempt)
                log_structured_entry(
                    "Retrying file download",
                    "INFO",
                    {"file": file_name, "delay_seconds": delay}
                )
                await asyncio.sleep(delay)
        msg = (
            f"Failed to download gs://{self.bucket_name}/{file_name} "
            f"after {max_retries} attempts"
        )
        log_structured_entry(
            "File download failed after all retries",
            "ERROR",
            {
                "file": file_name,
                "bucket": self.bucket_name,
                "max_retries": max_retries,
                "last_error": str(last_exception)
            }
        )
        raise TimeoutError(msg) from last_exception

View File

@@ -0,0 +1,226 @@
# ruff: noqa: INP001
"""Google Cloud Vector Search client."""
import asyncio
from collections.abc import Sequence
from gcloud.aio.auth import Token
from ..logging import log_structured_entry
from ..models import SearchResult, SourceNamespace
from .base import BaseGoogleCloudClient
from .storage import GoogleCloudFileStorage
class GoogleCloudVectorSearch(BaseGoogleCloudClient):
    """Minimal async client for the Vertex AI Matching Engine REST API."""

    def __init__(
        self,
        project_id: str,
        location: str,
        bucket: str,
        index_name: str | None = None,
    ) -> None:
        """Store configuration used to issue Matching Engine queries.

        Args:
            project_id: Google Cloud project that owns the index.
            location: Region of the index endpoint (e.g. "us-central1").
            bucket: GCS bucket holding the per-datapoint content files.
            index_name: Optional index name used as the GCS content prefix.
        """
        super().__init__()
        self.project_id = project_id
        self.location = location
        self.storage = GoogleCloudFileStorage(bucket=bucket)
        self.index_name = index_name
        # Lazily initialised auth token and endpoint metadata.
        self._async_token: Token | None = None
        self._endpoint_domain: str | None = None
        self._endpoint_name: str | None = None

    async def _async_get_auth_headers(self) -> dict[str, str]:
        """Return Authorization/Content-Type headers with a fresh access token."""
        if self._async_token is None:
            # Reuse the shared aiohttp session owned by the base client.
            self._async_token = Token(
                session=self._get_aio_session(),
                scopes=[
                    "https://www.googleapis.com/auth/cloud-platform",
                ],
            )
        access_token = await self._async_token.get()
        return {
            "Authorization": f"Bearer {access_token}",
            "Content-Type": "application/json",
        }

    async def close(self) -> None:
        """Close aiohttp sessions for both vector search and storage."""
        await super().close()
        await self.storage.close()

    def configure_index_endpoint(
        self,
        *,
        name: str,
        public_domain: str,
    ) -> None:
        """Persist the metadata needed to access a deployed endpoint.

        Args:
            name: Fully-qualified index endpoint resource name.
            public_domain: Public domain of the deployed index endpoint.

        Raises:
            ValueError: If either value is empty.
        """
        if not name:
            msg = "Index endpoint name must be a non-empty string."
            raise ValueError(msg)
        if not public_domain:
            msg = "Index endpoint domain must be a non-empty public domain."
            raise ValueError(msg)
        self._endpoint_name = name
        self._endpoint_domain = public_domain

    async def async_run_query(
        self,
        deployed_index_id: str,
        query: Sequence[float],
        limit: int,
        source: SourceNamespace | None = None,
    ) -> list[SearchResult]:
        """Run an async similarity search via the REST API.

        Args:
            deployed_index_id: The ID of the deployed index.
            query: The embedding vector for the search query.
            limit: Maximum number of nearest neighbors to return.
            source: Optional namespace filter to restrict results by source.

        Returns:
            A list of matched items with id, distance, and content.

        Raises:
            RuntimeError: If the endpoint is not configured or the API call
                returns a non-OK status.
        """
        if self._endpoint_domain is None or self._endpoint_name is None:
            msg = (
                "Missing endpoint metadata. Call "
                "`configure_index_endpoint` before querying."
            )
            log_structured_entry(
                "Vector search query failed - endpoint not configured",
                "ERROR",
                {"error": msg}
            )
            raise RuntimeError(msg)
        domain = self._endpoint_domain
        # The REST path wants the bare endpoint ID, not the full resource name.
        endpoint_id = self._endpoint_name.split("/")[-1]
        url = (
            f"https://{domain}/v1/projects/{self.project_id}"
            f"/locations/{self.location}"
            f"/indexEndpoints/{endpoint_id}:findNeighbors"
        )
        log_structured_entry(
            "Starting vector search query",
            "INFO",
            {
                "deployed_index_id": deployed_index_id,
                "neighbor_count": limit,
                "endpoint_id": endpoint_id,
                "embedding_dimension": len(query)
            }
        )
        datapoint: dict = {"feature_vector": list(query)}
        if source is not None:
            # Restrict matches to datapoints tagged with this source namespace.
            datapoint["restricts"] = [
                {"namespace": "source", "allow_list": [source.value]},
            ]
        payload = {
            "deployed_index_id": deployed_index_id,
            "queries": [
                {
                    "datapoint": datapoint,
                    "neighbor_count": limit,
                },
            ],
        }
        try:
            headers = await self._async_get_auth_headers()
            session = self._get_aio_session()
            async with session.post(
                url,
                json=payload,
                headers=headers,
            ) as response:
                if not response.ok:
                    body = await response.text()
                    msg = f"findNeighbors returned {response.status}: {body}"
                    log_structured_entry(
                        "Vector search API request failed",
                        "ERROR",
                        {
                            "status": response.status,
                            "response_body": body,
                            "deployed_index_id": deployed_index_id
                        }
                    )
                    raise RuntimeError(msg)
                data = await response.json()
            # Guard against "nearestNeighbors" being absent OR an empty list;
            # indexing [0] on an empty list previously raised IndexError.
            nearest = data.get("nearestNeighbors") or [{}]
            neighbors = nearest[0].get("neighbors", [])
            log_structured_entry(
                "Vector search API request successful",
                "INFO",
                {
                    "neighbors_found": len(neighbors),
                    "deployed_index_id": deployed_index_id
                }
            )
            if not neighbors:
                log_structured_entry(
                    "No neighbors found in vector search",
                    "WARNING",
                    {"deployed_index_id": deployed_index_id}
                )
                return []
            # Fetch content for all neighbors concurrently from GCS.
            content_tasks = []
            for neighbor in neighbors:
                datapoint_id = neighbor["datapoint"]["datapointId"]
                file_path = f"{self.index_name}/contents/{datapoint_id}.md"
                content_tasks.append(
                    self.storage.async_get_file_stream(file_path),
                )
            log_structured_entry(
                "Fetching content for search results",
                "INFO",
                {"file_count": len(content_tasks)}
            )
            file_streams = await asyncio.gather(*content_tasks)
            results: list[SearchResult] = []
            for neighbor, stream in zip(
                neighbors,
                file_streams,
                strict=True,
            ):
                results.append(
                    SearchResult(
                        id=neighbor["datapoint"]["datapointId"],
                        distance=neighbor["distance"],
                        content=stream.read().decode("utf-8"),
                    ),
                )
            log_structured_entry(
                "Vector search completed successfully",
                "INFO",
                {
                    "results_count": len(results),
                    "deployed_index_id": deployed_index_id
                }
            )
            return results
        except Exception as e:
            log_structured_entry(
                "Vector search query failed with exception",
                "ERROR",
                {
                    "error": str(e),
                    "error_type": type(e).__name__,
                    "deployed_index_id": deployed_index_id
                }
            )
            raise

View File

@@ -1,9 +1,24 @@
import os import os
import sys
import argparse import argparse
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, YamlConfigSettingsSource from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, YamlConfigSettingsSource
def _parse_args() -> argparse.Namespace: def _parse_args() -> argparse.Namespace:
"""Parse command-line arguments.
Returns a namespace with default values if running under pytest.
"""
# Don't parse args if running under pytest
if "pytest" in sys.modules:
parser = argparse.ArgumentParser()
return argparse.Namespace(
transport="stdio",
host="0.0.0.0",
port=8080,
config=os.environ.get("CONFIG_FILE", "config.yaml"),
)
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
"--transport", "--transport",
@@ -56,5 +71,24 @@ class Settings(BaseSettings):
) )
# Singleton instance of Settings # Lazy singleton instance of Settings
cfg = Settings.model_validate({}) _cfg: Settings | None = None
def get_config() -> Settings:
    """Return the process-wide Settings instance, creating it on first call."""
    global _cfg
    if _cfg is not None:
        return _cfg
    # First access: validate settings from the environment/config sources.
    _cfg = Settings.model_validate({})
    return _cfg
# For backwards compatibility, provide cfg as a property-like accessor
class _ConfigProxy:
    """Proxy object that lazily loads config on attribute access.

    Lets `from .config import cfg` keep working while deferring Settings
    validation until the first attribute is actually read.
    """

    def __getattr__(self, name: str):
        # Delegate every attribute read to the lazily-created singleton.
        return getattr(get_config(), name)


cfg = _ConfigProxy()  # type: ignore[assignment]

View File

@@ -9,13 +9,22 @@ from typing import Optional, Dict, Literal
import google.cloud.logging import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler from google.cloud.logging.handlers import CloudLoggingHandler
from .config import cfg from .config import get_config
def _setup_logger() -> logging.Logger: _eval_log: logging.Logger | None = None
"""Create or return the singleton evaluation logger."""
def _get_logger() -> logging.Logger:
"""Get or create the singleton evaluation logger."""
global _eval_log
if _eval_log is not None:
return _eval_log
cfg = get_config()
logger = logging.getLogger(cfg.log_name) logger = logging.getLogger(cfg.log_name)
if any(isinstance(h, CloudLoggingHandler) for h in logger.handlers): if any(isinstance(h, CloudLoggingHandler) for h in logger.handlers):
_eval_log = logger
return logger return logger
try: try:
@@ -29,12 +38,10 @@ def _setup_logger() -> logging.Logger:
logger = logging.getLogger(cfg.log_name) logger = logging.getLogger(cfg.log_name)
logger.warning("Cloud Logging setup failed; using console. Error: %s", e) logger.warning("Cloud Logging setup failed; using console. Error: %s", e)
_eval_log = logger
return logger return logger
_eval_log = _setup_logger()
def log_structured_entry(message: str, severity: Literal["INFO", "WARNING", "ERROR"], custom_log: Optional[Dict] = None) -> None: def log_structured_entry(message: str, severity: Literal["INFO", "WARNING", "ERROR"], custom_log: Optional[Dict] = None) -> None:
""" """
Emit a JSON-structured log row. Emit a JSON-structured log row.
@@ -45,4 +52,5 @@ def log_structured_entry(message: str, severity: Literal["INFO", "WARNING", "ERR
custom_log: A dict with your structured payload. custom_log: A dict with your structured payload.
""" """
level = getattr(logging, severity.upper(), logging.INFO) level = getattr(logging, severity.upper(), logging.INFO)
_eval_log.log(level, message, extra={"json_fields": {"message": message, "custom": custom_log or {}}}) logger = _get_logger()
logger.log(level, message, extra={"json_fields": {"message": message, "custom": custom_log or {}}})

View File

@@ -0,0 +1,37 @@
# ruff: noqa: INP001
"""Domain models for knowledge search MCP server."""
from dataclasses import dataclass
from enum import Enum
from typing import TYPE_CHECKING, TypedDict
if TYPE_CHECKING:
from google import genai
from .clients.vector_search import GoogleCloudVectorSearch
from .config import Settings
class SourceNamespace(str, Enum):
    """Allowed values for the 'source' namespace filter."""

    # NOTE(review): these literal strings are sent as the allow_list of the
    # 'source' restrict namespace — confirm they match what ingestion stored.
    EDUCACION_FINANCIERA = "Educacion Financiera"
    PRODUCTOS_Y_SERVICIOS = "Productos y Servicios"
    FUNCIONALIDADES_APP_MOVIL = "Funcionalidades de la App Movil"
class SearchResult(TypedDict):
    """Structured response item returned by the vector search API."""

    id: str  # datapoint ID of the matched document
    distance: float  # similarity score reported by findNeighbors
    content: str  # document text fetched from GCS
@dataclass
class AppContext:
    """Shared resources initialised once at server startup."""

    # String annotations keep the heavy client imports behind TYPE_CHECKING.
    vector_search: "GoogleCloudVectorSearch"
    genai_client: "genai.Client"
    settings: "Settings"

View File

@@ -0,0 +1,129 @@
# ruff: noqa: INP001
"""MCP server lifecycle management."""
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
from google import genai
from mcp.server.fastmcp import FastMCP
from .clients.vector_search import GoogleCloudVectorSearch
from .config import Settings, cfg
from .logging import log_structured_entry
from .models import AppContext
from .services.validation import (
validate_genai_access,
validate_gcs_access,
validate_vector_search_access,
)
@asynccontextmanager
async def lifespan(_server: FastMCP) -> AsyncIterator[AppContext]:
    """Create and configure the vector-search client for the server lifetime.

    Startup order: build the vector-search client, configure its endpoint,
    build the GenAI client, then run non-blocking validations. Yields the
    shared AppContext; client sessions are closed on shutdown.
    """
    log_structured_entry(
        "Initializing MCP server",
        "INFO",
        {
            "project_id": cfg.project_id,
            "location": cfg.location,
            "bucket": cfg.bucket,
            "index_name": cfg.index_name,
        }
    )
    # Tracked outside the try so the finally block can close it even when
    # initialization fails part-way through.
    vs: GoogleCloudVectorSearch | None = None
    try:
        # Initialize vector search client
        log_structured_entry("Creating GoogleCloudVectorSearch client", "INFO")
        vs = GoogleCloudVectorSearch(
            project_id=cfg.project_id,
            location=cfg.location,
            bucket=cfg.bucket,
            index_name=cfg.index_name,
        )
        # Configure endpoint
        log_structured_entry(
            "Configuring index endpoint",
            "INFO",
            {
                "endpoint_name": cfg.endpoint_name,
                "endpoint_domain": cfg.endpoint_domain,
            }
        )
        vs.configure_index_endpoint(
            name=cfg.endpoint_name,
            public_domain=cfg.endpoint_domain,
        )
        # Initialize GenAI client
        log_structured_entry(
            "Creating GenAI client",
            "INFO",
            {"project_id": cfg.project_id, "location": cfg.location}
        )
        genai_client = genai.Client(
            vertexai=True,
            project=cfg.project_id,
            location=cfg.location,
        )
        # Validate credentials and configuration by testing actual resources
        # These validations are non-blocking - errors are logged but won't stop startup
        log_structured_entry("Starting validation of credentials and resources", "INFO")
        validation_errors = []
        # Run all validations
        genai_error = await validate_genai_access(genai_client, cfg)
        if genai_error:
            validation_errors.append(genai_error)
        gcs_error = await validate_gcs_access(vs, cfg)
        if gcs_error:
            validation_errors.append(gcs_error)
        vs_error = await validate_vector_search_access(vs, cfg)
        if vs_error:
            validation_errors.append(vs_error)
        # Summary of validations
        if validation_errors:
            log_structured_entry(
                "MCP server started with validation errors - service may not work correctly",
                "WARNING",
                {"validation_errors": validation_errors, "error_count": len(validation_errors)}
            )
        else:
            log_structured_entry("All validations passed - MCP server initialization complete", "INFO")
        # NOTE(review): cfg is a lazy proxy rather than a Settings instance —
        # confirm AppContext consumers only read attributes from it.
        yield AppContext(
            vector_search=vs,
            genai_client=genai_client,
            settings=cfg,
        )
    except Exception as e:
        log_structured_entry(
            "Failed to initialize MCP server",
            "ERROR",
            {
                "error": str(e),
                "error_type": type(e).__name__,
            }
        )
        raise
    finally:
        log_structured_entry("MCP server lifespan ending", "INFO")
        # Clean up resources
        if vs is not None:
            try:
                await vs.close()
                log_structured_entry("Closed aiohttp sessions", "INFO")
            except Exception as e:
                log_structured_entry(
                    "Error closing aiohttp sessions",
                    "WARNING",
                    {"error": str(e), "error_type": type(e).__name__}
                )

View File

@@ -0,0 +1,13 @@
"""Service modules for business logic."""
from .search import filter_search_results, format_search_results, generate_query_embedding
from .validation import validate_genai_access, validate_gcs_access, validate_vector_search_access
__all__ = [
"filter_search_results",
"format_search_results",
"generate_query_embedding",
"validate_genai_access",
"validate_gcs_access",
"validate_vector_search_access",
]

View File

@@ -0,0 +1,110 @@
# ruff: noqa: INP001
"""Search helper functions."""
from google import genai
from google.genai import types as genai_types
from ..logging import log_structured_entry
from ..models import SearchResult
async def generate_query_embedding(
    genai_client: genai.Client,
    embedding_model: str,
    query: str,
) -> tuple[list[float], str | None]:
    """Generate embedding for search query.

    Returns:
        Tuple of (embedding vector, error message). Error message is None on success.
    """
    # Guard: reject blank queries before spending an API call.
    if not query or not query.strip():
        return ([], "Error: Query cannot be empty")
    log_structured_entry("Generating query embedding", "INFO")
    try:
        response = await genai_client.aio.models.embed_content(
            model=embedding_model,
            contents=query,
            config=genai_types.EmbedContentConfig(
                task_type="RETRIEVAL_QUERY",
            ),
        )
        vector = response.embeddings[0].values
    except Exception as exc:
        details = {
            "error": str(exc),
            "error_type": type(exc).__name__,
            "query": query[:100],
        }
        # Rate-limit errors get a friendlier, retry-oriented message.
        if "429" in details["error"] or "RESOURCE_EXHAUSTED" in details["error"]:
            log_structured_entry(
                "Rate limit exceeded while generating embedding",
                "WARNING",
                details,
            )
            return ([], "Error: API rate limit exceeded. Please try again later.")
        log_structured_entry(
            "Failed to generate query embedding",
            "ERROR",
            details,
        )
        return ([], f"Error generating embedding: {details['error']}")
    else:
        return (vector, None)
def filter_search_results(
    results: list[SearchResult],
    min_similarity: float = 0.6,
    top_percent: float = 0.9,
) -> list[SearchResult]:
    """Filter search results by similarity thresholds.

    Args:
        results: Raw search results from vector search.
        min_similarity: Minimum similarity score (distance) to include.
        top_percent: Keep results within this percentage of the top score.

    Returns:
        Filtered list of search results.
    """
    if not results:
        return []
    best_score = max(item["distance"] for item in results)
    # A result survives only if it clears both the relative cutoff
    # (top_percent of the best score) and the absolute floor.
    threshold = max(best_score * top_percent, min_similarity)
    return [item for item in results if item["distance"] > threshold]
def format_search_results(results: list[SearchResult]) -> str:
    """Format search results as XML-like documents.

    Args:
        results: List of search results to format.

    Returns:
        Formatted string with document tags, or a fallback message when
        there are no results.
    """
    if not results:
        return "No relevant documents found for your query."
    blocks: list[str] = []
    for index, item in enumerate(results, start=1):
        blocks.append(
            f"<document {index} name={item['id']}>\n{item['content']}\n</document {index}>"
        )
    return "\n".join(blocks)

View File

@@ -0,0 +1,171 @@
# ruff: noqa: INP001
"""Validation functions for Google Cloud services."""
from gcloud.aio.auth import Token
from google import genai
from google.genai import types as genai_types
from ..clients.vector_search import GoogleCloudVectorSearch
from ..config import Settings
from ..logging import log_structured_entry
async def validate_genai_access(genai_client: genai.Client, cfg: Settings) -> str | None:
    """Validate GenAI embedding access.

    Issues a tiny probe embedding request against the configured model.

    Returns:
        Error message if validation fails, None if successful.
    """
    log_structured_entry("Validating GenAI embedding access", "INFO")
    try:
        probe = await genai_client.aio.models.embed_content(
            model=cfg.embedding_model,
            contents="test",
            config=genai_types.EmbedContentConfig(
                task_type="RETRIEVAL_QUERY",
            ),
        )
        # An empty response means the call "worked" but gave us nothing usable.
        if not (probe and probe.embeddings):
            msg = "Embedding validation returned empty response"
            log_structured_entry(msg, "WARNING")
            return msg
        values = probe.embeddings[0].values
        log_structured_entry(
            "GenAI embedding validation successful",
            "INFO",
            {"embedding_dimension": len(values) if values else 0}
        )
        return None
    except Exception as exc:
        log_structured_entry(
            "Failed to validate GenAI embedding access - service may not work correctly",
            "WARNING",
            {"error": str(exc), "error_type": type(exc).__name__}
        )
        return f"GenAI: {exc!s}"
async def validate_gcs_access(vs: GoogleCloudVectorSearch, cfg: Settings) -> str | None:
    """Validate GCS bucket access.

    Lists at most one object from the bucket using a cloud-platform scoped
    token and maps HTTP failures to human-readable messages.

    Returns:
        Error message if validation fails, None if successful.
    """
    log_structured_entry(
        "Validating GCS bucket access",
        "INFO",
        {"bucket": cfg.bucket}
    )
    try:
        # Reuse the storage client's aiohttp session for this probe request.
        session = vs.storage._get_aio_session()
        token_obj = Token(
            session=session,
            scopes=["https://www.googleapis.com/auth/cloud-platform"],
        )
        access_token = await token_obj.get()
        headers = {"Authorization": f"Bearer {access_token}"}
        # maxResults=1 keeps the probe cheap regardless of bucket size.
        async with session.get(
            f"https://storage.googleapis.com/storage/v1/b/{cfg.bucket}/o?maxResults=1",
            headers=headers,
        ) as response:
            if response.status == 403:
                # Permission problem: credentials are valid but lack access.
                msg = f"Access denied to bucket '{cfg.bucket}'. Check permissions."
                log_structured_entry(
                    "GCS bucket validation failed - access denied - service may not work correctly",
                    "WARNING",
                    {"bucket": cfg.bucket, "status": response.status}
                )
                return msg
            elif response.status == 404:
                # Configuration problem: the bucket itself does not exist.
                msg = f"Bucket '{cfg.bucket}' not found. Check bucket name and project."
                log_structured_entry(
                    "GCS bucket validation failed - not found - service may not work correctly",
                    "WARNING",
                    {"bucket": cfg.bucket, "status": response.status}
                )
                return msg
            elif not response.ok:
                # Any other non-2xx status: include the body in the log.
                body = await response.text()
                msg = f"Failed to access bucket '{cfg.bucket}': {response.status}"
                log_structured_entry(
                    "GCS bucket validation failed - service may not work correctly",
                    "WARNING",
                    {"bucket": cfg.bucket, "status": response.status, "response": body}
                )
                return msg
            else:
                log_structured_entry(
                    "GCS bucket validation successful",
                    "INFO",
                    {"bucket": cfg.bucket}
                )
                return None
    except Exception as e:
        log_structured_entry(
            "Failed to validate GCS bucket access - service may not work correctly",
            "WARNING",
            {"error": str(e), "error_type": type(e).__name__, "bucket": cfg.bucket}
        )
        return f"GCS: {str(e)}"
async def validate_vector_search_access(vs: GoogleCloudVectorSearch, cfg: Settings) -> str | None:
    """Validate vector search endpoint access.

    Fetches the index endpoint resource via the regional aiplatform API and
    maps HTTP failures to human-readable messages.

    Returns:
        Error message if validation fails, None if successful.
    """
    log_structured_entry(
        "Validating vector search endpoint access",
        "INFO",
        {"endpoint_name": cfg.endpoint_name}
    )
    try:
        # Reuse the vector-search client's auth headers and aiohttp session.
        headers = await vs._async_get_auth_headers()
        session = vs._get_aio_session()
        endpoint_url = (
            f"https://{cfg.location}-aiplatform.googleapis.com/v1/{cfg.endpoint_name}"
        )
        async with session.get(endpoint_url, headers=headers) as response:
            if response.status == 403:
                # Permission problem: credentials are valid but lack access.
                msg = f"Access denied to endpoint '{cfg.endpoint_name}'. Check permissions."
                log_structured_entry(
                    "Vector search endpoint validation failed - access denied - service may not work correctly",
                    "WARNING",
                    {"endpoint": cfg.endpoint_name, "status": response.status}
                )
                return msg
            elif response.status == 404:
                # Configuration problem: the endpoint resource does not exist.
                msg = f"Endpoint '{cfg.endpoint_name}' not found. Check endpoint name and project."
                log_structured_entry(
                    "Vector search endpoint validation failed - not found - service may not work correctly",
                    "WARNING",
                    {"endpoint": cfg.endpoint_name, "status": response.status}
                )
                return msg
            elif not response.ok:
                # Any other non-2xx status: include the body in the log.
                body = await response.text()
                msg = f"Failed to access endpoint '{cfg.endpoint_name}': {response.status}"
                log_structured_entry(
                    "Vector search endpoint validation failed - service may not work correctly",
                    "WARNING",
                    {"endpoint": cfg.endpoint_name, "status": response.status, "response": body}
                )
                return msg
            else:
                log_structured_entry(
                    "Vector search endpoint validation successful",
                    "INFO",
                    {"endpoint": cfg.endpoint_name}
                )
                return None
    except Exception as e:
        log_structured_entry(
            "Failed to validate vector search endpoint access - service may not work correctly",
            "WARNING",
            {"error": str(e), "error_type": type(e).__name__, "endpoint": cfg.endpoint_name}
        )
        return f"Vector Search: {str(e)}"

View File

@@ -0,0 +1,5 @@
"""Utility modules for knowledge search MCP server."""
from .cache import LRUCache
__all__ = ["LRUCache"]

View File

@@ -0,0 +1,33 @@
# ruff: noqa: INP001
"""LRU cache implementation."""
from collections import OrderedDict
class LRUCache:
"""Simple LRU cache with size limit."""
def __init__(self, max_size: int = 100) -> None:
"""Initialize cache with maximum size."""
self.cache: OrderedDict[str, bytes] = OrderedDict()
self.max_size = max_size
def get(self, key: str) -> bytes | None:
"""Get item from cache, returning None if not found."""
if key not in self.cache:
return None
# Move to end to mark as recently used
self.cache.move_to_end(key)
return self.cache[key]
def put(self, key: str, value: bytes) -> None:
"""Put item in cache, evicting oldest if at capacity."""
if key in self.cache:
self.cache.move_to_end(key)
self.cache[key] = value
if len(self.cache) > self.max_size:
self.cache.popitem(last=False)
def __contains__(self, key: str) -> bool:
"""Check if key exists in cache."""
return key in self.cache

1
tests/__init__.py Normal file
View File

@@ -0,0 +1 @@
"""Tests for knowledge-search-mcp."""

36
tests/conftest.py Normal file
View File

@@ -0,0 +1,36 @@
"""Pytest configuration and shared fixtures."""
import os
from unittest.mock import MagicMock
import pytest
@pytest.fixture(autouse=True)
def mock_env_vars(monkeypatch):
    """Set required environment variables for testing."""
    # Every variable the Settings model requires, with deterministic values.
    for name, value in {
        "PROJECT_ID": "test-project",
        "LOCATION": "us-central1",
        "BUCKET": "test-bucket",
        "INDEX_NAME": "test-index",
        "DEPLOYED_INDEX_ID": "test-deployed-index",
        "ENDPOINT_NAME": "projects/test/locations/us-central1/indexEndpoints/test",
        "ENDPOINT_DOMAIN": "test.us-central1-aiplatform.googleapis.com",
    }.items():
        monkeypatch.setenv(name, value)
@pytest.fixture
def mock_gcs_storage():
    """Mock Google Cloud Storage client."""
    return MagicMock()
@pytest.fixture
def mock_vector_search():
    """Mock vector search client."""
    return MagicMock()

56
tests/test_config.py Normal file
View File

@@ -0,0 +1,56 @@
"""Tests for configuration management."""
import os
import pytest
from pydantic import ValidationError
from knowledge_search_mcp.config import Settings
def test_settings_from_env():
    """Test that Settings can be loaded from environment variables."""
    # Environment is provided by the autouse fixture in conftest.py.
    loaded = Settings.model_validate({})
    expected = {
        "project_id": "test-project",
        "location": "us-central1",
        "bucket": "test-bucket",
        "index_name": "test-index",
        "deployed_index_id": "test-deployed-index",
    }
    for field, value in expected.items():
        assert getattr(loaded, field) == value
def test_settings_defaults():
    """Test that Settings has correct default values."""
    loaded = Settings.model_validate({})
    # Compare all optional fields in one shot against their defaults.
    assert (
        loaded.embedding_model,
        loaded.search_limit,
        loaded.log_name,
        loaded.log_level,
    ) == ("gemini-embedding-001", 10, "va_agent_evaluation_logs", "INFO")
def test_settings_custom_values(monkeypatch):
    """Test that Settings can be customized via environment."""
    overrides = {
        "EMBEDDING_MODEL": "custom-embedding-model",
        "SEARCH_LIMIT": "20",
        "LOG_LEVEL": "DEBUG",
    }
    for key, value in overrides.items():
        monkeypatch.setenv(key, value)
    loaded = Settings.model_validate({})
    assert loaded.embedding_model == "custom-embedding-model"
    assert loaded.search_limit == 20
    assert loaded.log_level == "DEBUG"
def test_settings_validation_error():
    """Test that Settings raises ValidationError when required fields are missing.

    The previous version never removed the environment variables installed by
    the conftest fixture and never asserted a raise, so the failure path was
    untested. Here we pop them, assert the ValidationError, and restore them
    so later tests still see a complete environment.
    """
    required_vars = [
        "PROJECT_ID", "LOCATION", "BUCKET", "INDEX_NAME",
        "DEPLOYED_INDEX_ID", "ENDPOINT_NAME", "ENDPOINT_DOMAIN",
    ]
    # NOTE(review): assumes no config.yaml/.env in the test working directory
    # supplies these values — otherwise validation would still succeed.
    saved = {name: os.environ.pop(name) for name in required_vars if name in os.environ}
    try:
        with pytest.raises(ValidationError):
            Settings.model_validate({})
    finally:
        # Restore the fixture-provided environment for subsequent tests.
        os.environ.update(saved)

408
tests/test_main_tool.py Normal file
View File

@@ -0,0 +1,408 @@
"""Tests for the main knowledge_search tool."""
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
from knowledge_search_mcp.__main__ import knowledge_search
from knowledge_search_mcp.models import AppContext, SourceNamespace, SearchResult
class TestKnowledgeSearch:
"""Tests for knowledge_search tool function."""
@pytest.fixture
def mock_app_context(self):
"""Create a mock AppContext."""
app = MagicMock(spec=AppContext)
# Mock genai_client
app.genai_client = MagicMock()
# Mock vector_search
app.vector_search = MagicMock()
app.vector_search.async_run_query = AsyncMock()
# Mock settings
app.settings = MagicMock()
app.settings.embedding_model = "models/text-embedding-004"
app.settings.deployed_index_id = "test-deployed-index"
app.settings.search_limit = 10
return app
@pytest.fixture
def mock_context(self, mock_app_context):
"""Create a mock MCP Context."""
ctx = MagicMock()
ctx.request_context = MagicMock()
ctx.request_context.lifespan_context = mock_app_context
return ctx
@pytest.fixture
def sample_embedding(self):
"""Create a sample embedding vector."""
return [0.1, 0.2, 0.3, 0.4, 0.5]
@pytest.fixture
def sample_search_results(self):
"""Create sample search results."""
results: list[SearchResult] = [
{"id": "doc1.txt", "distance": 0.95, "content": "First document content"},
{"id": "doc2.txt", "distance": 0.85, "content": "Second document content"},
{"id": "doc3.txt", "distance": 0.75, "content": "Third document content"},
]
return results
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_successful_search(
self,
mock_format,
mock_filter,
mock_generate,
mock_context,
sample_embedding,
sample_search_results
):
"""Test successful search workflow."""
# Setup mocks
mock_generate.return_value = (sample_embedding, None)
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
mock_filter.return_value = sample_search_results
mock_format.return_value = "<document 1 name=doc1.txt>\nFirst document content\n</document 1>"
# Execute
result = await knowledge_search("What is financial education?", mock_context)
# Assert
assert result == "<document 1 name=doc1.txt>\nFirst document content\n</document 1>"
mock_generate.assert_called_once()
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_called_once_with(
deployed_index_id="test-deployed-index",
query=sample_embedding,
limit=10,
source=None,
)
mock_filter.assert_called_once_with(sample_search_results)
mock_format.assert_called_once_with(sample_search_results)
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_embedding_generation_error(self, mock_generate, mock_context):
"""Test handling of embedding generation error."""
# Setup mock to return error
mock_generate.return_value = ([], "Error: API rate limit exceeded. Please try again later.")
# Execute
result = await knowledge_search("test query", mock_context)
# Assert
assert result == "Error: API rate limit exceeded. Please try again later."
mock_generate.assert_called_once()
# Vector search should not be called
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_not_called()
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_empty_query_error(self, mock_generate, mock_context):
"""Test handling of empty query."""
# Setup mock to return error for empty query
mock_generate.return_value = ([], "Error: Query cannot be empty")
# Execute
result = await knowledge_search("", mock_context)
# Assert
assert result == "Error: Query cannot be empty"
mock_generate.assert_called_once()
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_vector_search_error(self, mock_generate, mock_context, sample_embedding):
"""Test handling of vector search error."""
# Setup mocks
mock_generate.return_value = (sample_embedding, None)
mock_context.request_context.lifespan_context.vector_search.async_run_query.side_effect = Exception(
"Vector search service unavailable"
)
# Execute
result = await knowledge_search("test query", mock_context)
# Assert
assert "Error performing vector search:" in result
assert "Vector search service unavailable" in result
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_empty_search_results(
self,
mock_format,
mock_filter,
mock_generate,
mock_context,
sample_embedding
):
"""Test handling of empty search results."""
# Setup mocks
mock_generate.return_value = (sample_embedding, None)
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = []
mock_filter.return_value = []
mock_format.return_value = "No relevant documents found for your query."
# Execute
result = await knowledge_search("obscure query", mock_context)
# Assert
assert result == "No relevant documents found for your query."
mock_format.assert_called_once_with([])
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_filtered_results_empty(
self,
mock_format,
mock_filter,
mock_generate,
mock_context,
sample_embedding,
sample_search_results
):
"""Test when filtering removes all results."""
# Setup mocks - results exist but get filtered out
mock_generate.return_value = (sample_embedding, None)
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
mock_filter.return_value = [] # All filtered out
mock_format.return_value = "No relevant documents found for your query."
# Execute
result = await knowledge_search("test query", mock_context)
# Assert
assert result == "No relevant documents found for your query."
mock_filter.assert_called_once_with(sample_search_results)
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_source_filter_parameter(
self,
mock_format,
mock_filter,
mock_generate,
mock_context,
sample_embedding,
sample_search_results
):
"""Test that source filter is passed correctly to vector search."""
# Setup mocks
mock_generate.return_value = (sample_embedding, None)
mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
mock_filter.return_value = sample_search_results
mock_format.return_value = "formatted results"
# Execute with source filter
source_filter = SourceNamespace.EDUCACION_FINANCIERA
result = await knowledge_search("test query", mock_context, source=source_filter)
# Assert
assert result == "formatted results"
mock_context.request_context.lifespan_context.vector_search.async_run_query.assert_called_once_with(
deployed_index_id="test-deployed-index",
query=sample_embedding,
limit=10,
source=source_filter,
)
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_all_source_filters(
    self,
    mock_format,
    mock_filter,
    mock_generate,
    mock_context,
    sample_embedding,
    sample_search_results
):
    """Exercise knowledge_search once per SourceNamespace member."""
    mock_generate.return_value = (sample_embedding, None)
    mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
    mock_filter.return_value = sample_search_results
    mock_format.return_value = "results"
    # Every enum member should be accepted without error.
    for namespace in SourceNamespace:
        assert await knowledge_search("test query", mock_context, source=namespace) == "results"
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_vector_search_timeout(self, mock_generate, mock_context, sample_embedding):
    """A TimeoutError from the vector store surfaces as an error string."""
    mock_generate.return_value = (sample_embedding, None)
    query_mock = mock_context.request_context.lifespan_context.vector_search.async_run_query
    query_mock.side_effect = TimeoutError("Request timed out")

    message = await knowledge_search("test query", mock_context)

    assert "Error performing vector search:" in message
    assert "Request timed out" in message
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_vector_search_connection_error(self, mock_generate, mock_context, sample_embedding):
    """A ConnectionError from the vector store surfaces as an error string."""
    mock_generate.return_value = (sample_embedding, None)
    query_mock = mock_context.request_context.lifespan_context.vector_search.async_run_query
    query_mock.side_effect = ConnectionError("Connection refused")

    message = await knowledge_search("test query", mock_context)

    assert "Error performing vector search:" in message
    assert "Connection refused" in message
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
async def test_format_results_unexpected_error(
    self,
    mock_filter,
    mock_generate,
    mock_context,
    sample_embedding,
    sample_search_results
):
    """A failure inside format_search_results is reported, not raised."""
    mock_generate.return_value = (sample_embedding, None)
    mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
    mock_filter.return_value = sample_search_results
    # Make the formatter blow up mid-pipeline.
    with patch('knowledge_search_mcp.__main__.format_search_results', side_effect=ValueError("Format error")):
        message = await knowledge_search("test query", mock_context)
    assert "Unexpected error during search:" in message
    assert "Format error" in message
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_filter_results_unexpected_error(self, mock_generate, mock_context, sample_embedding):
    """A failure inside filter_search_results is reported, not raised."""
    mock_generate.return_value = (sample_embedding, None)
    raw_hits = [{"id": "doc1", "distance": 0.9, "content": "test"}]
    mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = raw_hits
    # Make the filtering step blow up mid-pipeline.
    with patch('knowledge_search_mcp.__main__.filter_search_results', side_effect=TypeError("Filter error")):
        message = await knowledge_search("test query", mock_context)
    assert "Unexpected error during search:" in message
    assert "Filter error" in message
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_long_query_truncation_in_logs(
    self,
    mock_format,
    mock_filter,
    mock_generate,
    mock_context,
    sample_embedding,
    sample_search_results
):
    """A very long query still flows through to embedding generation untruncated."""
    mock_generate.return_value = (sample_embedding, None)
    mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
    mock_filter.return_value = sample_search_results
    mock_format.return_value = "results"

    oversized_query = "a" * 500
    assert await knowledge_search(oversized_query, mock_context) == "results"

    # Truncation (if any) is for logging only: the embedding call must
    # receive the query in full.
    assert mock_generate.call_args[0][2] == oversized_query
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_multiple_results_returned(
    self,
    mock_format,
    mock_filter,
    mock_generate,
    mock_context,
    sample_embedding
):
    """The filtered subset, not the raw hit list, is handed to the formatter."""
    # Ten hits with steadily decreasing scores.
    raw_hits: list[SearchResult] = [
        {"id": f"doc{i}.txt", "distance": 0.9 - (i * 0.05), "content": f"Content {i}"}
        for i in range(10)
    ]
    top_five = raw_hits[:5]
    mock_generate.return_value = (sample_embedding, None)
    mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = raw_hits
    mock_filter.return_value = top_five
    mock_format.return_value = "formatted 5 results"

    assert await knowledge_search("test query", mock_context) == "formatted 5 results"
    mock_filter.assert_called_once_with(raw_hits)
    mock_format.assert_called_once_with(top_five)
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
@patch('knowledge_search_mcp.__main__.filter_search_results')
@patch('knowledge_search_mcp.__main__.format_search_results')
async def test_settings_values_used_correctly(
    self,
    mock_format,
    mock_filter,
    mock_generate,
    mock_context,
    sample_embedding,
    sample_search_results
):
    """Custom settings drive the embedding model, index id and result limit."""
    settings = mock_context.request_context.lifespan_context.settings
    settings.embedding_model = "custom-model"
    settings.deployed_index_id = "custom-index"
    settings.search_limit = 20

    mock_generate.return_value = (sample_embedding, None)
    mock_context.request_context.lifespan_context.vector_search.async_run_query.return_value = sample_search_results
    mock_filter.return_value = sample_search_results
    mock_format.return_value = "results"

    await knowledge_search("test query", mock_context)

    # The embedding model comes straight from settings...
    assert mock_generate.call_args[0][1] == "custom-model"
    # ...and so do the deployed index id and the search limit.
    query_kwargs = mock_context.request_context.lifespan_context.vector_search.async_run_query.call_args.kwargs
    assert query_kwargs["deployed_index_id"] == "custom-index"
    assert query_kwargs["limit"] == 20
@patch('knowledge_search_mcp.__main__.generate_query_embedding')
async def test_graceful_degradation_on_partial_failure(
    self, mock_generate, mock_context, sample_embedding
):
    """Backend failures come back as error strings rather than raised exceptions."""
    mock_generate.return_value = (sample_embedding, None)
    query_mock = mock_context.request_context.lifespan_context.vector_search.async_run_query
    query_mock.side_effect = RuntimeError("Critical failure")

    message = await knowledge_search("test query", mock_context)

    assert isinstance(message, str)
    assert "Error performing vector search:" in message
    assert "Critical failure" in message

110
tests/test_search.py Normal file
View File

@@ -0,0 +1,110 @@
"""Tests for vector search functionality."""
import io
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from knowledge_search_mcp import (
GoogleCloudFileStorage,
GoogleCloudVectorSearch,
LRUCache,
SourceNamespace,
)
class TestGoogleCloudFileStorage:
    """Tests for GoogleCloudFileStorage."""

    def test_init(self):
        """A fresh client records its bucket and starts with a bounded LRU cache."""
        storage = GoogleCloudFileStorage(bucket="test-bucket")
        assert storage.bucket_name == "test-bucket"
        assert isinstance(storage._cache, LRUCache)
        assert storage._cache.max_size == 100

    @pytest.mark.asyncio
    async def test_cache_hit(self):
        """A cached blob is served from memory without touching GCS."""
        storage = GoogleCloudFileStorage(bucket="test-bucket")
        payload = b"cached content"
        storage._cache.put("test.md", payload)

        stream = await storage.async_get_file_stream("test.md")

        assert stream.read() == payload
        assert stream.name == "test.md"

    @pytest.mark.asyncio
    async def test_cache_miss(self):
        """An uncached blob is downloaded from GCS and then stored in the cache."""
        storage = GoogleCloudFileStorage(bucket="test-bucket")
        payload = b"fetched content"
        # Stand in for the async GCS client so no network call happens.
        with patch.object(storage, '_get_aio_storage') as mock_storage_getter:
            gcs_stub = AsyncMock()
            gcs_stub.download = AsyncMock(return_value=payload)
            mock_storage_getter.return_value = gcs_stub
            stream = await storage.async_get_file_stream("test.md")
            assert stream.read() == payload
            assert storage._cache.get("test.md") == payload
class TestGoogleCloudVectorSearch:
    """Tests for GoogleCloudVectorSearch."""

    def test_init(self):
        """The constructor stores project, location and index name."""
        client = GoogleCloudVectorSearch(
            project_id="test-project",
            location="us-central1",
            bucket="test-bucket",
            index_name="test-index",
        )
        assert client.project_id == "test-project"
        assert client.location == "us-central1"
        assert client.index_name == "test-index"

    def test_configure_index_endpoint(self):
        """Endpoint configuration records both the resource name and the public domain."""
        client = GoogleCloudVectorSearch(
            project_id="test-project",
            location="us-central1",
            bucket="test-bucket",
        )
        client.configure_index_endpoint(
            name="test-endpoint",
            public_domain="test.domain.com",
        )
        assert client._endpoint_name == "test-endpoint"
        assert client._endpoint_domain == "test.domain.com"

    def test_configure_index_endpoint_validation(self):
        """A blank endpoint name or domain is rejected with ValueError."""
        client = GoogleCloudVectorSearch(
            project_id="test-project",
            location="us-central1",
            bucket="test-bucket",
        )
        with pytest.raises(ValueError, match="endpoint name"):
            client.configure_index_endpoint(name="", public_domain="test.com")
        with pytest.raises(ValueError, match="endpoint domain"):
            client.configure_index_endpoint(name="test", public_domain="")
class TestSourceNamespace:
    """Tests for SourceNamespace enum."""

    def test_source_namespace_values(self):
        """Each namespace member maps to its human-readable label."""
        expected_labels = {
            SourceNamespace.EDUCACION_FINANCIERA: "Educacion Financiera",
            SourceNamespace.PRODUCTOS_Y_SERVICIOS: "Productos y Servicios",
            SourceNamespace.FUNCIONALIDADES_APP_MOVIL: "Funcionalidades de la App Movil",
        }
        for member, label in expected_labels.items():
            assert member.value == label

View File

@@ -0,0 +1,381 @@
"""Tests for search service functions."""
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
from knowledge_search_mcp.services.search import (
generate_query_embedding,
filter_search_results,
format_search_results,
)
from knowledge_search_mcp.models import SearchResult
class TestGenerateQueryEmbedding:
    """Tests for generate_query_embedding function."""

    @pytest.fixture
    def mock_genai_client(self):
        """Create a mock genai client."""
        stub = MagicMock()
        stub.aio = MagicMock()
        stub.aio.models = MagicMock()
        return stub

    async def test_successful_embedding_generation(self, mock_genai_client):
        """A normal query yields the embedding vector and no error."""
        vector = [0.1, 0.2, 0.3, 0.4, 0.5]
        fake_embedding = MagicMock()
        fake_embedding.values = vector
        fake_response = MagicMock()
        fake_response.embeddings = [fake_embedding]
        mock_genai_client.aio.models.embed_content = AsyncMock(return_value=fake_response)

        embedding, error = await generate_query_embedding(
            mock_genai_client,
            "models/text-embedding-004",
            "What is financial education?"
        )

        assert error is None
        assert embedding == vector
        # The API must be called once, with the query task type.
        mock_genai_client.aio.models.embed_content.assert_called_once()
        kwargs = mock_genai_client.aio.models.embed_content.call_args.kwargs
        assert kwargs["model"] == "models/text-embedding-004"
        assert kwargs["contents"] == "What is financial education?"
        assert kwargs["config"].task_type == "RETRIEVAL_QUERY"

    async def test_empty_query_string(self, mock_genai_client):
        """An empty query short-circuits without calling the API."""
        embedding, error = await generate_query_embedding(
            mock_genai_client, "models/text-embedding-004", ""
        )
        assert embedding == []
        assert error == "Error: Query cannot be empty"
        mock_genai_client.aio.models.embed_content.assert_not_called()

    async def test_whitespace_only_query(self, mock_genai_client):
        """A whitespace-only query is treated the same as an empty one."""
        embedding, error = await generate_query_embedding(
            mock_genai_client, "models/text-embedding-004", " \t\n "
        )
        assert embedding == []
        assert error == "Error: Query cannot be empty"
        mock_genai_client.aio.models.embed_content.assert_not_called()

    async def test_rate_limit_error_429(self, mock_genai_client):
        """An HTTP 429 message maps to the rate-limit error string."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=Exception("429 Too Many Requests")
        )
        embedding, error = await generate_query_embedding(
            mock_genai_client, "models/text-embedding-004", "test query"
        )
        assert embedding == []
        assert error == "Error: API rate limit exceeded. Please try again later."

    async def test_rate_limit_error_resource_exhausted(self, mock_genai_client):
        """A RESOURCE_EXHAUSTED message also maps to the rate-limit error string."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=Exception("RESOURCE_EXHAUSTED: Quota exceeded")
        )
        embedding, error = await generate_query_embedding(
            mock_genai_client, "models/text-embedding-004", "test query"
        )
        assert embedding == []
        assert error == "Error: API rate limit exceeded. Please try again later."

    async def test_generic_api_error(self, mock_genai_client):
        """Other API failures are wrapped in a generic embedding error."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=ValueError("Invalid model name")
        )
        embedding, error = await generate_query_embedding(
            mock_genai_client, "invalid-model", "test query"
        )
        assert embedding == []
        assert "Error generating embedding: Invalid model name" in error

    async def test_network_error(self, mock_genai_client):
        """Connection problems are wrapped in a generic embedding error."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=ConnectionError("Network unreachable")
        )
        embedding, error = await generate_query_embedding(
            mock_genai_client, "models/text-embedding-004", "test query"
        )
        assert embedding == []
        assert "Error generating embedding: Network unreachable" in error

    async def test_long_query_truncation_in_logging(self, mock_genai_client):
        """A very long failing query still produces a non-None error."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=Exception("API error")
        )
        embedding, error = await generate_query_embedding(
            mock_genai_client, "models/text-embedding-004", "a" * 200
        )
        assert embedding == []
        assert error is not None
class TestFilterSearchResults:
    """Tests for filter_search_results function."""

    def test_empty_results(self):
        """An empty input list filters to an empty list."""
        assert filter_search_results([]) == []

    def test_single_result_above_thresholds(self):
        """A lone result clearing both thresholds is kept."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.85, "content": "test content"}
        ]
        kept = filter_search_results(hits, min_similarity=0.6, top_percent=0.9)
        assert len(kept) == 1
        assert kept[0]["id"] == "doc1"

    def test_single_result_below_min_similarity(self):
        """A lone result under the similarity floor is dropped."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.5, "content": "test content"}
        ]
        assert filter_search_results(hits, min_similarity=0.6, top_percent=0.9) == []

    def test_multiple_results_all_above_thresholds(self):
        """All results survive when every score clears both cutoffs."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.95, "content": "content 1"},
            {"id": "doc2", "distance": 0.90, "content": "content 2"},
            {"id": "doc3", "distance": 0.85, "content": "content 3"},
        ]
        # max_sim = 0.95, cutoff = 0.95 * 0.8 = 0.76; every distance beats
        # both 0.76 and the 0.6 floor, so nothing is filtered.
        assert len(filter_search_results(hits, min_similarity=0.6, top_percent=0.8)) == 3

    def test_top_percent_filtering(self):
        """Only results within top_percent of the best score are kept."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 1.0, "content": "content 1"},
            {"id": "doc2", "distance": 0.95, "content": "content 2"},
            {"id": "doc3", "distance": 0.85, "content": "content 3"},
            {"id": "doc4", "distance": 0.70, "content": "content 4"},
        ]
        # max_sim = 1.0, cutoff = 1.0 * 0.9 = 0.9 -> only doc1 and doc2 remain.
        kept = filter_search_results(hits, min_similarity=0.6, top_percent=0.9)
        assert len(kept) == 2
        assert [hit["id"] for hit in kept] == ["doc1", "doc2"]

    def test_min_similarity_filtering(self):
        """The absolute similarity floor removes weak results."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.95, "content": "content 1"},
            {"id": "doc2", "distance": 0.75, "content": "content 2"},
            {"id": "doc3", "distance": 0.55, "content": "content 3"},
        ]
        # cutoff = 0.95 * 0.9 = 0.855: doc2 fails top_percent, doc3 fails
        # the 0.7 floor, leaving only doc1.
        kept = filter_search_results(hits, min_similarity=0.7, top_percent=0.9)
        assert len(kept) == 1
        assert kept[0]["id"] == "doc1"

    def test_default_parameters(self):
        """Defaults are min_similarity=0.6 and top_percent=0.9."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.95, "content": "content 1"},
            {"id": "doc2", "distance": 0.85, "content": "content 2"},
            {"id": "doc3", "distance": 0.50, "content": "content 3"},
        ]
        # cutoff = 0.95 * 0.9 = 0.855: doc2 falls under it and doc3 under
        # the 0.6 floor, so only doc1 survives.
        kept = filter_search_results(hits)
        assert len(kept) == 1
        assert kept[0]["id"] == "doc1"

    def test_all_results_filtered_out(self):
        """Every result below the similarity floor yields an empty list."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.55, "content": "content 1"},
            {"id": "doc2", "distance": 0.45, "content": "content 2"},
            {"id": "doc3", "distance": 0.35, "content": "content 3"},
        ]
        assert filter_search_results(hits, min_similarity=0.6, top_percent=0.9) == []

    def test_exact_threshold_boundaries(self):
        """Behaviour for scores sitting right at the configured thresholds."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.9, "content": "content 1"},
            {"id": "doc2", "distance": 0.6, "content": "content 2"},
        ]
        # cutoff = 0.9 * 0.9 = 0.81: doc1 clears both checks; doc2 sits
        # below the top_percent cutoff.
        kept = filter_search_results(hits, min_similarity=0.6, top_percent=0.9)
        assert len(kept) == 1
        assert kept[0]["id"] == "doc1"

    def test_identical_distances(self):
        """Ties at the same distance are all kept together."""
        hits: list[SearchResult] = [
            {"id": "doc1", "distance": 0.8, "content": "content 1"},
            {"id": "doc2", "distance": 0.8, "content": "content 2"},
            {"id": "doc3", "distance": 0.8, "content": "content 3"},
        ]
        # cutoff = 0.8 * 0.9 = 0.72: all three share 0.8, all pass.
        assert len(filter_search_results(hits, min_similarity=0.6, top_percent=0.9)) == 3
class TestFormatSearchResults:
    """Tests for format_search_results function."""

    def test_empty_results(self):
        """No results produces the standard 'nothing found' message."""
        assert format_search_results([]) == "No relevant documents found for your query."

    def test_single_result(self):
        """A single result is wrapped in numbered document tags."""
        docs: list[SearchResult] = [
            {"id": "doc1.txt", "distance": 0.95, "content": "This is the content."}
        ]
        assert format_search_results(docs) == (
            "<document 1 name=doc1.txt>\nThis is the content.\n</document 1>"
        )

    def test_multiple_results(self):
        """Multiple results are concatenated with sequential numbering."""
        docs: list[SearchResult] = [
            {"id": "doc1.txt", "distance": 0.95, "content": "First document content."},
            {"id": "doc2.txt", "distance": 0.85, "content": "Second document content."},
            {"id": "doc3.txt", "distance": 0.75, "content": "Third document content."},
        ]
        assert format_search_results(docs) == (
            "<document 1 name=doc1.txt>\nFirst document content.\n</document 1>\n"
            "<document 2 name=doc2.txt>\nSecond document content.\n</document 2>\n"
            "<document 3 name=doc3.txt>\nThird document content.\n</document 3>"
        )

    def test_multiline_content(self):
        """Newlines inside content are preserved verbatim."""
        docs: list[SearchResult] = [
            {"id": "doc1.txt", "distance": 0.95, "content": "Line 1\nLine 2\nLine 3"}
        ]
        assert format_search_results(docs) == (
            "<document 1 name=doc1.txt>\nLine 1\nLine 2\nLine 3\n</document 1>"
        )

    def test_special_characters_in_content(self):
        """Content is not escaped: brackets, ampersands and quotes pass through."""
        docs: list[SearchResult] = [
            {
                "id": "doc1.txt",
                "distance": 0.95,
                "content": "Content with <special> & \"characters\""
            }
        ]
        assert format_search_results(docs) == (
            '<document 1 name=doc1.txt>\nContent with <special> & "characters"\n</document 1>'
        )

    def test_special_characters_in_document_id(self):
        """Slashes, dashes and underscores in an id appear unchanged in the tag."""
        docs: list[SearchResult] = [
            {
                "id": "path/to/doc-name_v2.txt",
                "distance": 0.95,
                "content": "Some content"
            }
        ]
        assert format_search_results(docs) == (
            "<document 1 name=path/to/doc-name_v2.txt>\nSome content\n</document 1>"
        )

    def test_empty_content(self):
        """Empty content still yields the tag pair around a blank line."""
        docs: list[SearchResult] = [
            {"id": "doc1.txt", "distance": 0.95, "content": ""}
        ]
        assert format_search_results(docs) == "<document 1 name=doc1.txt>\n\n</document 1>"

    def test_document_numbering(self):
        """Numbering starts at 1 and increments once per document."""
        docs: list[SearchResult] = [
            {"id": "a.txt", "distance": 0.9, "content": "A"},
            {"id": "b.txt", "distance": 0.8, "content": "B"},
            {"id": "c.txt", "distance": 0.7, "content": "C"},
            {"id": "d.txt", "distance": 0.6, "content": "D"},
            {"id": "e.txt", "distance": 0.5, "content": "E"},
        ]
        rendered = format_search_results(docs)
        for position, doc in enumerate(docs, start=1):
            assert f"<document {position} name={doc['id']}>" in rendered
            assert f"</document {position}>" in rendered

    def test_very_long_content(self):
        """Very long content is emitted in full, untruncated."""
        blob = "A" * 10000
        docs: list[SearchResult] = [
            {"id": "doc1.txt", "distance": 0.95, "content": blob}
        ]
        rendered = format_search_results(docs)
        assert rendered == f"<document 1 name=doc1.txt>\n{blob}\n</document 1>"
        assert len(rendered) > 10000

View File

@@ -0,0 +1,436 @@
"""Tests for validation service functions."""
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
from aiohttp import ClientResponse
from knowledge_search_mcp.services.validation import (
validate_genai_access,
validate_gcs_access,
validate_vector_search_access,
)
from knowledge_search_mcp.config import Settings
class TestValidateGenAIAccess:
    """Tests for validate_genai_access function."""

    @pytest.fixture
    def mock_settings(self):
        """Create mock settings."""
        stub = MagicMock(spec=Settings)
        stub.embedding_model = "models/text-embedding-004"
        stub.project_id = "test-project"
        stub.location = "us-central1"
        return stub

    @pytest.fixture
    def mock_genai_client(self):
        """Create a mock genai client."""
        stub = MagicMock()
        stub.aio = MagicMock()
        stub.aio.models = MagicMock()
        return stub

    async def test_successful_validation(self, mock_genai_client, mock_settings):
        """A probe embedding call that returns data validates successfully."""
        probe_embedding = MagicMock()
        probe_embedding.values = [0.1] * 768  # Typical embedding dimension
        probe_response = MagicMock()
        probe_response.embeddings = [probe_embedding]
        mock_genai_client.aio.models.embed_content = AsyncMock(return_value=probe_response)

        assert await validate_genai_access(mock_genai_client, mock_settings) is None

        # The probe uses a fixed "test" query with the retrieval task type.
        mock_genai_client.aio.models.embed_content.assert_called_once()
        kwargs = mock_genai_client.aio.models.embed_content.call_args.kwargs
        assert kwargs["model"] == "models/text-embedding-004"
        assert kwargs["contents"] == "test"
        assert kwargs["config"].task_type == "RETRIEVAL_QUERY"

    async def test_empty_response(self, mock_genai_client, mock_settings):
        """A response with no embeddings is reported as an error."""
        hollow_response = MagicMock()
        hollow_response.embeddings = []
        mock_genai_client.aio.models.embed_content = AsyncMock(return_value=hollow_response)
        error = await validate_genai_access(mock_genai_client, mock_settings)
        assert error == "Embedding validation returned empty response"

    async def test_none_response(self, mock_genai_client, mock_settings):
        """A None response is treated like an empty one."""
        mock_genai_client.aio.models.embed_content = AsyncMock(return_value=None)
        error = await validate_genai_access(mock_genai_client, mock_settings)
        assert error == "Embedding validation returned empty response"

    async def test_api_permission_error(self, mock_genai_client, mock_settings):
        """Permission failures are surfaced with a GenAI prefix."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=PermissionError("Permission denied for GenAI API")
        )
        error = await validate_genai_access(mock_genai_client, mock_settings)
        assert error is not None
        assert "GenAI:" in error
        assert "Permission denied for GenAI API" in error

    async def test_api_quota_error(self, mock_genai_client, mock_settings):
        """Quota failures are surfaced with a GenAI prefix."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=Exception("Quota exceeded")
        )
        error = await validate_genai_access(mock_genai_client, mock_settings)
        assert error is not None
        assert "GenAI:" in error
        assert "Quota exceeded" in error

    async def test_network_error(self, mock_genai_client, mock_settings):
        """Connection failures are surfaced with a GenAI prefix."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=ConnectionError("Network unreachable")
        )
        error = await validate_genai_access(mock_genai_client, mock_settings)
        assert error is not None
        assert "GenAI:" in error
        assert "Network unreachable" in error

    async def test_invalid_model_error(self, mock_genai_client, mock_settings):
        """A bad model name is surfaced with a GenAI prefix."""
        mock_genai_client.aio.models.embed_content = AsyncMock(
            side_effect=ValueError("Invalid model name")
        )
        error = await validate_genai_access(mock_genai_client, mock_settings)
        assert error is not None
        assert "GenAI:" in error
        assert "Invalid model name" in error

    async def test_embeddings_with_zero_values(self, mock_genai_client, mock_settings):
        """An embedding object with empty values still counts as success."""
        bare_embedding = MagicMock()
        bare_embedding.values = []
        wrapped_response = MagicMock()
        wrapped_response.embeddings = [bare_embedding]
        mock_genai_client.aio.models.embed_content = AsyncMock(return_value=wrapped_response)
        # Validation only requires that embeddings exist, not that they are non-empty.
        assert await validate_genai_access(mock_genai_client, mock_settings) is None
class TestValidateGCSAccess:
"""Tests for validate_gcs_access function."""
@pytest.fixture
def mock_settings(self):
"""Create mock settings."""
settings = MagicMock(spec=Settings)
settings.bucket = "test-bucket"
settings.project_id = "test-project"
return settings
@pytest.fixture
def mock_vector_search(self):
"""Create a mock vector search client."""
vs = MagicMock()
vs.storage = MagicMock()
return vs
@pytest.fixture
def mock_session(self):
"""Create a mock aiohttp session."""
session = MagicMock()
return session
@pytest.fixture
def mock_response(self):
"""Create a mock HTTP response."""
response = MagicMock()
response.text = AsyncMock(return_value='{"items": []}')
return response
async def test_successful_validation(self, mock_vector_search, mock_settings, mock_session, mock_response):
"""Test successful GCS bucket access validation."""
# Setup mocks
mock_response.status = 200
mock_response.ok = True
mock_session.get = MagicMock()
mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
mock_vector_search.storage._get_aio_session.return_value = mock_session
with patch('knowledge_search_mcp.services.validation.Token') as MockToken:
mock_token = MockToken.return_value
mock_token.get = AsyncMock(return_value="fake-access-token")
error = await validate_gcs_access(mock_vector_search, mock_settings)
assert error is None
mock_session.get.assert_called_once()
call_args = mock_session.get.call_args
assert "test-bucket" in call_args[0][0]
assert call_args[1]["headers"]["Authorization"] == "Bearer fake-access-token"
async def test_access_denied_403(self, mock_vector_search, mock_settings, mock_session, mock_response):
"""Test handling of 403 access denied."""
mock_response.status = 403
mock_response.ok = False
mock_session.get = MagicMock()
mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
mock_vector_search.storage._get_aio_session.return_value = mock_session
with patch('knowledge_search_mcp.services.validation.Token') as MockToken:
mock_token = MockToken.return_value
mock_token.get = AsyncMock(return_value="fake-access-token")
error = await validate_gcs_access(mock_vector_search, mock_settings)
assert error is not None
assert "Access denied to bucket 'test-bucket'" in error
assert "permissions" in error.lower()
async def test_bucket_not_found_404(self, mock_vector_search, mock_settings, mock_session, mock_response):
"""Test handling of 404 bucket not found."""
mock_response.status = 404
mock_response.ok = False
mock_session.get = MagicMock()
mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
mock_vector_search.storage._get_aio_session.return_value = mock_session
with patch('knowledge_search_mcp.services.validation.Token') as MockToken:
mock_token = MockToken.return_value
mock_token.get = AsyncMock(return_value="fake-access-token")
error = await validate_gcs_access(mock_vector_search, mock_settings)
assert error is not None
assert "Bucket 'test-bucket' not found" in error
assert "bucket name" in error.lower()
async def test_server_error_500(self, mock_vector_search, mock_settings, mock_session, mock_response):
"""Test handling of 500 server error."""
mock_response.status = 500
mock_response.ok = False
mock_response.text = AsyncMock(return_value='{"error": "Internal server error"}')
mock_session.get = MagicMock()
mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
mock_vector_search.storage._get_aio_session.return_value = mock_session
with patch('knowledge_search_mcp.services.validation.Token') as MockToken:
mock_token = MockToken.return_value
mock_token.get = AsyncMock(return_value="fake-access-token")
error = await validate_gcs_access(mock_vector_search, mock_settings)
assert error is not None
assert "Failed to access bucket 'test-bucket': 500" in error
async def test_token_acquisition_error(self, mock_vector_search, mock_settings, mock_session):
"""Test handling of token acquisition error."""
mock_vector_search.storage._get_aio_session.return_value = mock_session
with patch('knowledge_search_mcp.services.validation.Token') as MockToken:
mock_token = MockToken.return_value
mock_token.get = AsyncMock(side_effect=Exception("Failed to get access token"))
error = await validate_gcs_access(mock_vector_search, mock_settings)
assert error is not None
assert "GCS:" in error
assert "Failed to get access token" in error
async def test_network_error(self, mock_vector_search, mock_settings, mock_session):
    """Test handling of network error."""
    # The GET call itself blows up synchronously with a connection failure.
    mock_session.get = MagicMock(side_effect=ConnectionError("Network unreachable"))
    mock_vector_search.storage._get_aio_session.return_value = mock_session

    with patch('knowledge_search_mcp.services.validation.Token') as MockToken:
        # Token acquisition still succeeds; only the network is broken.
        MockToken.return_value.get = AsyncMock(return_value="fake-access-token")
        error = await validate_gcs_access(mock_vector_search, mock_settings)

    # The network failure should be reported as a GCS error with the cause.
    assert error is not None
    assert "GCS:" in error
    assert "Network unreachable" in error
class TestValidateVectorSearchAccess:
    """Tests for validate_vector_search_access function.

    Each test stubs the aiohttp-style session returned by the client's
    ``_get_aio_session`` and drives the validator against a canned HTTP
    status. ``session.get(...)`` is used as an async context manager, so
    the tests attach ``__aenter__``/``__aexit__`` to ``get.return_value``.
    """

    @pytest.fixture
    def mock_settings(self):
        """Create mock settings."""
        settings = MagicMock(spec=Settings)
        # Fully-qualified endpoint resource name; tests assert on the
        # trailing "test-endpoint" id appearing in URLs and error text.
        settings.endpoint_name = "projects/test/locations/us-central1/indexEndpoints/test-endpoint"
        settings.location = "us-central1"
        return settings

    @pytest.fixture
    def mock_vector_search(self):
        """Create a mock vector search client."""
        vs = MagicMock()
        # Auth headers are fetched asynchronously by the validator.
        vs._async_get_auth_headers = AsyncMock(return_value={"Authorization": "Bearer fake-token"})
        return vs

    @pytest.fixture
    def mock_session(self):
        """Create a mock aiohttp session."""
        session = MagicMock()
        return session

    @pytest.fixture
    def mock_response(self):
        """Create a mock HTTP response.

        ``status``/``ok`` are set per-test; ``text`` defaults to a small
        JSON body so error paths that read it do not fail.
        """
        response = MagicMock()
        response.text = AsyncMock(return_value='{"name": "test-endpoint"}')
        return response

    async def test_successful_validation(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """Test successful vector search endpoint validation."""
        mock_response.status = 200
        mock_response.ok = True
        # Wire session.get(...) up as an async context manager yielding
        # the fake 200 response.
        mock_session.get = MagicMock()
        mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
        mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        mock_vector_search._get_aio_session.return_value = mock_session
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        # None means validation passed.
        assert error is None
        mock_vector_search._async_get_auth_headers.assert_called_once()
        mock_session.get.assert_called_once()
        # The request must hit the regional aiplatform host for the
        # configured endpoint, carrying the mocked auth header.
        call_args = mock_session.get.call_args
        assert "us-central1-aiplatform.googleapis.com" in call_args[0][0]
        assert "test-endpoint" in call_args[0][0]
        assert call_args[1]["headers"]["Authorization"] == "Bearer fake-token"

    async def test_access_denied_403(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """Test handling of 403 access denied."""
        mock_response.status = 403
        mock_response.ok = False
        mock_session.get = MagicMock()
        mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
        mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        mock_vector_search._get_aio_session.return_value = mock_session
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        # 403 should produce an actionable message mentioning permissions.
        assert error is not None
        assert "Access denied to endpoint" in error
        assert "test-endpoint" in error
        assert "permissions" in error.lower()

    async def test_endpoint_not_found_404(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """Test handling of 404 endpoint not found."""
        mock_response.status = 404
        mock_response.ok = False
        mock_session.get = MagicMock()
        mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
        mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        mock_vector_search._get_aio_session.return_value = mock_session
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        assert error is not None
        assert "not found" in error.lower()
        assert "test-endpoint" in error

    async def test_server_error_503(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """Test handling of 503 service unavailable."""
        mock_response.status = 503
        mock_response.ok = False
        # Error paths read the response body, so give it a JSON payload.
        mock_response.text = AsyncMock(return_value='{"error": "Service unavailable"}')
        mock_session.get = MagicMock()
        mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
        mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        mock_vector_search._get_aio_session.return_value = mock_session
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        # Non-403/404 failures fall through to a generic message with the
        # status code included.
        assert error is not None
        assert "Failed to access endpoint" in error
        assert "503" in error

    async def test_auth_header_error(self, mock_vector_search, mock_settings):
        """Test handling of authentication header error."""
        # Auth-header fetch fails before any HTTP call is made.
        mock_vector_search._async_get_auth_headers = AsyncMock(
            side_effect=Exception("Failed to get auth headers")
        )
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        assert error is not None
        assert "Vector Search:" in error
        assert "Failed to get auth headers" in error

    async def test_network_timeout(self, mock_vector_search, mock_settings, mock_session):
        """Test handling of network timeout."""
        # The GET call itself raises synchronously.
        mock_session.get = MagicMock(side_effect=TimeoutError("Request timed out"))
        mock_vector_search._get_aio_session.return_value = mock_session
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        assert error is not None
        assert "Vector Search:" in error
        assert "Request timed out" in error

    async def test_connection_error(self, mock_vector_search, mock_settings, mock_session):
        """Test handling of connection error."""
        mock_session.get = MagicMock(side_effect=ConnectionError("Connection refused"))
        mock_vector_search._get_aio_session.return_value = mock_session
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        assert error is not None
        assert "Vector Search:" in error
        assert "Connection refused" in error

    async def test_endpoint_url_construction(self, mock_vector_search, mock_settings, mock_session, mock_response):
        """Test that endpoint URL is constructed correctly."""
        mock_response.status = 200
        mock_response.ok = True
        mock_session.get = MagicMock()
        mock_session.get.return_value.__aenter__ = AsyncMock(return_value=mock_response)
        mock_session.get.return_value.__aexit__ = AsyncMock(return_value=None)
        mock_vector_search._get_aio_session.return_value = mock_session
        # Custom location
        # Overriding location must change the regional API host that the
        # validator targets.
        mock_settings.location = "europe-west1"
        mock_settings.endpoint_name = "projects/my-project/locations/europe-west1/indexEndpoints/my-endpoint"
        error = await validate_vector_search_access(mock_vector_search, mock_settings)
        assert error is None
        call_args = mock_session.get.call_args
        url = call_args[0][0]
        assert "europe-west1-aiplatform.googleapis.com" in url
        assert "my-endpoint" in url

View File

@@ -1,4 +0,0 @@
from .config import Settings, _args, cfg
from .logging_setup import log_structured_entry
__all__ = ['Settings', '_args', 'cfg', 'log_structured_entry']

157
uv.lock generated
View File

@@ -369,6 +369,90 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
] ]
[[package]]
name = "coverage"
version = "7.13.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" },
{ url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" },
{ url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" },
{ url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" },
{ url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" },
{ url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" },
{ url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" },
{ url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" },
{ url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" },
{ url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" },
{ url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" },
{ url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" },
{ url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" },
{ url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" },
{ url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" },
{ url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" },
{ url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" },
{ url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" },
{ url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" },
{ url = "https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" },
{ url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" },
{ url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" },
{ url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" },
{ url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" },
{ url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" },
{ url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" },
{ url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" },
{ url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" },
{ url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" },
{ url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" },
{ url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" },
{ url = "https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" },
{ url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" },
{ url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" },
{ url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" },
{ url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" },
{ url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" },
{ url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" },
{ url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" },
{ url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" },
{ url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" },
{ url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" },
{ url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = "2026-02-09T12:58:02.736Z" },
{ url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" },
{ url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" },
{ url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" },
{ url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" },
{ url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" },
{ url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" },
{ url = "https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" },
{ url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" },
{ url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" },
{ url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" },
{ url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" },
{ url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" },
{ url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" },
{ url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" },
{ url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" },
{ url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" },
{ url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" },
{ url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" },
{ url = "https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" },
{ url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" },
{ url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" },
{ url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" },
{ url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" },
{ url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" },
{ url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" },
{ url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" },
{ url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" },
{ url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" },
{ url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" },
{ url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = "2026-02-09T12:58:56.936Z" },
{ url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" },
{ url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" },
{ url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" },
]
[[package]] [[package]]
name = "cryptography" name = "cryptography"
version = "46.0.5" version = "46.0.5"
@@ -1123,7 +1207,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" },
{ url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" },
{ url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" },
{ url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" },
{ url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" },
{ url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" },
{ url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" },
@@ -1132,7 +1215,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" },
{ url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" },
{ url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" },
{ url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" },
{ url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" },
{ url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" },
{ url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" },
@@ -1141,7 +1223,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" },
{ url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" },
{ url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" },
{ url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" },
{ url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" },
{ url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" },
{ url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" },
@@ -1150,7 +1231,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" },
{ url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" },
{ url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" },
{ url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" },
{ url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" },
{ url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" },
{ url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" },
@@ -1317,6 +1397,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" },
] ]
[[package]]
name = "iniconfig"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
]
[[package]] [[package]]
name = "jsonschema" name = "jsonschema"
version = "4.26.0" version = "4.26.0"
@@ -1347,7 +1436,7 @@ wheels = [
[[package]] [[package]]
name = "knowledge-search-mcp" name = "knowledge-search-mcp"
version = "0.1.0" version = "0.1.0"
source = { virtual = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "aiohttp" }, { name = "aiohttp" },
{ name = "gcloud-aio-auth" }, { name = "gcloud-aio-auth" },
@@ -1362,6 +1451,9 @@ dependencies = [
[package.dev-dependencies] [package.dev-dependencies]
dev = [ dev = [
{ name = "google-adk" }, { name = "google-adk" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
{ name = "pytest-cov" },
{ name = "ruff" }, { name = "ruff" },
{ name = "ty" }, { name = "ty" },
] ]
@@ -1381,6 +1473,9 @@ requires-dist = [
[package.metadata.requires-dev] [package.metadata.requires-dev]
dev = [ dev = [
{ name = "google-adk", specifier = ">=1.25.1" }, { name = "google-adk", specifier = ">=1.25.1" },
{ name = "pytest", specifier = ">=8.0.0" },
{ name = "pytest-asyncio", specifier = ">=0.24.0" },
{ name = "pytest-cov", specifier = ">=6.0.0" },
{ name = "ruff", specifier = ">=0.15.2" }, { name = "ruff", specifier = ">=0.15.2" },
{ name = "ty", specifier = ">=0.0.18" }, { name = "ty", specifier = ">=0.0.18" },
] ]
@@ -1842,6 +1937,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
] ]
[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
[[package]] [[package]]
name = "propcache" name = "propcache"
version = "0.4.1" version = "0.4.1"
@@ -2171,6 +2275,49 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
] ]
[[package]]
name = "pytest"
version = "9.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
]
[[package]]
name = "pytest-asyncio"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
]
[[package]]
name = "pytest-cov"
version = "7.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage" },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]
[[package]] [[package]]
name = "python-dateutil" name = "python-dateutil"
version = "2.9.0.post0" version = "2.9.0.post0"