forked from innovacion/searchbox
Compare commits
7 Commits
v0.1.1
...
embedding-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8a7918e3d8 | ||
| 0fa82cff7d | |||
|
|
a3d972ddb9 | ||
| 5e9900d6a7 | |||
| 611c2c4b81 | |||
| 23a4ce9fe3 | |||
| 9ddb970ca4 |
3
.github/workflows/ci.yaml
vendored
3
.github/workflows/ci.yaml
vendored
@@ -4,6 +4,9 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
ci:
|
||||
|
||||
@@ -22,3 +22,6 @@ spec:
|
||||
image: gitea.ia-innovacion.work/innovacion/searchbox-mcp:latest
|
||||
ports:
|
||||
- containerPort: 8000
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: vault-readonly-token
|
||||
|
||||
5
.mise/config.toml
Normal file
5
.mise/config.toml
Normal file
@@ -0,0 +1,5 @@
|
||||
[tasks.check]
|
||||
run = ["uv run ruff check --fix", "uv run ruff format"]
|
||||
|
||||
[tasks.test]
|
||||
run = "uv run pytest --cov"
|
||||
@@ -5,6 +5,7 @@ description = "Add your description here"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.13"
|
||||
dependencies = [
|
||||
"openai>=2.0.0",
|
||||
"qdrant-client==1.13",
|
||||
"vault-settings>=0.1.0",
|
||||
]
|
||||
|
||||
@@ -6,10 +6,15 @@ operations across different backend implementations.
|
||||
|
||||
from typing import final
|
||||
|
||||
from .embedder.base import BaseEmbedder
|
||||
from .engine import Backend, get_engine
|
||||
from .models import Chunk, Condition
|
||||
|
||||
|
||||
class EmbedderNotConfiguredError(ValueError):
|
||||
"""Raised when embedder is required but not configured."""
|
||||
|
||||
|
||||
@final
|
||||
class Client:
|
||||
"""High-level client for vector search operations.
|
||||
@@ -20,20 +25,29 @@ class Client:
|
||||
Args:
|
||||
backend: The vector search backend to use (e.g., Backend.QDRANT)
|
||||
collection: Name of the collection to operate on
|
||||
embedder: Optional embedder for converting text queries to vectors
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, backend: Backend, collection: str, **kwargs: str):
|
||||
def __init__(
|
||||
self,
|
||||
backend: Backend,
|
||||
collection: str,
|
||||
embedder: BaseEmbedder | None = None,
|
||||
**kwargs: str,
|
||||
):
|
||||
"""Initialize the client with a specific backend and collection.
|
||||
|
||||
Args:
|
||||
backend: The vector search backend to use
|
||||
collection: Name of the collection to operate on
|
||||
embedder: Optional embedder for automatic query embedding
|
||||
**kwargs: Additional keyword arguments to pass to the backend
|
||||
|
||||
"""
|
||||
self.engine = get_engine(backend, **kwargs)
|
||||
self.collection = collection
|
||||
self.embedder = embedder
|
||||
|
||||
async def create_index(self, size: int) -> bool:
|
||||
"""Create a vector index with the specified dimension size.
|
||||
@@ -61,7 +75,7 @@ class Client:
|
||||
|
||||
async def semantic_search(
|
||||
self,
|
||||
embedding: list[float],
|
||||
query: str | list[float],
|
||||
limit: int = 10,
|
||||
conditions: list[Condition] | None = None,
|
||||
threshold: float | None = None,
|
||||
@@ -69,7 +83,7 @@ class Client:
|
||||
"""Perform semantic search using vector similarity.
|
||||
|
||||
Args:
|
||||
embedding: Query vector as a list of floats
|
||||
query: Text query to embed (requires embedder) or pre-computed vector
|
||||
limit: Maximum number of results to return (default: 10)
|
||||
conditions: Optional list of filter conditions to apply
|
||||
threshold: Optional minimum similarity score threshold
|
||||
@@ -77,7 +91,20 @@ class Client:
|
||||
Returns:
|
||||
List of search results with chunk IDs, scores, and metadata
|
||||
|
||||
Raises:
|
||||
EmbedderNotConfiguredError: If query is a string but no embedder is configured
|
||||
|
||||
"""
|
||||
# Handle query parameter
|
||||
if isinstance(query, str):
|
||||
if self.embedder is None:
|
||||
msg = "Cannot use text query without an embedder"
|
||||
raise EmbedderNotConfiguredError(msg)
|
||||
embedding = self.embedder.embed(query)
|
||||
else:
|
||||
# query is already a list[float]
|
||||
embedding = query
|
||||
|
||||
return await self.engine.semantic_search(
|
||||
embedding, self.collection, limit, conditions, threshold
|
||||
)
|
||||
|
||||
@@ -17,6 +17,10 @@ class Settings(VaultSettings):
|
||||
qdrant_url: The URL endpoint for the vector database server (e.g., Qdrant).
|
||||
qdrant_api_key: Optional API key for authenticating with the vector database.
|
||||
If None, the connection will be made without authentication.
|
||||
azure_openai_endpoint: Azure OpenAI endpoint URL.
|
||||
azure_openai_api_key: Azure OpenAI API key.
|
||||
azure_openai_api_version: Azure OpenAI API version (e.g., "2024-02-01").
|
||||
azure_openai_embedding_model: Azure OpenAI embedding model name.
|
||||
|
||||
Example:
|
||||
>>> settings = Settings()
|
||||
@@ -34,3 +38,7 @@ class Settings(VaultSettings):
|
||||
|
||||
qdrant_url: str
|
||||
qdrant_api_key: str | None = None
|
||||
azure_openai_endpoint: str | None = None
|
||||
azure_openai_api_key: str | None = None
|
||||
azure_openai_api_version: str | None = None
|
||||
azure_openai_embedding_model: str | None = None
|
||||
|
||||
115
src/searchbox/embedder/__init__.py
Normal file
115
src/searchbox/embedder/__init__.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""Embedder package.
|
||||
|
||||
This package provides an abstract embedder interface and concrete implementations
|
||||
for different embedding service providers. It uses a factory pattern with caching
|
||||
to provide efficient embedder instantiation.
|
||||
|
||||
The package includes:
|
||||
- Abstract BaseEmbedder class
|
||||
- AzureEmbedder implementation for Azure OpenAI
|
||||
- EmbedderBackend enum for specifying embedder types
|
||||
- Factory function with overloaded type hints for type safety
|
||||
|
||||
Example:
|
||||
>>> from searchbox.embedder import get_embedder, EmbedderBackend
|
||||
>>> embedder = get_embedder(EmbedderBackend.AZURE)
|
||||
>>> embedding = embedder.embed("Hello world")
|
||||
|
||||
"""
|
||||
|
||||
from enum import StrEnum
|
||||
from functools import cache
|
||||
from typing import Literal, overload
|
||||
|
||||
from .azure import AzureEmbedder
|
||||
from .base import BaseEmbedder
|
||||
|
||||
|
||||
class UnknownEmbedderError(Exception):
    """Raised when the embedder factory receives an unsupported backend.

    The exception message embeds the offending backend name so callers
    can surface it directly to users or logs.
    """

    def __init__(self, backend: str):
        """Build the exception message from the unknown *backend* name."""
        msg = f"Unknown embedder type: {backend}"
        super().__init__(msg)
|
||||
|
||||
|
||||
class EmbedderBackend(StrEnum):
|
||||
"""Enumeration of supported embedder backends.
|
||||
|
||||
This enum defines the available embedder implementations that can
|
||||
be used with the embedder factory. Each backend corresponds to a specific
|
||||
embedding service provider.
|
||||
|
||||
Attributes:
|
||||
AZURE: Azure OpenAI embedder backend
|
||||
|
||||
Example:
|
||||
>>> backend = EmbedderBackend.AZURE
|
||||
>>> print(backend) # "azure"
|
||||
>>> embedder = get_embedder(backend)
|
||||
|
||||
"""
|
||||
|
||||
AZURE = "azure"
|
||||
|
||||
|
||||
@overload
def get_embedder(backend: Literal["azure"]) -> AzureEmbedder: ...


@overload
def get_embedder(
    backend: Literal["azure"],
    *,
    # Keyword-only: the runtime implementation only accepts **kwargs, so a
    # positional call would raise TypeError. The previous overload wrongly
    # advertised these as positional parameters.
    model: str,
    azure_endpoint: str,
    api_key: str,
    openai_api_version: str,
) -> AzureEmbedder: ...


@overload
def get_embedder(backend: EmbedderBackend, **kwargs: str) -> BaseEmbedder: ...


@cache
def get_embedder(backend: EmbedderBackend, **kwargs: str) -> BaseEmbedder:
    """Get an embedder instance for the specified backend.

    This factory function creates and returns embedder instances based on the
    specified backend type. Instances are cached using functools.cache, so
    multiple calls with the same backend and keyword arguments return the
    same instance.

    Args:
        backend: The embedder backend to use. Must be an EmbedderBackend enum
            value (or its string value, since EmbedderBackend is a StrEnum).
        **kwargs: Additional keyword arguments forwarded to the embedder
            constructor. When omitted, the embedder is built from Settings.

    Returns:
        An embedder instance implementing the BaseEmbedder interface. The
        specific type depends on the backend:
        - EmbedderBackend.AZURE returns AzureEmbedder

    Raises:
        UnknownEmbedderError: If an unknown backend type is provided.
        ValueError: If required settings are missing when using from_settings.

    Example:
        >>> embedder = get_embedder(EmbedderBackend.AZURE)
        >>> isinstance(embedder, AzureEmbedder)  # True

    """
    # Guard clause: reject unknown backends up front.
    if backend != EmbedderBackend.AZURE:
        raise UnknownEmbedderError(backend)

    # Explicit kwargs take precedence; otherwise fall back to Settings.
    return AzureEmbedder(**kwargs) if kwargs else AzureEmbedder.from_settings()
|
||||
|
||||
|
||||
__all__ = [
|
||||
"BaseEmbedder",
|
||||
"AzureEmbedder",
|
||||
"EmbedderBackend",
|
||||
"get_embedder",
|
||||
"UnknownEmbedderError",
|
||||
]
|
||||
106
src/searchbox/embedder/azure.py
Normal file
106
src/searchbox/embedder/azure.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""Embedder class using Azure AI Foundry."""
|
||||
|
||||
from openai import AzureOpenAI
|
||||
|
||||
from ..config import Settings
|
||||
from .base import BaseEmbedder
|
||||
|
||||
|
||||
class AzureEmbedder(BaseEmbedder):
    """Embedder backed by the Azure OpenAI Service.

    Generates text embeddings through Azure's OpenAI API endpoint. Works
    with any Azure OpenAI embedding model (text-embedding-ada-002,
    text-embedding-3-small, text-embedding-3-large, etc.).

    Args:
        model: The embedding model name (e.g., "text-embedding-3-large")
        azure_endpoint: Azure OpenAI endpoint URL
        api_key: Azure OpenAI API key
        openai_api_version: API version (e.g., "2024-02-01")

    Example:
        >>> embedder = AzureEmbedder(
        ...     model="text-embedding-3-large",
        ...     azure_endpoint="https://chatocp.openai.azure.com/",
        ...     api_key="your-api-key",
        ...     openai_api_version="2024-02-01"
        ... )
        >>> embedding = embedder.embed("Hello world")

    """

    def __init__(
        self,
        model: str,
        azure_endpoint: str,
        api_key: str,
        openai_api_version: str,
    ):
        """Initialize the Azure OpenAI embedder.

        Args:
            model: The embedding model name (e.g., "text-embedding-3-large")
            azure_endpoint: Azure OpenAI endpoint URL
            api_key: Azure OpenAI API key
            openai_api_version: API version (e.g., "2024-02-01")

        """
        self.model = model
        # The AzureOpenAI client handles auth and request routing.
        self.client = AzureOpenAI(
            azure_endpoint=azure_endpoint,
            api_key=api_key,
            api_version=openai_api_version,
        )

    @classmethod
    def from_settings(cls) -> "AzureEmbedder":
        """Build an AzureEmbedder from the application Settings.

        Returns:
            Initialized AzureEmbedder instance.

        Raises:
            ValueError: If required settings are not configured.

        """
        settings = Settings()  # type: ignore[reportCallArgs]

        # All four values must be present and non-empty (truthy check, so
        # empty strings are rejected as well as None).
        required = (
            settings.azure_openai_endpoint,
            settings.azure_openai_api_key,
            settings.azure_openai_api_version,
            settings.azure_openai_embedding_model,
        )
        if not all(required):
            msg = (
                "Missing required Azure OpenAI settings. "
                "Ensure AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_KEY, "
                "AZURE_OPENAI_API_VERSION, and AZURE_OPENAI_EMBEDDING_MODEL "
                "are set."
            )
            raise ValueError(msg)

        return cls(
            model=settings.azure_openai_embedding_model,  # type: ignore[arg-type]
            azure_endpoint=settings.azure_openai_endpoint,  # type: ignore[arg-type]
            api_key=settings.azure_openai_api_key,  # type: ignore[arg-type]
            openai_api_version=settings.azure_openai_api_version,  # type: ignore[arg-type]
        )

    def embed(self, text: str) -> list[float]:
        """Generate the embedding vector for *text*.

        Args:
            text: Input text to embed

        Returns:
            List of floats representing the embedding vector

        """
        result = self.client.embeddings.create(model=self.model, input=text)
        # Single input -> single embedding in the response payload.
        return result.data[0].embedding
|
||||
50
src/searchbox/embedder/base.py
Normal file
50
src/searchbox/embedder/base.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Base embedder interface for text embedding models.
|
||||
|
||||
This module defines the abstract base class that all embedder implementations
|
||||
must inherit from, ensuring a consistent interface across different embedding
|
||||
providers (Azure OpenAI, FastEmbed, OpenAI, Cohere, etc.).
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class BaseEmbedder(ABC):
    """Abstract interface for text embedding models.

    Every embedder implementation must follow this contract, which lets the
    rest of the system work with any embedding provider (Azure OpenAI,
    FastEmbed, OpenAI, Cohere, etc.) through a single unified API.

    Subclass this and implement ``embed()`` for your specific embedding
    service.

    Example:
        >>> class MyEmbedder(BaseEmbedder):
        ...     def embed(self, text: str) -> list[float]:
        ...         # Implementation specific to your embedding service
        ...         return [0.1, 0.2, 0.3, ...]

    """

    @abstractmethod
    def embed(self, text: str) -> list[float]:
        """Convert *text* into a dense embedding vector.

        Concrete embedder classes must override this method.

        Args:
            text: Input text to embed

        Returns:
            A list of floats representing the embedding vector. The vector
            dimension depends on the specific embedding model in use.

        Example:
            >>> embedder = SomeEmbedder()
            >>> vector = embedder.embed("Hello world")
            >>> len(vector)
            1536

        """
        ...
|
||||
@@ -1,13 +1,13 @@
|
||||
"""Main MCP server implementation for vector search operations.
|
||||
|
||||
This module sets up and configures the FastMCP server with vector search capabilities.
|
||||
It creates a Qdrant engine instance and exposes the semantic search functionality
|
||||
It creates engine and embedder instances and exposes the semantic search functionality
|
||||
as an MCP tool.
|
||||
|
||||
The server provides:
|
||||
- Semantic search tool for vector similarity queries
|
||||
- Support for various search conditions and filters
|
||||
- Integration with Qdrant vector database
|
||||
- Integration with vector databases and embedding services
|
||||
|
||||
Example:
|
||||
The server is typically started using the run function from the package:
|
||||
@@ -22,30 +22,41 @@ from fastmcp import FastMCP
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
from ..engine import get_engine
|
||||
from ..client import Client
|
||||
from ..embedder import EmbedderBackend, get_embedder
|
||||
|
||||
mcp = FastMCP("Searchbox MCP")
|
||||
|
||||
engine_map = {"qdrant": get_engine("qdrant")}
|
||||
# Initialize embedder map
|
||||
embedder_map = {
|
||||
"azure": get_embedder(EmbedderBackend.AZURE),
|
||||
}
|
||||
|
||||
|
||||
@mcp.tool(exclude_args=["backend", "embedding", "collection", "limit", "threshold"])
|
||||
@mcp.tool(exclude_args=["backend", "collection", "embedder", "limit", "threshold"])
|
||||
async def get_information(
|
||||
query: Annotated[str, "The user query"],
|
||||
backend: str = "qdrant",
|
||||
embedding: list[float] = [],
|
||||
collection: str = "default",
|
||||
embedder: str = "azure",
|
||||
limit: int = 10,
|
||||
threshold: float | None = None,
|
||||
):
|
||||
"""Search a private repository for information."""
|
||||
_ = query
|
||||
"""Search a private repository for information using semantic search.
|
||||
|
||||
engine = engine_map[backend]
|
||||
|
||||
result = await engine.semantic_search(
|
||||
embedding=embedding,
|
||||
The query will be automatically converted to an embedding vector using
|
||||
the specified embedder before searching.
|
||||
"""
|
||||
# Create client with embedder
|
||||
client = Client(
|
||||
backend=backend, # type: ignore[arg-type]
|
||||
collection=collection,
|
||||
embedder=embedder_map[embedder],
|
||||
)
|
||||
|
||||
# Perform semantic search with automatic embedding
|
||||
result = await client.semantic_search(
|
||||
query=query,
|
||||
limit=limit,
|
||||
threshold=threshold,
|
||||
)
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
import json
|
||||
|
||||
import pytest
|
||||
from fastmcp import Client
|
||||
from fastembed import TextEmbedding
|
||||
|
||||
from searchbox.mcp_server.server import mcp
|
||||
|
||||
embedding_model = TextEmbedding()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def mcp_client():
|
||||
@@ -15,19 +13,18 @@ async def mcp_client():
|
||||
|
||||
|
||||
async def test_mcp_qdrant_backend(mcp_client):
|
||||
embedding = list(embedding_model.embed("Quien es el mas guapo"))[0].tolist()
|
||||
|
||||
"""Test MCP server with automatic Azure embedding."""
|
||||
result = await mcp_client.call_tool(
|
||||
name="get_information",
|
||||
arguments={
|
||||
"query": "dummy value",
|
||||
"collection": "dummy_collection",
|
||||
"embedding": embedding,
|
||||
"query": "Quien es el mas guapo",
|
||||
"collection": "azure_collection",
|
||||
},
|
||||
)
|
||||
|
||||
content = json.loads(result.content[0].text)[0]
|
||||
|
||||
assert content["chunk_id"] == "0"
|
||||
assert content["score"] >= 0.7
|
||||
assert content["payload"] == {"text": "Rick es el mas guapo"}
|
||||
assert content["score"] >= 0.65
|
||||
assert content["payload"]["page_content"] == "Rick es el mas guapo"
|
||||
assert content["payload"]["filename"] == "test.txt"
|
||||
assert content["payload"]["page"] == 1
|
||||
|
||||
68
uv.lock
generated
68
uv.lock
generated
@@ -1,5 +1,5 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
revision = 2
|
||||
requires-python = ">=3.13"
|
||||
|
||||
[[package]]
|
||||
@@ -295,6 +295,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/8b/2c95f0645c6f40211896375e6fa51f504b8ccb29c21f6ae661fe87ab044e/cyclopts-3.24.0-py3-none-any.whl", hash = "sha256:809d04cde9108617106091140c3964ee6fceb33cecdd537f7ffa360bde13ed71", size = 86154, upload-time = "2025-09-08T15:40:56.41Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "distro"
|
||||
version = "1.9.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dnspython"
|
||||
version = "2.8.0"
|
||||
@@ -625,6 +634,42 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jiter"
|
||||
version = "0.11.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/97/c4/d530e514d0f4f29b2b68145e7b389cbc7cac7f9c8c23df43b04d3d10fa3e/jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb", size = 305021, upload-time = "2025-09-15T09:19:43.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/77/796a19c567c5734cbfc736a6f987affc0d5f240af8e12063c0fb93990ffa/jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471", size = 314384, upload-time = "2025-09-15T09:19:44.849Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/9c/824334de0b037b91b6f3fa9fe5a191c83977c7ec4abe17795d3cb6d174cf/jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd", size = 337389, upload-time = "2025-09-15T09:19:46.094Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/95/ed4feab69e6cf9b2176ea29d4ef9d01a01db210a3a2c8a31a44ecdc68c38/jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921", size = 360519, upload-time = "2025-09-15T09:19:47.494Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/0c/2ad00f38d3e583caba3909d95b7da1c3a7cd82c0aa81ff4317a8016fb581/jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df", size = 487198, upload-time = "2025-09-15T09:19:49.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/8b/919b64cf3499b79bdfba6036da7b0cac5d62d5c75a28fb45bad7819e22f0/jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982", size = 377835, upload-time = "2025-09-15T09:19:50.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/7f/8ebe15b6e0a8026b0d286c083b553779b4dd63db35b43a3f171b544de91d/jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64", size = 347655, upload-time = "2025-09-15T09:19:51.726Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/64/332127cef7e94ac75719dda07b9a472af6158ba819088d87f17f3226a769/jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1", size = 386135, upload-time = "2025-09-15T09:19:53.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/c8/557b63527442f84c14774159948262a9d4fabb0d61166f11568f22fc60d2/jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758", size = 516063, upload-time = "2025-09-15T09:19:54.447Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/13/4164c819df4a43cdc8047f9a42880f0ceef5afeb22e8b9675c0528ebdccd/jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166", size = 508139, upload-time = "2025-09-15T09:19:55.764Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/70/6e06929b401b331d41ddb4afb9f91cd1168218e3371972f0afa51c9f3c31/jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80", size = 206369, upload-time = "2025-09-15T09:19:57.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/0d/8185b8e15de6dce24f6afae63380e16377dd75686d56007baa4f29723ea1/jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6", size = 202538, upload-time = "2025-09-15T09:19:58.35Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/3a/d61707803260d59520721fa326babfae25e9573a88d8b7b9cb54c5423a59/jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33", size = 313737, upload-time = "2025-09-15T09:19:59.638Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/cc/c9f0eec5d00f2a1da89f6bdfac12b8afdf8d5ad974184863c75060026457/jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03", size = 346183, upload-time = "2025-09-15T09:20:01.442Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/87/fc632776344e7aabbab05a95a0075476f418c5d29ab0f2eec672b7a1f0ac/jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba", size = 204225, upload-time = "2025-09-15T09:20:03.102Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/3b/e7f45be7d3969bdf2e3cd4b816a7a1d272507cd0edd2d6dc4b07514f2d9a/jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72", size = 304414, upload-time = "2025-09-15T09:20:04.357Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/32/13e8e0d152631fcc1907ceb4943711471be70496d14888ec6e92034e2caf/jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774", size = 314223, upload-time = "2025-09-15T09:20:05.631Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/7e/abedd5b5a20ca083f778d96bba0d2366567fcecb0e6e34ff42640d5d7a18/jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0", size = 337306, upload-time = "2025-09-15T09:20:06.917Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/e2/30d59bdc1204c86aa975ec72c48c482fee6633120ee9c3ab755e4dfefea8/jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a", size = 360565, upload-time = "2025-09-15T09:20:08.283Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/88/567288e0d2ed9fa8f7a3b425fdaf2cb82b998633c24fe0d98f5417321aa8/jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773", size = 486465, upload-time = "2025-09-15T09:20:09.613Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/6e/7b72d09273214cadd15970e91dd5ed9634bee605176107db21e1e4205eb1/jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7", size = 377581, upload-time = "2025-09-15T09:20:10.884Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/52/4db456319f9d14deed325f70102577492e9d7e87cf7097bda9769a1fcacb/jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2", size = 347102, upload-time = "2025-09-15T09:20:12.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/b4/433d5703c38b26083aec7a733eb5be96f9c6085d0e270a87ca6482cbf049/jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2", size = 386477, upload-time = "2025-09-15T09:20:13.428Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/7a/a60bfd9c55b55b07c5c441c5085f06420b6d493ce9db28d069cc5b45d9f3/jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0", size = 516004, upload-time = "2025-09-15T09:20:14.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/46/f8363e5ecc179b4ed0ca6cb0a6d3bfc266078578c71ff30642ea2ce2f203/jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73", size = 507855, upload-time = "2025-09-15T09:20:16.176Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/33/396083357d51d7ff0f9805852c288af47480d30dd31d8abc74909b020761/jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2", size = 205802, upload-time = "2025-09-15T09:20:17.661Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/ab/eb06ca556b2551d41de7d03bf2ee24285fa3d0c58c5f8d95c64c9c3281b1/jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40", size = 313405, upload-time = "2025-09-15T09:20:18.918Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/22/7ab7b4ec3a1c1f03aef376af11d23b05abcca3fb31fbca1e7557053b1ba2/jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406", size = 347102, upload-time = "2025-09-15T09:20:20.16Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.25.1"
|
||||
@@ -933,6 +978,25 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/59/0db51308fa479f9325ade08c343a5164153ad01dbb83b62ff661e1129d2e/onnxruntime-1.23.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ed85686e08cfb29ee96365b9a49e8a350aff7557c13d63d9f07ca3ad68975074", size = 17281939, upload-time = "2025-09-25T19:16:16.16Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "2.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "distro" },
|
||||
{ name = "httpx" },
|
||||
{ name = "jiter" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "sniffio" },
|
||||
{ name = "tqdm" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/5d/74fa2b0358ef15d113b1a6ca2323cee0034020b085a81a94eeddc6914de9/openai-2.0.0.tar.gz", hash = "sha256:6b9513b485f856b0be6bc44c518831acb58e37a12bed72fcc52b1177d1fb34a8", size = 565732, upload-time = "2025-09-30T17:35:57.632Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/69/41/86ddc9cdd885acc02ee50ec24ea1c5e324eea0c7a471ee841a7088653558/openai-2.0.0-py3-none-any.whl", hash = "sha256:a79f493651f9843a6c54789a83f3b2db56df0e1770f7dcbe98bcf0e967ee2148", size = 955538, upload-time = "2025-09-30T17:35:54.695Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "openapi-core"
|
||||
version = "0.19.5"
|
||||
@@ -1530,6 +1594,7 @@ name = "searchbox"
|
||||
version = "0.1.1"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "openai" },
|
||||
{ name = "qdrant-client" },
|
||||
{ name = "vault-settings" },
|
||||
]
|
||||
@@ -1552,6 +1617,7 @@ dev = [
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "fastmcp", marker = "extra == 'mcp'", specifier = ">=2.12.4" },
|
||||
{ name = "openai", specifier = ">=2.0.0" },
|
||||
{ name = "qdrant-client", specifier = "==1.13" },
|
||||
{ name = "vault-settings", specifier = ">=0.1.0" },
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user