Fix: Agrega logs para las operaciones en la base de datos #4

Merged
A8065384 merged 8 commits from fix/logs into main 2026-02-24 22:37:15 +00:00
3 changed files with 108 additions and 0 deletions
Showing only changes of commit 3b7dd91a71 - Show all commits

4
utils/__init__.py Normal file
View File

@@ -0,0 +1,4 @@
from .config import Settings, _args
from .logging_setup import log_structured_entry
__all__ = ['Settings', '_args', 'log_structured_entry']

54
utils/config.py Normal file
View File

@@ -0,0 +1,54 @@
import os
import argparse
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, YamlConfigSettingsSource
def _parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"--transport",
choices=["stdio", "sse"],
default="stdio",
)
parser.add_argument("--host", default="0.0.0.0")
parser.add_argument("--port", type=int, default=8080)
parser.add_argument(
"--config",
default=os.environ.get("CONFIG_FILE", "config.yaml"),
)
return parser.parse_args()
_args = _parse_args()
class Settings(BaseSettings):
    """Server configuration populated from env vars and a YAML config file.

    Source precedence (highest first), as wired in
    ``settings_customise_sources``: init kwargs, environment variables,
    the ``.env`` file, the YAML file named by ``--config``, file secrets.
    """

    model_config = {"env_file": ".env", "yaml_file": _args.config}

    # GCP / Vertex AI resources -- all required; validation fails if absent.
    project_id: str
    location: str
    bucket: str
    index_name: str
    deployed_index_id: str
    endpoint_name: str
    endpoint_domain: str

    # Tunables with defaults.
    embedding_model: str = "gemini-embedding-001"
    search_limit: int = 10
    # Logging verbosity; settable via the LOG_LEVEL env var or the YAML
    # file (env matching is case-insensitive by pydantic-settings default).
    # Added per review: "Convertir LOG LEVEL en parámetro de ambiente".
    log_level: str = "INFO"

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        """Insert the YAML source between dotenv and file secrets."""
        return (
            init_settings,
            env_settings,
            dotenv_settings,
            YamlConfigSettingsSource(settings_cls),
            file_secret_settings,
        )

50
utils/logging_setup.py Normal file
View File

@@ -0,0 +1,50 @@
"""
Centralized Cloud Logging setup.
Uses CloudLoggingHandler (background-thread transport) so emitting a log record does not add request latency.
"""
import logging
from typing import Optional, Dict, Literal
import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler
from .config import Settings
def _setup_logger() -> logging.Logger:
    """Create or return the singleton evaluation logger.

    Attaches a ``CloudLoggingHandler`` (background-thread transport, so
    emitting a record does not block the caller). Falls back to console
    logging when Cloud Logging is unreachable (e.g. local development).

    Returns:
        The process-wide logger named ``va_agent-evaluation-logs``.
    """
    log_name = "va_agent-evaluation-logs"
    logger = logging.getLogger(log_name)
    # Idempotence guard first: repeated imports must not stack duplicate
    # handlers, and we skip re-validating Settings on those calls.
    if any(isinstance(h, CloudLoggingHandler) for h in logger.handlers):
        return logger
    # NOTE(review): Settings is also instantiated by other modules --
    # consider exposing a shared singleton from utils.config instead.
    cfg = Settings.model_validate({})
    # Log level is now configurable (LOG_LEVEL env var / YAML via Settings);
    # getattr keeps this safe against a Settings without the field.
    level = getattr(logging, str(getattr(cfg, "log_level", "INFO")).upper(), logging.INFO)
    try:
        client = google.cloud.logging.Client(project=cfg.project_id)
        handler = CloudLoggingHandler(client, name=log_name)  # async transport
        logger.addHandler(handler)
        logger.setLevel(level)
    except Exception as e:
        # Fallback to console if Cloud Logging is unavailable (local dev)
        logging.basicConfig(level=level)
        logger = logging.getLogger(log_name)
        logger.warning("Cloud Logging setup failed; using console. Error: %s", e)
    return logger
_eval_log = _setup_logger()
def log_structured_entry(message: str, severity: Literal["INFO", "WARNING", "ERROR"], custom_log: Optional[Dict] = None) -> None:
    """Emit one JSON-structured log record via the module's singleton logger.

    Args:
        message: Short label for the entry (e.g., "Final agent turn");
            also repeated inside the structured payload.
        severity: One of "INFO", "WARNING" or "ERROR". Unrecognized
            values fall back to INFO.
        custom_log: Structured payload attached under the "custom" key;
            defaults to an empty dict.
    """
    payload = {"message": message, "custom": {} if not custom_log else custom_log}
    numeric_level = getattr(logging, severity.upper(), logging.INFO)
    _eval_log.log(numeric_level, message, extra={"json_fields": payload})