.
This commit is contained in:
11
src/capa_de_integracion/__init__.py
Normal file
11
src/capa_de_integracion/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is,
|
||||
without warranty or representation for any use or purpose.
|
||||
Your use of it is subject to your agreement with Google.
|
||||
|
||||
Capa de Integración - Conversational AI Orchestrator Service
|
||||
"""
|
||||
|
||||
from .main import main, app
|
||||
|
||||
__all__ = ["main", "app"]
|
||||
5
src/capa_de_integracion/config/__init__.py
Normal file
5
src/capa_de_integracion/config/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Configuration module."""
|
||||
|
||||
from .settings import Settings, get_settings
|
||||
|
||||
__all__ = ["Settings", "get_settings"]
|
||||
113
src/capa_de_integracion/config/settings.py
Normal file
113
src/capa_de_integracion/config/settings.py
Normal file
@@ -0,0 +1,113 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Application configuration settings.
|
||||
"""
|
||||
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from pydantic import Field
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration from environment variables.

    Values come from the process environment and an optional ``.env`` file
    (UTF-8, case-insensitive matching). Each field's ``alias`` is the
    environment variable it reads; fields declared with ``...`` are required
    and will make construction fail if the variable is missing.
    """

    model_config = SettingsConfigDict(
        env_file=".env", env_file_encoding="utf-8", case_sensitive=False
    )

    # GCP General
    gcp_project_id: str = Field(..., alias="GCP_PROJECT_ID")
    gcp_location: str = Field(default="us-central1", alias="GCP_LOCATION")

    # Firestore
    firestore_database_id: str = Field(..., alias="GCP_FIRESTORE_DATABASE_ID")
    firestore_host: str = Field(
        default="firestore.googleapis.com", alias="GCP_FIRESTORE_HOST"
    )
    firestore_port: int = Field(default=443, alias="GCP_FIRESTORE_PORT")
    # NOTE: alias is GCP_FIRESTORE_IMPORTER_ENABLE (no trailing D) — keep the
    # env var name in sync with deployment manifests.
    firestore_importer_enabled: bool = Field(
        default=False, alias="GCP_FIRESTORE_IMPORTER_ENABLE"
    )

    # Redis
    redis_host: str = Field(..., alias="REDIS_HOST")
    redis_port: int = Field(default=6379, alias="REDIS_PORT")
    redis_password: str | None = Field(default=None, alias="REDIS_PWD")
    # No alias: read from REDIS_SSL via the field name (case-insensitive).
    redis_ssl: bool = Field(default=False)

    # Dialogflow CX
    dialogflow_project_id: str = Field(..., alias="DIALOGFLOW_CX_PROJECT_ID")
    dialogflow_location: str = Field(..., alias="DIALOGFLOW_CX_LOCATION")
    dialogflow_agent_id: str = Field(..., alias="DIALOGFLOW_CX_AGENT_ID")
    dialogflow_default_language: str = Field(
        default="es", alias="DIALOGFLOW_DEFAULT_LANGUAGE_CODE"
    )

    # Gemini
    gemini_model_name: str = Field(
        default="gemini-2.0-flash-001", alias="GEMINI_MODEL_NAME"
    )

    # Message Filter (Gemini)
    message_filter_model: str = Field(
        default="gemini-2.0-flash-001", alias="MESSAGE_FILTER_GEMINI_MODEL"
    )
    message_filter_temperature: float = Field(
        default=0.1, alias="MESSAGE_FILTER_TEMPERATURE"
    )
    message_filter_max_tokens: int = Field(
        default=10, alias="MESSAGE_FILTER_MAX_OUTPUT_TOKENS"
    )
    message_filter_top_p: float = Field(default=0.1, alias="MESSAGE_FILTER_TOP_P")
    # No alias: env var is the field name — presumably intentional, but it
    # breaks the MESSAGE_FILTER_* naming pattern above; TODO confirm.
    message_filter_prompt_path: str = Field(default="prompts/message_filter_prompt.txt")

    # Notification Context Resolver (Gemini)
    notification_context_model: str = Field(
        default="gemini-2.0-flash-001", alias="NOTIFICATION_CONTEXT_GEMINI_MODEL"
    )
    notification_context_temperature: float = Field(
        default=0.1, alias="NOTIFICATION_CONTEXT_TEMPERATURE"
    )
    notification_context_max_tokens: int = Field(
        default=1024, alias="NOTIFICATION_CONTEXT_MAX_OUTPUT_TOKENS"
    )
    notification_context_top_p: float = Field(
        default=0.1, alias="NOTIFICATION_CONTEXT_TOP_P"
    )
    # No alias: env var is the field name — see note on
    # message_filter_prompt_path; TODO confirm.
    notification_context_prompt_path: str = Field(
        default="prompts/notification_context_resolver.txt"
    )

    # DLP
    dlp_template_complete_flow: str = Field(..., alias="DLP_TEMPLATE_COMPLETE_FLOW")

    # Conversation Context
    conversation_context_message_limit: int = Field(
        default=60, alias="CONVERSATION_CONTEXT_MESSAGE_LIMIT"
    )
    conversation_context_days_limit: int = Field(
        default=30, alias="CONVERSATION_CONTEXT_DAYS_LIMIT"
    )

    # Logging
    log_level: str = Field(default="INFO", alias="LOGGING_LEVEL_ROOT")

    @property
    def dialogflow_endpoint(self) -> str:
        """Get Dialogflow regional endpoint (host:port derived from location)."""
        return f"{self.dialogflow_location}-dialogflow.googleapis.com:443"

    @property
    def base_path(self) -> Path:
        """Get base path for resources.

        Resolves to the package's ``resources`` directory relative to this
        file, so it works regardless of the process working directory.
        """
        return Path(__file__).parent.parent / "resources"
|
||||
|
||||
|
||||
@lru_cache
def get_settings() -> Settings:
    """Build the Settings object on first use and serve the cached instance after."""
    settings = Settings()
    return settings
|
||||
15
src/capa_de_integracion/controllers/__init__.py
Normal file
15
src/capa_de_integracion/controllers/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Controllers module."""
|
||||
|
||||
from .conversation import router as conversation_router
|
||||
from .notification import router as notification_router
|
||||
from .llm_webhook import router as llm_webhook_router
|
||||
from .quick_replies import router as quick_replies_router
|
||||
from .data_purge import router as data_purge_router
|
||||
|
||||
__all__ = [
|
||||
"conversation_router",
|
||||
"notification_router",
|
||||
"llm_webhook_router",
|
||||
"quick_replies_router",
|
||||
"data_purge_router",
|
||||
]
|
||||
49
src/capa_de_integracion/controllers/conversation.py
Normal file
49
src/capa_de_integracion/controllers/conversation.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Conversation API endpoints.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from ..models import ExternalConvRequestDTO, DetectIntentResponseDTO
|
||||
from ..services import ConversationManagerService
|
||||
from ..dependencies import get_conversation_manager
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/dialogflow", tags=["conversation"])


@router.post("/detect-intent", response_model=DetectIntentResponseDTO)
async def detect_intent(
    request: ExternalConvRequestDTO,
    conversation_manager: ConversationManagerService = Depends(
        get_conversation_manager
    ),
) -> DetectIntentResponseDTO:
    """
    Detect user intent and manage conversation.

    Args:
        request: External conversation request from client
        conversation_manager: Injected conversation orchestration service

    Returns:
        Dialogflow detect intent response

    Raises:
        HTTPException: 400 for validation errors, 500 for unexpected failures
    """
    try:
        logger.info("Received detect-intent request")
        response = await conversation_manager.manage_conversation(request)
        logger.info("Successfully processed detect-intent request")
        return response

    except ValueError as e:
        logger.error(f"Validation error: {str(e)}", exc_info=True)
        # Chain the cause so tracebacks keep the original validation error.
        raise HTTPException(status_code=400, detail=str(e)) from e

    except Exception as e:
        logger.error(f"Error processing detect-intent: {str(e)}", exc_info=True)
        # Deliberately generic detail: never leak internals to the client.
        raise HTTPException(status_code=500, detail="Internal server error") from e
|
||||
44
src/capa_de_integracion/controllers/data_purge.py
Normal file
44
src/capa_de_integracion/controllers/data_purge.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Data purge API endpoints.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from ..services.data_purge import DataPurgeService
|
||||
from ..services.redis_service import RedisService
|
||||
from ..dependencies import get_redis_service, get_settings
|
||||
from ..config import Settings
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/data-purge", tags=["data-purge"])


@router.delete("/all")
async def purge_all_data(
    redis_service: RedisService = Depends(get_redis_service),
    settings: Settings = Depends(get_settings),
) -> None:
    """
    Purge all data from Redis and Firestore.

    WARNING: This is a destructive operation that will delete all conversation
    and notification data from both Redis and Firestore.

    Args:
        redis_service: Injected Redis service the purge operates on
        settings: Application configuration

    Raises:
        HTTPException: 500 if the purge fails for any reason
    """
    logger.warning(
        "Received request to purge all data. This is a destructive operation."
    )

    try:
        purge_service = DataPurgeService(settings, redis_service)
        try:
            await purge_service.purge_all_data()
        finally:
            # Release the purge service's resources even when the purge
            # itself raises; previously a failure leaked the service.
            await purge_service.close()
        logger.info("Successfully purged all data")
    except Exception as e:
        logger.error(f"Error purging all data: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail="Internal server error") from e
|
||||
99
src/capa_de_integracion/controllers/llm_webhook.py
Normal file
99
src/capa_de_integracion/controllers/llm_webhook.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
LLM webhook API endpoints for Dialogflow CX integration.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from ..models.llm_webhook import WebhookRequestDTO, WebhookResponseDTO, SessionInfoDTO
|
||||
from ..services.llm_response_tuner import LlmResponseTunerService
|
||||
from ..dependencies import get_llm_response_tuner
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/llm", tags=["llm-webhook"])


@router.post("/tune-response", response_model=WebhookResponseDTO)
async def tune_response(
    request: WebhookRequestDTO,
    llm_tuner: LlmResponseTunerService = Depends(get_llm_response_tuner),
) -> WebhookResponseDTO:
    """
    Dialogflow CX webhook to retrieve pre-generated LLM responses.

    Dialogflow calls this endpoint when an intent requires an LLM-generated
    response: the UUID arrives in session parameters and the pre-computed
    response is looked up in Redis.

    Flow:
        1. Dialogflow sends webhook request with UUID in parameters
        2. Service retrieves response from Redis (1-hour TTL)
        3. Returns response in session parameters

    Args:
        request: Webhook request containing session info with UUID

    Returns:
        Webhook response carrying either the stored text or an error payload;
        failures are reported in the body, never as HTTP errors.
    """
    try:
        response_uuid = request.sessionInfo.parameters.get("uuid")

        # Guard: the webhook is useless without a lookup key.
        if not response_uuid:
            logger.error("No UUID provided in webhook request")
            return _create_error_response("UUID parameter is required", is_error=True)

        stored_text = await llm_tuner.get_value(response_uuid)

        # Guard: key present but nothing stored (expired or never written).
        if not stored_text:
            logger.warning(f"No response found for UUID: {response_uuid}")
            return _create_error_response(
                "No response found for the given UUID.", is_error=False
            )

        # Success path: hand the stored text back via session parameters.
        logger.info(f"Successfully retrieved response for UUID: {response_uuid}")
        success_parameters = {
            "webhook_success": True,
            "response": stored_text,
        }
        return WebhookResponseDTO(
            sessionInfo=SessionInfoDTO(parameters=success_parameters)
        )

    except Exception as e:
        logger.error(f"Error in tune-response webhook: {e}", exc_info=True)
        return _create_error_response("An internal error occurred.", is_error=True)
|
||||
|
||||
|
||||
def _create_error_response(error_message: str, is_error: bool) -> WebhookResponseDTO:
    """
    Create error response for webhook.

    Args:
        error_message: Error message to return in session parameters
        is_error: Whether this is a critical error.
            NOTE(review): currently unused — both values produce an identical
            payload; confirm whether it should change the response shape or
            be logged, otherwise it can be dropped.

    Returns:
        Webhook response with ``webhook_success`` set to False and the error
        message placed in session parameters.
    """
    return WebhookResponseDTO(
        sessionInfo=SessionInfoDTO(
            parameters={
                "webhook_success": False,
                "error_message": error_message,
            }
        )
    )
|
||||
61
src/capa_de_integracion/controllers/notification.py
Normal file
61
src/capa_de_integracion/controllers/notification.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Notification API endpoints.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from ..models.notification import ExternalNotRequestDTO
|
||||
from ..services.notification_manager import NotificationManagerService
|
||||
from ..dependencies import get_notification_manager
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/dialogflow", tags=["notifications"])


@router.post("/notification", status_code=200)
async def process_notification(
    request: ExternalNotRequestDTO,
    notification_manager: NotificationManagerService = Depends(
        get_notification_manager
    ),
) -> None:
    """
    Process push notification from external system.

    This endpoint receives notifications (e.g., "Your card was blocked") and:
    1. Stores them in Redis/Firestore
    2. Associates them with the user's conversation session
    3. Triggers a Dialogflow event

    When the user later sends a message asking about the notification
    ("Why was it blocked?"), the message filter will classify it as
    NOTIFICATION and route to the appropriate handler.

    Args:
        request: External notification request with text, phone, and parameters

    Returns:
        None — the endpoint responds 200 with an empty body (the decorator
        sets status_code=200; the original docstring's "204" was wrong).

    Raises:
        HTTPException: 400 if validation fails, 500 for internal errors
    """
    try:
        logger.info("Received notification request")
        await notification_manager.process_notification(request)
        logger.info("Successfully processed notification request")
        # Match Java behavior: process but don't return response body

    except ValueError as e:
        logger.error(f"Validation error: {str(e)}", exc_info=True)
        # Chain the cause so tracebacks keep the original validation error.
        raise HTTPException(status_code=400, detail=str(e)) from e

    except Exception as e:
        logger.error(f"Error processing notification: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail="Internal server error") from e
|
||||
112
src/capa_de_integracion/controllers/quick_replies.py
Normal file
112
src/capa_de_integracion/controllers/quick_replies.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Quick Replies API endpoints.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from ..models.quick_replies import QuickReplyScreenRequestDTO
|
||||
from ..models import DetectIntentResponseDTO
|
||||
from ..services.quick_reply_content import QuickReplyContentService
|
||||
from ..services.redis_service import RedisService
|
||||
from ..services.firestore_service import FirestoreService
|
||||
from ..utils.session_id import generate_session_id
|
||||
from ..models.conversation import ConversationSessionDTO, ConversationEntryDTO
|
||||
from ..dependencies import get_redis_service, get_firestore_service, get_settings
|
||||
from ..config import Settings
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/quick-replies", tags=["quick-replies"])


@router.post("/screen", response_model=DetectIntentResponseDTO)
async def start_quick_reply_session(
    request: QuickReplyScreenRequestDTO,
    redis_service: RedisService = Depends(get_redis_service),
    firestore_service: FirestoreService = Depends(get_firestore_service),
    settings: Settings = Depends(get_settings),
) -> DetectIntentResponseDTO:
    """
    Start a quick reply FAQ session for a specific screen.

    Creates a conversation session with pantalla_contexto set,
    loads the quick reply questions for the screen, and returns them.

    Args:
        request: Quick reply screen request
        redis_service: Injected Redis service (primary session store)
        firestore_service: Injected Firestore service (durable copy)
        settings: Application configuration

    Returns:
        Detect intent response with quick reply questions

    Raises:
        HTTPException: 400 for missing phone / unknown screen, 500 otherwise
    """
    try:
        telefono = request.telefono
        if not telefono or not telefono.strip():
            raise ValueError("Phone number is required")

        # Generate session ID
        session_id = generate_session_id()
        user_id = f"user_by_phone_{telefono.replace(' ', '').replace('-', '')}"

        # Create system entry
        system_entry = ConversationEntryDTO(
            entity="SISTEMA",
            type="INICIO",
            timestamp=datetime.now(),
            text=f"Pantalla: {request.pantalla_contexto} Agregada a la conversacion",
            parameters=None,
            intent=None,
        )

        # Create new session with pantalla_contexto
        new_session = ConversationSessionDTO(
            sessionId=session_id,
            userId=user_id,
            telefono=telefono,
            createdAt=datetime.now(),
            lastModified=datetime.now(),
            lastMessage=system_entry.text,
            pantallaContexto=request.pantalla_contexto,
        )

        # Save session and entry
        await redis_service.save_session(new_session)
        logger.info(
            f"Created quick reply session {session_id} for screen: {request.pantalla_contexto}"
        )

        # Load quick replies
        content_service = QuickReplyContentService(settings)
        quick_reply_dto = await content_service.get_quick_replies(
            request.pantalla_contexto
        )

        if not quick_reply_dto:
            raise ValueError(
                f"Quick reply screen not found: {request.pantalla_contexto}"
            )

        # Persist a durable copy to Firestore. NOTE: despite the original
        # "background save" comment, this is awaited inline; failures are
        # tolerated and only logged, so Redis remains the source of truth.
        try:
            await firestore_service.save_session(new_session)
            await firestore_service.save_entry(session_id, system_entry)
        except Exception as e:
            logger.error(f"Background Firestore save failed: {e}")

        return DetectIntentResponseDTO(
            responseId=session_id,
            queryResult=None,
            quick_replies=quick_reply_dto,
        )

    except ValueError as e:
        logger.error(f"Validation error: {e}", exc_info=True)
        # Chain the cause so tracebacks keep the original validation error.
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        logger.error(f"Error starting quick reply session: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Internal server error") from e
|
||||
196
src/capa_de_integracion/dependencies.py
Normal file
196
src/capa_de_integracion/dependencies.py
Normal file
@@ -0,0 +1,196 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
FastAPI dependency injection.
|
||||
"""
|
||||
|
||||
|
||||
from .config import get_settings, Settings
|
||||
from .services import (
|
||||
DialogflowClientService,
|
||||
GeminiClientService,
|
||||
ConversationManagerService,
|
||||
MessageEntryFilter,
|
||||
NotificationManagerService,
|
||||
NotificationContextResolver,
|
||||
DLPService,
|
||||
LlmResponseTunerService,
|
||||
)
|
||||
from .services.redis_service import RedisService
|
||||
from .services.firestore_service import FirestoreService
|
||||
|
||||
|
||||
# Global service instances (initialized at startup)
# Lifecycle: init_services() populates most of these; startup_services()
# fills in the Redis-dependent pair (_llm_response_tuner and
# _conversation_manager) once Redis is connected; shutdown_services()
# releases them. The get_* accessors below fail fast while a value is None.
_dialogflow_client: DialogflowClientService | None = None
_gemini_client: GeminiClientService | None = None
_message_filter: MessageEntryFilter | None = None
_notification_context_resolver: NotificationContextResolver | None = None
_dlp_service: DLPService | None = None
_redis_service: RedisService | None = None
_firestore_service: FirestoreService | None = None
_conversation_manager: ConversationManagerService | None = None
_notification_manager: NotificationManagerService | None = None
_llm_response_tuner: LlmResponseTunerService | None = None
|
||||
|
||||
|
||||
def init_services(settings: Settings):
    """Initialize all services at startup.

    Constructs every service that does not need a live Redis connection.
    ``_llm_response_tuner`` and ``_conversation_manager`` are deliberately
    left as None here; startup_services() builds them after Redis connects.

    Args:
        settings: Application configuration used to construct each service.
    """
    global \
        _dialogflow_client, \
        _gemini_client, \
        _message_filter, \
        _notification_context_resolver, \
        _dlp_service, \
        _redis_service, \
        _firestore_service, \
        _conversation_manager, \
        _notification_manager, \
        _llm_response_tuner

    _dialogflow_client = DialogflowClientService(settings)
    _gemini_client = GeminiClientService(settings)
    # Both Gemini-backed helpers share the single Gemini client.
    _message_filter = MessageEntryFilter(settings, _gemini_client)
    _notification_context_resolver = NotificationContextResolver(
        settings, _gemini_client
    )
    _dlp_service = DLPService(settings)
    _redis_service = RedisService(settings)
    _firestore_service = FirestoreService(settings)

    # Note: LlmResponseTunerService will be initialized after Redis connects
    _llm_response_tuner = None

    # Initialize notification manager (without llm_response_tuner)
    _notification_manager = NotificationManagerService(
        settings=settings,
        dialogflow_client=_dialogflow_client,
        redis_service=_redis_service,
        firestore_service=_firestore_service,
        dlp_service=_dlp_service,
    )

    # Note: ConversationManagerService will be fully initialized after Redis connects
    # For now, initialize with placeholder for llm_response_tuner
    _conversation_manager = None
|
||||
|
||||
|
||||
async def startup_services():
    """Connect services at startup.

    Must run after init_services(). Connects Redis and, only if the
    connection produced a client (``_redis_service.redis`` is set), builds
    the two Redis-dependent services: the LLM response tuner and the
    conversation manager.

    NOTE(review): if Redis never connects, _conversation_manager stays None
    and get_conversation_manager() will raise at request time — confirm this
    fail-fast behavior is intended rather than failing app startup.
    """
    global \
        _redis_service, \
        _llm_response_tuner, \
        _conversation_manager, \
        _dialogflow_client, \
        _message_filter, \
        _notification_context_resolver, \
        _dlp_service, \
        _firestore_service

    settings = get_settings()

    if _redis_service:
        await _redis_service.connect()
        # Initialize LLM Response Tuner after Redis connects
        if _redis_service.redis:
            _llm_response_tuner = LlmResponseTunerService(_redis_service.redis)

            # Now initialize ConversationManagerService with all dependencies
            _conversation_manager = ConversationManagerService(
                settings=settings,
                dialogflow_client=_dialogflow_client,
                redis_service=_redis_service,
                firestore_service=_firestore_service,
                dlp_service=_dlp_service,
                message_filter=_message_filter,
                notification_context_resolver=_notification_context_resolver,
                llm_response_tuner=_llm_response_tuner,
            )
|
||||
|
||||
|
||||
async def shutdown_services():
    """Clean up services at shutdown.

    Closes every connected service that was created; the order matches the
    original explicit close sequence.
    """
    global _dialogflow_client, _redis_service, _firestore_service, _dlp_service

    closeable_services = (
        _dialogflow_client,
        _redis_service,
        _firestore_service,
        _dlp_service,
    )
    for service in closeable_services:
        if service:
            await service.close()
|
||||
|
||||
|
||||
def get_conversation_manager() -> ConversationManagerService:
    """Return the shared conversation manager, failing fast when absent."""
    manager = _conversation_manager
    if manager is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return manager
|
||||
|
||||
|
||||
def get_dialogflow_client() -> DialogflowClientService:
    """Return the shared Dialogflow client, failing fast when absent."""
    client = _dialogflow_client
    if client is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return client
|
||||
|
||||
|
||||
def get_redis_service() -> RedisService:
    """Return the shared Redis service, failing fast when absent."""
    service = _redis_service
    if service is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return service
|
||||
|
||||
|
||||
def get_firestore_service() -> FirestoreService:
    """Return the shared Firestore service, failing fast when absent."""
    service = _firestore_service
    if service is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return service
|
||||
|
||||
|
||||
def get_gemini_client() -> GeminiClientService:
    """Return the shared Gemini client, failing fast when absent."""
    client = _gemini_client
    if client is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return client
|
||||
|
||||
|
||||
def get_message_filter() -> MessageEntryFilter:
    """Return the shared message filter, failing fast when absent."""
    message_filter = _message_filter
    if message_filter is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return message_filter
|
||||
|
||||
|
||||
def get_notification_manager() -> NotificationManagerService:
    """Return the shared notification manager, failing fast when absent."""
    manager = _notification_manager
    if manager is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return manager
|
||||
|
||||
|
||||
def get_dlp_service() -> DLPService:
    """Return the shared DLP service, failing fast when absent."""
    service = _dlp_service
    if service is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return service
|
||||
|
||||
|
||||
def get_notification_context_resolver() -> NotificationContextResolver:
    """Return the shared notification context resolver, failing fast when absent."""
    resolver = _notification_context_resolver
    if resolver is None:
        raise RuntimeError("Services not initialized. Call init_services first.")
    return resolver
|
||||
|
||||
|
||||
def get_llm_response_tuner() -> LlmResponseTunerService:
    """Return the shared LLM response tuner.

    Unlike the other accessors this one is only populated by
    startup_services(), after Redis connects.
    """
    tuner = _llm_response_tuner
    if tuner is None:
        raise RuntimeError("Services not initialized. Call startup_services first.")
    return tuner
|
||||
102
src/capa_de_integracion/main.py
Normal file
102
src/capa_de_integracion/main.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Main FastAPI application.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from .config import get_settings
|
||||
from .controllers import (
|
||||
conversation_router,
|
||||
notification_router,
|
||||
llm_webhook_router,
|
||||
quick_replies_router,
|
||||
data_purge_router,
|
||||
)
|
||||
from .dependencies import init_services, startup_services, shutdown_services
|
||||
|
||||
|
||||
# Configure logging
# NOTE(review): the level is hardcoded to INFO here even though
# Settings.log_level (LOGGING_LEVEL_ROOT) exists in config/settings.py and
# is never applied — confirm whether it should drive this.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager.

    Code before ``yield`` runs at application startup (service construction
    followed by async connection), the app serves requests during the yield,
    and the code after runs at shutdown to release every service.
    """
    # Startup
    settings = get_settings()
    logger.info("Initializing services...")
    init_services(settings)
    await startup_services()
    logger.info("Application started successfully")

    yield

    # Shutdown
    logger.info("Shutting down services...")
    await shutdown_services()
    logger.info("Application shutdown complete")
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Create and configure FastAPI application.

    Wires the lifespan handler, CORS middleware, all API routers, and a
    /health probe endpoint.

    Returns:
        The fully configured FastAPI instance.
    """
    app = FastAPI(
        title="Capa de Integración - Orchestrator Service",
        description="Conversational AI orchestrator for Dialogflow CX, Gemini, and Vertex AI",
        version="0.1.0",
        lifespan=lifespan,
    )

    # CORS middleware
    # SECURITY NOTE(review): wildcard origins together with
    # allow_credentials=True is effectively "any site may send credentialed
    # requests" — lock the origin list down before production.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],  # Configure appropriately for production
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Register routers
    app.include_router(conversation_router)
    app.include_router(notification_router)
    app.include_router(llm_webhook_router)
    app.include_router(quick_replies_router)
    app.include_router(data_purge_router)

    @app.get("/health")
    async def health_check():
        """Health check endpoint for load balancers / orchestrators."""
        return {"status": "healthy", "service": "capa-de-integracion"}

    return app
|
||||
|
||||
|
||||
# Create app instance (module-level so ASGI servers can import `...main:app`)
app = create_app()


def main():
    """Entry point for CLI.

    Runs the app under uvicorn. Host and port can now be overridden through
    the HOST and PORT environment variables (defaults unchanged:
    0.0.0.0:8080), matching container platforms such as Cloud Run that
    inject PORT.
    """
    import os

    import uvicorn

    uvicorn.run(
        "capa_de_integracion.main:app",
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "8080")),
        # NOTE(review): auto-reload is a development convenience; disable it
        # for production deployments.
        reload=True,
    )


if __name__ == "__main__":
    main()
|
||||
43
src/capa_de_integracion/models/__init__.py
Normal file
43
src/capa_de_integracion/models/__init__.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Data models module."""
|
||||
|
||||
from .conversation import (
|
||||
ConversationSessionDTO,
|
||||
ConversationEntryDTO,
|
||||
ConversationMessageDTO,
|
||||
ExternalConvRequestDTO,
|
||||
DetectIntentRequestDTO,
|
||||
DetectIntentResponseDTO,
|
||||
QueryInputDTO,
|
||||
TextInputDTO,
|
||||
EventInputDTO,
|
||||
QueryParamsDTO,
|
||||
QueryResultDTO,
|
||||
MessageType,
|
||||
ConversationEntryType,
|
||||
)
|
||||
from .notification import (
|
||||
ExternalNotRequestDTO,
|
||||
NotificationSessionDTO,
|
||||
NotificationDTO,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Conversation
|
||||
"ConversationSessionDTO",
|
||||
"ConversationEntryDTO",
|
||||
"ConversationMessageDTO",
|
||||
"ExternalConvRequestDTO",
|
||||
"DetectIntentRequestDTO",
|
||||
"DetectIntentResponseDTO",
|
||||
"QueryInputDTO",
|
||||
"TextInputDTO",
|
||||
"EventInputDTO",
|
||||
"QueryParamsDTO",
|
||||
"QueryResultDTO",
|
||||
"MessageType",
|
||||
"ConversationEntryType",
|
||||
# Notification
|
||||
"ExternalNotRequestDTO",
|
||||
"NotificationSessionDTO",
|
||||
"NotificationDTO",
|
||||
]
|
||||
173
src/capa_de_integracion/models/conversation.py
Normal file
173
src/capa_de_integracion/models/conversation.py
Normal file
@@ -0,0 +1,173 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Conversation-related data models.
|
||||
"""
|
||||
|
||||
from datetime import datetime
from enum import Enum
from typing import Any

from pydantic import BaseModel, Field, field_validator, model_validator
|
||||
|
||||
|
||||
class MessageType(str, Enum):
    """Origin of a conversation message.

    Inherits from ``str`` so members serialize/compare as plain strings.
    """

    USER = "USER"    # message authored by the end user
    AGENT = "AGENT"  # message authored by the virtual agent
||||
|
||||
class ConversationEntryType(str, Enum):
    """Kind of entry recorded in a conversation.

    Inherits from ``str`` so members serialize/compare as plain strings.
    """

    INICIO = "INICIO"              # session start
    CONVERSACION = "CONVERSACION"  # regular conversation turn
    LLM = "LLM"                    # entry handled/produced by the LLM
|
||||
|
||||
class UsuarioDTO(BaseModel):
    """User information carried by external requests."""

    # Phone number is the user/session identifier; must be non-empty.
    telefono: str = Field(..., min_length=1)
    # Optional display name.
    nickname: str | None = None
||||
|
||||
|
||||
class TextInputDTO(BaseModel):
    """Free-text input for a Dialogflow query."""

    # The user's utterance.
    text: str
||||
|
||||
|
||||
class EventInputDTO(BaseModel):
    """Named-event input for a Dialogflow query (e.g. a notification event)."""

    # Dialogflow event name to trigger instead of free text.
    event: str
||||
|
||||
|
||||
class QueryParamsDTO(BaseModel):
    """Session parameters attached to a Dialogflow request."""

    # Arbitrary key/value session parameters; None when no parameters are sent.
    parameters: dict[str, Any] | None = None
||||
|
||||
|
||||
class QueryInputDTO(BaseModel):
    """Query input carrying either free text or a named event.

    At least one of ``text`` / ``event`` must be provided. Validation runs
    after model construction (``model_validator(mode="after")``) so it also
    fires when BOTH fields are left at their ``None`` defaults — a per-field
    ``field_validator`` is skipped for defaulted fields in pydantic v2
    (unless ``validate_default=True``), which previously let an empty
    ``QueryInputDTO()`` pass silently.

    Raises:
        ValueError: If neither ``text`` nor ``event`` is provided.
    """

    text: TextInputDTO | None = None
    event: EventInputDTO | None = None
    # Language code forwarded to Dialogflow; Spanish by default.
    language_code: str = "es"

    @model_validator(mode="after")
    def check_at_least_one(self) -> "QueryInputDTO":
        """Ensure either text or event is provided."""
        if self.text is None and self.event is None:
            raise ValueError("Either text or event must be provided")
        return self
||||
|
||||
|
||||
class DetectIntentRequestDTO(BaseModel):
    """Request body for a Dialogflow detect-intent call."""

    # What the user said (text) or triggered (event).
    query_input: QueryInputDTO
    # Optional session parameters forwarded with the query.
    query_params: QueryParamsDTO | None = None
||||
|
||||
|
||||
class QueryResultDTO(BaseModel):
    """Query result returned by Dialogflow.

    Field names intentionally mirror Dialogflow's camelCase JSON; the
    aliases are identical to the field names and populate_by_name allows
    construction by either.
    """

    # Fulfillment text to show the user, if any.
    responseText: str | None = Field(None, alias="responseText")
    # Session parameters returned with the result.
    parameters: dict[str, Any] | None = Field(None, alias="parameters")

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class DetectIntentResponseDTO(BaseModel):
    """Dialogflow detect-intent response returned to clients.

    Field names mirror Dialogflow's camelCase JSON.
    """

    responseId: str | None = Field(None, alias="responseId")
    queryResult: QueryResultDTO | None = Field(None, alias="queryResult")
    # Typed as Any to avoid importing QuickReplyDTO from the quick_replies
    # module here (presumably to prevent a circular import — confirm).
    quick_replies: Any | None = None  # QuickReplyDTO from quick_replies module

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class ExternalConvRequestDTO(BaseModel):
    """External conversation request received from a client channel."""

    # The user's message text.
    mensaje: str = Field(..., alias="mensaje")
    # Who is talking (phone number + optional nickname).
    usuario: UsuarioDTO = Field(..., alias="usuario")
    # Originating channel identifier.
    canal: str = Field(..., alias="canal")
    # Entry type (INICIO / CONVERSACION / LLM).
    tipo: ConversationEntryType = Field(..., alias="tipo")
    # Optional quick-reply screen context the user is currently in.
    pantalla_contexto: str | None = Field(None, alias="pantallaContexto")

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class ConversationMessageDTO(BaseModel):
    """A single message within a conversation."""

    # Author of the message; values map to MessageType ("USER"/"AGENT").
    type: str = Field(..., alias="type")  # Maps to MessageType
    # NOTE(review): naive local time; confirm whether UTC is expected.
    timestamp: datetime = Field(default_factory=datetime.now, alias="timestamp")
    text: str = Field(..., alias="text")
    # Session parameters attached to this turn, if any.
    parameters: dict[str, Any] | None = Field(None, alias="parameters")
    # Channel the message came through.
    canal: str | None = Field(None, alias="canal")

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class ConversationEntryDTO(BaseModel):
    """A single persisted conversation entry (message plus origin metadata)."""

    # Originator of the entry.
    entity: str = Field(..., alias="entity")  # "USUARIO", "AGENTE", "SISTEMA", "LLM"
    # Entry kind; values map to ConversationEntryType.
    type: str = Field(..., alias="type")  # "INICIO", "CONVERSACION", "LLM"
    # NOTE(review): naive local time; confirm whether UTC is expected.
    timestamp: datetime = Field(default_factory=datetime.now, alias="timestamp")
    text: str = Field(..., alias="text")
    parameters: dict[str, Any] | None = Field(None, alias="parameters")
    canal: str | None = Field(None, alias="canal")

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class ConversationSessionDTO(BaseModel):
    """Conversation session metadata keyed by phone number.

    Instances are treated as immutable: the ``with_*`` helpers return
    updated copies via ``model_copy`` rather than mutating in place.
    """

    sessionId: str = Field(..., alias="sessionId")
    userId: str = Field(..., alias="userId")
    telefono: str = Field(..., alias="telefono")
    # NOTE(review): naive local times throughout; confirm whether UTC is expected.
    createdAt: datetime = Field(default_factory=datetime.now, alias="createdAt")
    lastModified: datetime = Field(default_factory=datetime.now, alias="lastModified")
    # Text of the most recent message, if any.
    lastMessage: str | None = Field(None, alias="lastMessage")
    # Quick-reply screen context the user is currently in, if any.
    pantallaContexto: str | None = Field(None, alias="pantallaContexto")

    model_config = {"populate_by_name": True}

    @classmethod
    def create(
        cls, session_id: str, user_id: str, telefono: str
    ) -> "ConversationSessionDTO":
        """Create a new conversation session with both timestamps set to now."""
        now = datetime.now()
        return cls(
            sessionId=session_id,
            userId=user_id,
            telefono=telefono,
            createdAt=now,
            lastModified=now,
        )

    def with_last_message(self, last_message: str) -> "ConversationSessionDTO":
        """Return a copy with the last message set and lastModified refreshed."""
        return self.model_copy(
            update={"lastMessage": last_message, "lastModified": datetime.now()}
        )

    def with_pantalla_contexto(
        self, pantalla_contexto: str
    ) -> "ConversationSessionDTO":
        """Return a copy with the screen context updated.

        Note: does NOT refresh lastModified (unlike with_last_message).
        """
        return self.model_copy(update={"pantallaContexto": pantalla_contexto})
|
||||
34
src/capa_de_integracion/models/llm_webhook.py
Normal file
34
src/capa_de_integracion/models/llm_webhook.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
LLM webhook data models for Dialogflow CX webhook integration.
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SessionInfoDTO(BaseModel):
    """Dialogflow CX session info wrapper carrying session parameters."""

    # Session parameters; empty dict by default.
    parameters: dict[str, Any] = Field(default_factory=dict)
||||
|
||||
|
||||
class WebhookRequestDTO(BaseModel):
    """Incoming Dialogflow CX webhook request (camelCase JSON)."""

    # Defaults to an empty SessionInfoDTO so requests without sessionInfo
    # still parse.
    sessionInfo: SessionInfoDTO = Field(
        default_factory=SessionInfoDTO, alias="sessionInfo"
    )

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class WebhookResponseDTO(BaseModel):
    """Outgoing Dialogflow CX webhook response (camelCase JSON)."""

    # Required: the (possibly updated) session parameters to return.
    sessionInfo: SessionInfoDTO = Field(..., alias="sessionInfo")

    model_config = {"populate_by_name": True}
|
||||
86
src/capa_de_integracion/models/notification.py
Normal file
86
src/capa_de_integracion/models/notification.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Notification-related data models.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class NotificationDTO(BaseModel):
    """
    Individual notification event record.

    Represents a notification to be stored in Firestore and cached in Redis.
    Field names are Spanish camelCase to match the stored document schema.
    """

    idNotificacion: str = Field(
        ..., alias="idNotificacion", description="Unique notification ID"
    )
    telefono: str = Field(..., alias="telefono", description="User phone number")
    # NOTE(review): naive local time; confirm whether UTC is expected.
    timestampCreacion: datetime = Field(
        default_factory=datetime.now,
        alias="timestampCreacion",
        description="Notification creation timestamp",
    )
    texto: str = Field(..., alias="texto", description="Notification text content")
    nombreEventoDialogflow: str = Field(
        default="notificacion",
        alias="nombreEventoDialogflow",
        description="Dialogflow event name",
    )
    codigoIdiomaDialogflow: str = Field(
        default="es",
        alias="codigoIdiomaDialogflow",
        description="Dialogflow language code",
    )
    parametros: dict[str, Any] = Field(
        default_factory=dict,
        alias="parametros",
        description="Session parameters for Dialogflow",
    )
    # Free-form status string; "active" by default. Other values are not
    # visible here — confirm the full set against the consumers.
    status: str = Field(
        default="active", alias="status", description="Notification status"
    )

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class NotificationSessionDTO(BaseModel):
    """Notification session containing multiple notifications for a phone number."""

    sessionId: str = Field(..., alias="sessionId", description="Session identifier")
    telefono: str = Field(..., alias="telefono", description="User phone number")
    # NOTE(review): naive local times; confirm whether UTC is expected.
    fechaCreacion: datetime = Field(
        default_factory=datetime.now,
        alias="fechaCreacion",
        description="Session creation time",
    )
    ultimaActualizacion: datetime = Field(
        default_factory=datetime.now,
        alias="ultimaActualizacion",
        description="Last update time",
    )
    notificaciones: list[NotificationDTO] = Field(
        default_factory=list,
        alias="notificaciones",
        description="List of notification events",
    )

    model_config = {"populate_by_name": True}
||||
|
||||
|
||||
class ExternalNotRequestDTO(BaseModel):
    """External notification push request from a client."""

    texto: str = Field(..., alias="texto", description="Notification text")
    telefono: str = Field(..., alias="telefono", description="User phone number")
    # Hidden metadata (e.g. structured details the LLM may answer from);
    # exposed to clients as "parametrosOcultos".
    parametros_ocultos: dict[str, Any] | None = Field(
        None, alias="parametrosOcultos", description="Hidden parameters (metadata)"
    )

    model_config = {"populate_by_name": True}
||||
43
src/capa_de_integracion/models/quick_replies.py
Normal file
43
src/capa_de_integracion/models/quick_replies.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Quick Replies data models.
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class QuestionDTO(BaseModel):
    """Individual FAQ question shown on a quick-reply screen."""

    # Question title displayed to the user.
    titulo: str
    # Optional short description.
    descripcion: str | None = None
    # Canned answer returned when the question is selected.
    respuesta: str
|
||||
|
||||
|
||||
class QuickReplyDTO(BaseModel):
    """Quick-reply screen definition: header/body copy plus its FAQ list."""

    header: str | None = None
    body: str | None = None
    # Label of the action button (e.g. "Ver").
    button: str | None = None
    # Section heading shown above the question list.
    header_section: str | None = None
    # FAQ entries for this screen; empty list by default.
    preguntas: list[QuestionDTO] = Field(default_factory=list)
|
||||
|
||||
|
||||
class QuickReplyScreenRequestDTO(BaseModel):
    """Request to load a quick reply screen."""

    # Raw user object; expected to carry at least a "telefono" key
    # (see the telefono property below).
    usuario: dict = Field(..., alias="usuario")
    canal: str = Field(..., alias="canal")
    tipo: str = Field(..., alias="tipo")
    # Screen identifier selecting which quick-reply content to load.
    pantalla_contexto: str = Field(..., alias="pantallaContexto")

    model_config = {"populate_by_name": True}

    @property
    def telefono(self) -> str:
        """Extract phone number from usuario (empty string if absent)."""
        return self.usuario.get("telefono", "")
|
||||
@@ -0,0 +1,93 @@
|
||||
Hay un sistema de conversaciones entre un agente y un usuario. Durante
|
||||
la conversación, una notificación puede entrar a la conversación de forma
|
||||
abrupta, de tal forma que la siguiente interacción del usuario después
|
||||
de la notificación puede corresponder a la conversación que estaba
|
||||
sucediendo o puede ser un seguimiento a la notificación.
|
||||
|
||||
Tu tarea es identificar si la siguiente interacción del usuario es un
|
||||
seguimiento a la notificación o una continuación de la conversación.
|
||||
|
||||
Recibirás esta información:
|
||||
|
||||
- HISTORIAL_CONVERSACION: El diálogo entre el agente y el usuario antes
|
||||
de la notificación.
|
||||
- INTERRUPCION_NOTIFICACION: La notificación. Esta puede o no traer parámetros
|
||||
los cuales refieren a detalles específicos de la notificación. Por ejemplo:
|
||||
{ "vigencia": "12 de septiembre de 2025", "credito_tipo": "platinum" }
|
||||
- INTERACCION_USUARIO: La siguiente interacción del usuario después de
|
||||
la notificación.
|
||||
|
||||
Reglas:
|
||||
- Solo debes responder una palabra: NOTIFICATION o CONVERSATION. No agregues
|
||||
o inventes otra palabra.
|
||||
- Clasifica como NOTIFICATION si la siguiente interacción del usuario
|
||||
es una clara respuesta o seguimiento a la notificación.
|
||||
- Clasifica como CONVERSATION si la siguiente interacción del usuario
|
||||
es un claro seguimiento al histórico de la conversación.
|
||||
- Si la siguiente interacción del usuario es ambigua, clasifica
|
||||
como CONVERSATION.
|
||||
|
||||
Ejemplos:
|
||||
|
||||
Ejemplo 1:
|
||||
HISTORIAL_CONVERSACION:
|
||||
Agente: Claro, para un crédito de vehículo, las tasas actuales inician en el 1.2%% mensual.
|
||||
Usuario: Entiendo, ¿y el plazo máximo de cuánto sería?
|
||||
INTERRUPCION_NOTIFICACION:
|
||||
Tu pago de la tarjeta de crédito por $1,500.00 ha sido procesado.
|
||||
INTERACCION_USUARIO:
|
||||
perfecto, cuando es la fecha de corte?
|
||||
Clasificación: NOTIFICATION
|
||||
|
||||
Ejemplo 2:
|
||||
HISTORIAL_CONVERSACION:
|
||||
Agente: No es necesario, puedes completar todo el proceso para abrir tu cuenta desde nuestra app.
|
||||
Usuario: Ok
|
||||
Agente: ¿Necesitas algo más?
|
||||
INTERRUPCION_NOTIFICACION:
|
||||
Tu estado de cuenta de Julio ya está disponible.
|
||||
Parametros: {"fecha_corte": "30 de Agosto del 2025", "tipo_cuenta": "credito"}
|
||||
INTERACCION_USUARIO:
|
||||
que documentos necesito?
|
||||
Clasificación: CONVERSATION
|
||||
|
||||
Ejemplo 3:
|
||||
HISTORIAL_CONVERSACION:
|
||||
Agente: Ese fondo de inversión tiene un perfil de alto riesgo, pero históricamente ha dado un rendimiento superior al 15%% anual.
|
||||
Usuario: ok, entiendo
|
||||
INTERRUPCION_NOTIFICACION:
|
||||
Alerta: Tu cuenta de ahorros tiene un saldo bajo de $50.00.
|
||||
Parametros: {"fecha_retiro": "5 de septiembre del 2025", "tipo_cuenta": "ahorros"}
|
||||
INTERACCION_USUARIO:
|
||||
cuando fue el ultimo retiro?
|
||||
Clasificación: NOTIFICATION
|
||||
|
||||
Ejemplo 4:
|
||||
HISTORIAL_CONVERSACION:
|
||||
Usuario: Que es el CAT?
|
||||
Agente: El CAT (Costo Anual Total) es un indicador financiero, expresado en un porcentaje anual, que refleja el costo total de un crédito, incluyendo no solo la tasa de interés, sino también todas las comisiones, gastos y otros cobros que genera.
|
||||
INTERRUPCION_NOTIFICACION:
|
||||
Alerta: Se realizó un retiro en efectivo por $100.
|
||||
INTERACCION_USUARIO:
|
||||
y este se aplica solo si dejo de pagar?
|
||||
Clasificación: CONVERSATION
|
||||
|
||||
Ejemplo 5:
|
||||
HISTORIAL_CONVERSACION:
|
||||
Usuario: Cual es la tasa de hipoteca que manejan?
|
||||
Agente: La tasa de una hipoteca depende tanto de factores económicos generales (inflación, tasas de referencia del banco central) como de factores individuales del solicitante (historial crediticio, monto del pago inicial, ingresos, endeudamiento, etc.)
|
||||
INTERRUPCION_NOTIFICACION:
|
||||
Hola, [Alias]: Pasó algo con la captura de tu INE y no se completó tu solicitud de tarjeta de crédito con folio 3421.
|
||||
Parametros: {"solicitud_tarjeta_credito_vigencia": "12 de septiembre de 2025", "solicitud_tarjeta_credito_error": "Error con el formato de la captura", "solicitud_tarjeta_credito_tipo": "platinum"}
|
||||
INTERACCION_USUARIO:
|
||||
cual fue el error?
|
||||
Clasificación: NOTIFICATION
|
||||
|
||||
Tarea:
|
||||
HISTORIAL_CONVERSACION:
|
||||
%s
|
||||
INTERRUPCION_NOTIFICACION:
|
||||
%s
|
||||
INTERACCION_USUARIO:
|
||||
%s
|
||||
Clasificación:
|
||||
@@ -0,0 +1,84 @@
|
||||
Eres un agente conversacional de soporte al usuario, amable, servicial y conciso.
|
||||
|
||||
Recibirás cuatro piezas de información:
|
||||
1. HISTORIAL_CONVERSACION: El diálogo previo con el usuario. Úsalo para entender el contexto y evitar repetir información.
|
||||
2. NOTIFICACION: El texto del mensaje que el usuario acaba de recibir.
|
||||
3. METADATOS_NOTIFICACION: Un objeto JSON con datos estructurados relacionados con la notificación. Esta es tu fuente de verdad principal.
|
||||
4. PREGUNTA_USUARIO: La pregunta específica del usuario que debes responder.
|
||||
|
||||
Tu objetivo es sintetizar la información de estas fuentes para dar la respuesta más directa y útil posible.
|
||||
|
||||
**Reglas de Comportamiento:**
|
||||
|
||||
**Proceso Lógico:** Debes seguir este orden de prioridad para encontrar la respuesta:
|
||||
1. Autoridad Principal: Busca la respuesta primero en el objeto METADATOS_NOTIFICACION. Los datos aquí tienen la máxima autoridad.
|
||||
2. Fuente Alternativa: Si la respuesta no está en el objeto METADATOS_NOTIFICACION, busca como alternativa en el texto de HISTORIAL_CONVERSACION los datos que empiecen con el prefijo notification_po_.
|
||||
3. Contexto: Utiliza el HISTORIAL_CONVERSACION únicamente para dar contexto y asegurarte de no repetir algo que ya se dijo
|
||||
|
||||
**Manejo de Datos Faltantes:** Si la respuesta a la PREGUNTA_USUARIO no se encuentra ni en METADATOS_NOTIFICACION ni en el HISTORIAL_CONVERSACION (con el prefijo notification_po_), entonces debes responder exactamente con la palabra DIALOGFLOW. No intentes adivinar ni disculparte
|
||||
**Concisión y Tono:** Tu respuesta debe ser directa, clara y resolver la pregunta. Mantén un tono profesional, amable y servicial.
|
||||
**Idioma:** Responde siempre en el mismo idioma de la PREGUNTA_USUARIO.
|
||||
|
||||
Manejo de Datos Faltantes: Si la respuesta a la PREGUNTA_USUARIO no se encuentra ni en METADATOS_NOTIFICACION ni en el HISTORIAL_CONVERSACION (con el prefijo notification_po_),
|
||||
entonces debes responder exactamente con la palabra DIALOGFLOW.
|
||||
No intentes adivinar ni disculparte.
|
||||
|
||||
Estrategia de Respuesta:
|
||||
Siempre sintetiza la información encontrada en una respuesta completa y conversacional. No devuelvas solo el dato. Utiliza el dato para construir una frase que sea útil y siga el tono. Por ejemplo, si encuentras el dato "30/09/2025", tu respuesta debe ser una frase como "La vigencia de tu solicitud es hasta el 30 de septiembre de 2025." o similar.
|
||||
|
||||
**Ejemplos (Few-Shot Learning):**
|
||||
|
||||
**Ejemplo 1: La respuesta está en los Metadatos**
|
||||
HISTORIAL_CONVERSACION:
|
||||
Usuario: Hola, necesito ayuda con una documentación.
|
||||
Agente: Claro, ¿en qué puedo ayudarte?
|
||||
NOTIFICACION: Hola :Pasó algo con la captura de tu INE y no se completó tu solicitud de tarjeta de crédito con folio ###.¡Reinténtalo cuando quieras! Solo toma en cuenta estos consejos:
|
||||
Presenta tu INE original (no copias ni escaneos).📅Revisa que esté vigente y sin tachaduras.📷 Confirma que la fotografía sea clara.🏠 Asegúrate de que la dirección sea legible.
|
||||
Estamos listos para recibirte.
|
||||
METADATOS_NOTIFICACION: {
|
||||
"parametrosOcultos": {
|
||||
"vigencia": "30/09/2025"
|
||||
}
|
||||
}
|
||||
PREGUNTA_USUARIO: ¿Hasta cuando esta disponible esta solicitud?
|
||||
Respuesta: Tienes hasta el 30 de septiembre de 2025 para revisarlos.
|
||||
|
||||
**Ejemplo 2: Poca información encontrada en el texto de la Notificación**
|
||||
HISTORIAL_CONVERSACION:
|
||||
Usuario: Hola.
|
||||
Agente: ¡Qué onda! Soy Beto, tu asistente virtual de Sigma. ¿Como te puedo ayudar hoy? 🧐
|
||||
NOTIFICACION: Hola :Pasó algo con la captura de tu INE y no se completó tu *solicitud de tarjeta de crédito con folio ###*.
|
||||
¡Reinténtalo cuando quieras! Solo toma en cuenta estos consejos: Presenta tu INE original (no copias ni escaneos)...
|
||||
Estamos listos para recibirte.
|
||||
METADATOS_NOTIFICACION: {
|
||||
"parametrosOcultos": {
|
||||
"vigencia": "30/09/2025"
|
||||
}
|
||||
}
|
||||
PREGUNTA_USUARIO: Mi INE tiene algunas tachaduras y en general esta en mal estado
|
||||
Respuesta: DIALOGFLOW
|
||||
|
||||
**Ejemplo 3: Información no encontrada en ninguna fuente**
|
||||
HISTORIAL_CONVERSACION:
|
||||
Usuario: ¿Cómo van mis trámites?
|
||||
Agente: Veo que tienes una cita de mantenimiento programada.
|
||||
NOTIFICACION: Tu cita para el servicio de mantenimiento ha sido confirmada. Por favor, llega 15 minutos antes.
|
||||
METADATOS_NOTIFICACION: {
|
||||
"tipo_servicio": "mantenimiento rutinario",
|
||||
"ubicacion": "Sucursal Centro",
|
||||
"id_cita": "C-182736"
|
||||
}
|
||||
PREGUNTA_USUARIO: Perfecto, ¿cuál será el costo del mantenimiento?
|
||||
Respuesta: DIALOGFLOW
|
||||
|
||||
Historial de Conversación:
|
||||
%s
|
||||
|
||||
Notificación:
|
||||
%s
|
||||
|
||||
Metadatos de la Notificación:
|
||||
%s
|
||||
|
||||
Pregunta del Usuario:
|
||||
%s
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Capsulas"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Descubre"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Detalle TDC"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Detalle TDD"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Finanzas"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Home"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Inversiones"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Lealtad"}
|
||||
18
src/capa_de_integracion/resources/quick_replies/pagos.json
Normal file
18
src/capa_de_integracion/resources/quick_replies/pagos.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"header": "preguntas frecuentes",
|
||||
"body": "Aquí tienes las preguntas frecuentes que suelen hacernos algunos de nuestros clientes",
|
||||
"button": "Ver",
|
||||
"header_section": "preguntas sobre pagos",
|
||||
"preguntas": [
|
||||
{
|
||||
"titulo": "Donde veo mi historial de pagos?",
|
||||
"descripcion": "View your recent payments",
|
||||
"respuesta": "puedes visualizar esto en la opcion X de tu app"
|
||||
},
|
||||
{
|
||||
"titulo": "Pregunta servicio A",
|
||||
"descripcion": "descripcion servicio A",
|
||||
"respuesta": "puedes ver info de servicio A en tu app"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Prestamos"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Retiro sin tarjeta"}
|
||||
@@ -0,0 +1 @@
|
||||
{"titulo": "Transferencia"}
|
||||
25
src/capa_de_integracion/services/__init__.py
Normal file
25
src/capa_de_integracion/services/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
"""Services module."""
|
||||
|
||||
from .dialogflow_client import DialogflowClientService
|
||||
from .gemini_client import GeminiClientService, GeminiClientException
|
||||
from .conversation_manager import ConversationManagerService
|
||||
from .message_filter import MessageEntryFilter
|
||||
from .notification_manager import NotificationManagerService
|
||||
from .notification_context_resolver import NotificationContextResolver
|
||||
from .dlp_service import DLPService
|
||||
from .llm_response_tuner import LlmResponseTunerService
|
||||
from .mappers import NotificationContextMapper, ConversationContextMapper
|
||||
|
||||
__all__ = [
|
||||
"DialogflowClientService",
|
||||
"GeminiClientService",
|
||||
"GeminiClientException",
|
||||
"ConversationManagerService",
|
||||
"MessageEntryFilter",
|
||||
"NotificationManagerService",
|
||||
"NotificationContextResolver",
|
||||
"DLPService",
|
||||
"LlmResponseTunerService",
|
||||
"NotificationContextMapper",
|
||||
"ConversationContextMapper",
|
||||
]
|
||||
847
src/capa_de_integracion/services/conversation_manager.py
Normal file
847
src/capa_de_integracion/services/conversation_manager.py
Normal file
@@ -0,0 +1,847 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Conversation manager service - central orchestrator for conversations.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from ..config import Settings
|
||||
from ..models import (
|
||||
ExternalConvRequestDTO,
|
||||
DetectIntentRequestDTO,
|
||||
DetectIntentResponseDTO,
|
||||
ConversationSessionDTO,
|
||||
ConversationEntryDTO,
|
||||
QueryInputDTO,
|
||||
TextInputDTO,
|
||||
QueryParamsDTO,
|
||||
)
|
||||
from ..utils import SessionIdGenerator
|
||||
from .dialogflow_client import DialogflowClientService
|
||||
from .redis_service import RedisService
|
||||
from .firestore_service import FirestoreService
|
||||
from .dlp_service import DLPService
|
||||
from .message_filter import MessageEntryFilter
|
||||
from .notification_context_resolver import NotificationContextResolver
|
||||
from .llm_response_tuner import LlmResponseTunerService
|
||||
from .mappers import NotificationContextMapper, ConversationContextMapper
|
||||
from .quick_reply_content import QuickReplyContentService
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConversationManagerService:
|
||||
"""
|
||||
Central orchestrator for managing user conversations.
|
||||
|
||||
Integrates Data Loss Prevention (DLP), message classification, routing based
|
||||
on session context (pantallaContexto for quick replies), and hybrid AI logic
|
||||
for notification-driven conversations.
|
||||
|
||||
Routes traffic based on session context:
|
||||
1. If 'pantallaContexto' is present (not stale), delegates to QuickRepliesManagerService
|
||||
2. Otherwise, uses MessageEntryFilter (Gemini) to classify message:
|
||||
a) CONVERSATION: Standard Dialogflow flow with conversation history
|
||||
b) NOTIFICATION: Uses NotificationContextResolver (Gemini) to answer or delegate to Dialogflow
|
||||
|
||||
All conversation turns are persisted using reactive write-back pattern:
|
||||
Redis first (fast), then async to Firestore (persistent).
|
||||
"""
|
||||
|
||||
    # Presumably the inactivity window (minutes) after which a session is
    # considered expired — not used in the visible code; confirm.
    SESSION_RESET_THRESHOLD_MINUTES = 30
    # Minutes after which a stored 'pantallaContexto' is treated as stale
    # and ignored (see _is_pantalla_context_valid).
    SCREEN_CONTEXT_TIMEOUT_MINUTES = 10
    # Dialogflow session-parameter keys used to pass conversation history.
    CONV_HISTORY_PARAM = "conversation_history"
    HISTORY_PARAM = "historial"
|
||||
    def __init__(
        self,
        settings: Settings,
        dialogflow_client: DialogflowClientService,
        redis_service: RedisService,
        firestore_service: FirestoreService,
        dlp_service: DLPService,
        message_filter: MessageEntryFilter,
        notification_context_resolver: NotificationContextResolver,
        llm_response_tuner: LlmResponseTunerService,
    ):
        """Initialize the conversation manager with its collaborators.

        Args:
            settings: Application configuration.
            dialogflow_client: Client for Dialogflow detect-intent calls.
            redis_service: Fast session cache (primary lookup).
            firestore_service: Persistent session/conversation store.
            dlp_service: Data Loss Prevention obfuscation service.
            message_filter: Classifier routing messages as CONVERSATION
                vs NOTIFICATION (Gemini-based per the class docstring).
            notification_context_resolver: Resolves notification follow-ups,
                answering directly or delegating to Dialogflow.
            llm_response_tuner: Post-processing for LLM responses.
        """
        self.settings = settings
        self.dialogflow_client = dialogflow_client
        self.redis_service = redis_service
        self.firestore_service = firestore_service
        self.dlp_service = dlp_service
        self.message_filter = message_filter
        self.notification_context_resolver = notification_context_resolver
        self.llm_response_tuner = llm_response_tuner

        # Initialize mappers that turn stored history into prompt context;
        # the conversation mapper is bounded by message count and age.
        self.notification_mapper = NotificationContextMapper()
        self.conversation_mapper = ConversationContextMapper(
            message_limit=settings.conversation_context_message_limit,
            days_limit=settings.conversation_context_days_limit,
        )

        # Quick reply service
        self.quick_reply_service = QuickReplyContentService(settings)

        logger.info("ConversationManagerService initialized successfully")
|
||||
|
||||
    async def manage_conversation(
        self, request: ExternalConvRequestDTO
    ) -> DetectIntentResponseDTO:
        """
        Main entry point for managing conversations.

        Flow:
        1. Obfuscate message with DLP
        2. Check for pantallaContexto (quick replies mode) in the cached
           session; if present and fresh, delegate to the quick-reply flow
        3. If no (valid) pantallaContexto, continue with standard flow
           (_continue_managing_conversation), which classifies the message
           and routes it

        Args:
            request: External conversation request from client

        Returns:
            Detect intent response from Dialogflow

        Raises:
            Exception: Any failure is logged with traceback and re-raised
                for the caller (HTTP layer) to translate.
        """
        try:
            # Step 1: DLP obfuscation — everything downstream sees only the
            # obfuscated text.
            obfuscated_message = await self.dlp_service.get_obfuscated_string(
                request.mensaje,
                self.settings.dlp_template_complete_flow,
            )

            # Rebuild the request with the obfuscated message; all other
            # fields are carried over unchanged.
            obfuscated_request = ExternalConvRequestDTO(
                mensaje=obfuscated_message,
                usuario=request.usuario,
                canal=request.canal,
                tipo=request.tipo,
                pantalla_contexto=request.pantalla_contexto,
            )

            # Step 2: Check for pantallaContexto in existing session
            telefono = request.usuario.telefono
            existing_session = await self.redis_service.get_session(telefono)

            if existing_session and existing_session.pantallaContexto:
                # Check if pantallaContexto is stale
                # (SCREEN_CONTEXT_TIMEOUT_MINUTES)
                if self._is_pantalla_context_valid(existing_session):
                    logger.info(
                        f"Detected 'pantallaContexto' in session: {existing_session.pantallaContexto}. "
                        f"Delegating to QuickReplies flow."
                    )
                    return await self._manage_quick_reply_conversation(
                        obfuscated_request, existing_session
                    )
                else:
                    # Stale context: fall through to the normal flow.
                    logger.info(
                        "Detected STALE 'pantallaContexto'. Ignoring and proceeding with normal flow."
                    )

            # Step 3: Continue with standard conversation flow
            return await self._continue_managing_conversation(obfuscated_request)

        except Exception as e:
            logger.error(f"Error managing conversation: {str(e)}", exc_info=True)
            raise
|
||||
|
||||
def _is_pantalla_context_valid(self, session: ConversationSessionDTO) -> bool:
|
||||
"""Check if pantallaContexto is still valid (not stale)."""
|
||||
if not session.lastModified:
|
||||
return False
|
||||
|
||||
time_diff = datetime.now() - session.lastModified
|
||||
return time_diff < timedelta(minutes=self.SCREEN_CONTEXT_TIMEOUT_MINUTES)
|
||||
|
||||
async def _manage_quick_reply_conversation(
|
||||
self,
|
||||
request: ExternalConvRequestDTO,
|
||||
session: ConversationSessionDTO,
|
||||
) -> DetectIntentResponseDTO:
|
||||
"""
|
||||
Handle conversation within Quick Replies context.
|
||||
|
||||
User is in a quick reply screen, treat their message as a FAQ query.
|
||||
|
||||
Args:
|
||||
request: External request
|
||||
session: Existing session with pantallaContexto
|
||||
|
||||
Returns:
|
||||
Dialogflow response
|
||||
"""
|
||||
# Build Dialogflow request with pantallaContexto
|
||||
dialogflow_request = self._build_dialogflow_request(
|
||||
request, session, request.mensaje
|
||||
)
|
||||
|
||||
# Add pantallaContexto to parameters
|
||||
if dialogflow_request.query_params:
|
||||
dialogflow_request.query_params.parameters["pantalla_contexto"] = (
|
||||
session.pantallaContexto
|
||||
)
|
||||
|
||||
# Call Dialogflow
|
||||
response = await self.dialogflow_client.detect_intent(
|
||||
session.sessionId, dialogflow_request
|
||||
)
|
||||
|
||||
# Persist conversation turn
|
||||
await self._persist_conversation_turn(session, request.mensaje, response)
|
||||
|
||||
return response
|
||||
|
||||
async def _continue_managing_conversation(
    self, request: ExternalConvRequestDTO
) -> DetectIntentResponseDTO:
    """Run the standard conversation flow.

    Steps:
    1. Validate the phone number.
    2. Look the session up in Redis (fast path).
    3. On a cache miss, fall back to a full Firestore lookup.

    Args:
        request: External conversation request

    Returns:
        Dialogflow response

    Raises:
        ValueError: when the request carries no phone number.
    """
    telefono = request.usuario.telefono
    nickname = getattr(request.usuario, "nickname", None)

    # Guard clause: a session cannot be keyed without a phone number.
    if not telefono or not telefono.strip():
        raise ValueError("Phone number is required to manage conversation sessions")

    logger.info(f"Primary Check (Redis): Looking up session for phone: {telefono}")

    session = await self.redis_service.get_session(telefono)
    if session:
        return await self._handle_message_classification(request, session)

    # Cache miss: consult the persistent store instead.
    logger.info(
        "No session found in Redis. Performing full lookup to Firestore."
    )
    return await self._full_lookup_and_process(request, telefono, nickname)
|
||||
|
||||
async def _handle_message_classification(
    self,
    request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
) -> DetectIntentResponseDTO:
    """
    Classify message using MessageEntryFilter and route accordingly.

    Checks for active notifications and uses Gemini to determine if the
    user's message is about the notification or general conversation.
    Any of the early returns falls back to the standard conversation flow
    when no usable notification context exists.

    Args:
        request: External request
        session: Existing conversation session

    Returns:
        Dialogflow response
    """
    telefono = request.usuario.telefono
    user_message = request.mensaje

    # Get active notification for this phone
    notification_id = await self.redis_service.get_notification_id_for_phone(
        telefono
    )

    if not notification_id:
        # No notification, proceed with standard conversation
        return await self._proceed_with_conversation(request, session)

    # Get notification session
    notification_session = await self.redis_service.get_notification_session(
        notification_id
    )

    # A dangling notification id with no session (or an empty one) is treated
    # the same as "no notification".
    if not notification_session or not notification_session.notificaciones:
        return await self._proceed_with_conversation(request, session)

    # Find most recent active notification (newest timestampCreacion first).
    active_notification = None
    for notif in sorted(
        notification_session.notificaciones,
        key=lambda n: n.timestampCreacion,
        reverse=True,
    ):
        if notif.status == "active":
            active_notification = notif
            break

    if not active_notification:
        return await self._proceed_with_conversation(request, session)

    # Get conversation history from Redis (fast in-memory cache)
    messages_data = await self.redis_service.get_messages(session.sessionId)
    # Convert message dicts to ConversationEntryDTO objects
    conversation_entries = [
        ConversationEntryDTO.model_validate(msg) for msg in messages_data
    ]
    conversation_history = self.conversation_mapper.to_text_from_entries(
        conversation_entries
    )
    # The classifier expects a string, never None.
    if not conversation_history:
        conversation_history = ""

    # Classify message using MessageEntryFilter (Gemini)
    notification_text = self.notification_mapper.to_text(active_notification)
    classification = await self.message_filter.classify_message(
        query_input_text=user_message,
        notifications_json=notification_text,
        conversation_json=conversation_history,
    )

    logger.info(f"Message classified as: {classification}")

    if classification == self.message_filter.CATEGORY_NOTIFICATION:
        # Route to notification conversation flow
        return await self._start_notification_conversation(
            request, active_notification, session, conversation_entries
        )
    else:
        # Route to standard conversation flow
        return await self._proceed_with_conversation(request, session)
|
||||
|
||||
async def _proceed_with_conversation(
    self,
    request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
) -> DetectIntentResponseDTO:
    """
    Proceed with standard Dialogflow conversation.

    Checks session age:
    - If < 30 minutes: continue with the existing session.
    - If >= 30 minutes: create a new session and inject conversation history.

    Args:
        request: External request
        session: Existing session

    Returns:
        Dialogflow response
    """
    # (fix) Removed a stray no-op ``datetime.now()`` expression statement
    # whose result was discarded.

    # Check session age
    if self._is_session_valid(session):
        logger.info(
            f"Recent Session Found: Session {session.sessionId} is within "
            f"the {self.SESSION_RESET_THRESHOLD_MINUTES}-minute threshold. "
            f"Proceeding to Dialogflow."
        )
        return await self._process_dialogflow_request(
            session, request, is_new_session=False
        )

    # Session expired: start a new session and carry the history forward.
    logger.info(
        f"Old Session Found: Session {session.sessionId} is older than "
        f"the {self.SESSION_RESET_THRESHOLD_MINUTES}-minute threshold."
    )

    new_session_id = SessionIdGenerator.generate()
    telefono = request.usuario.telefono
    nickname = getattr(request.usuario, "nickname", None)
    user_id = nickname or telefono

    new_session = ConversationSessionDTO.create(
        session_id=new_session_id,
        user_id=user_id,
        telefono=telefono,
    )

    logger.info(
        f"Creating new session {new_session_id} from old session "
        f"{session.sessionId} due to timeout."
    )

    # Get conversation history from the old session (bounded fetch).
    old_entries = await self.firestore_service.get_entries(
        session.sessionId,
        limit=self.settings.conversation_context_message_limit,
    )

    # Apply limits (30 days / 60 messages / 50KB)
    conversation_history = self.conversation_mapper.to_text_with_limits(
        session, old_entries
    )

    # Build the request against the NEW session and inject the history.
    dialogflow_request = self._build_dialogflow_request(
        request, new_session, request.mensaje
    )
    dialogflow_request.query_params.parameters[self.CONV_HISTORY_PARAM] = (
        conversation_history
    )

    return await self._process_dialogflow_request(
        new_session,
        request,
        is_new_session=True,
        dialogflow_request=dialogflow_request,
    )
|
||||
|
||||
async def _start_notification_conversation(
    self,
    request: ExternalConvRequestDTO,
    notification: any,  # NOTE(review): builtin ``any`` misused as annotation; should be typing.Any — confirm and fix with the module imports
    session: ConversationSessionDTO,
    conversation_entries: list[ConversationEntryDTO],
) -> DetectIntentResponseDTO:
    """
    Start notification-driven conversation.

    Uses NotificationContextResolver (Gemini) to determine if the question
    can be answered directly from notification metadata or should be
    delegated to Dialogflow.

    Args:
        request: External request
        notification: Active notification
        session: Conversation session
        conversation_entries: Recent conversation history

    Returns:
        Dialogflow response
    """
    user_message = request.mensaje
    telefono = request.usuario.telefono

    # Prepare context for NotificationContextResolver.
    # (fix) Removed a discarded ``notification_mapper.to_text(...)`` call —
    # only the JSON form is consumed below.
    notification_json = self.notification_mapper.to_json(notification)
    conversation_history = self.conversation_mapper.to_text_from_entries(
        conversation_entries
    )

    # Convert notification parameters to a metadata string, keeping only the
    # keys the resolver understands ("notification_po_" prefix).
    metadata = ""
    if notification.parametros:
        import json

        filtered_params = {
            key: value
            for key, value in notification.parametros.items()
            if key.startswith("notification_po_")
        }
        metadata = json.dumps(filtered_params, ensure_ascii=False)

    # Resolve context using Gemini
    resolution = await self.notification_context_resolver.resolve_context(
        query_input_text=user_message,
        notifications_json=notification_json,
        conversation_json=conversation_history,
        metadata=metadata,
        user_id=session.userId,
        session_id=session.sessionId,
        user_phone_number=telefono,
    )

    if resolution == self.notification_context_resolver.CATEGORY_DIALOGFLOW:
        # Delegate to Dialogflow
        logger.info(
            "NotificationContextResolver returned DIALOGFLOW. Sending to Dialogflow."
        )

        dialogflow_request = self._build_dialogflow_request(
            request, session, user_message
        )

        # Check if session is older than 30 minutes.
        # (fix) Removed a redundant function-local ``from datetime import
        # datetime, timedelta`` — the module-level imports are already used
        # by the sibling session-validity helpers.
        # NOTE(review): assumes session.lastModified is never None here —
        # confirm upstream always sets it before this path is reached.
        time_diff = datetime.now() - session.lastModified
        if time_diff >= timedelta(minutes=self.SESSION_RESET_THRESHOLD_MINUTES):
            # Session is old, inject conversation history
            logger.info(
                f"Session is older than {self.SESSION_RESET_THRESHOLD_MINUTES} minutes. "
                "Injecting conversation history."
            )
            # Get conversation history with limits
            firestore_entries = await self.firestore_service.get_entries(
                session.sessionId
            )
            conversation_history = self.conversation_mapper.to_text_with_limits(
                session, firestore_entries
            )
            dialogflow_request.query_params.parameters[self.CONV_HISTORY_PARAM] = (
                conversation_history
            )

        # Always add notification parameters
        if notification.parametros:
            dialogflow_request.query_params.parameters.update(notification.parametros)

        response = await self.dialogflow_client.detect_intent(
            session.sessionId, dialogflow_request
        )

        await self._persist_conversation_turn(session, user_message, response)
        return response
    else:
        # LLM provided direct answer
        logger.info(
            "NotificationContextResolver provided direct answer. Storing in Redis."
        )

        # Store LLM response in Redis with UUID
        llm_uuid = str(uuid.uuid4())
        await self.llm_response_tuner.set_value(llm_uuid, resolution)

        # Send LLM_RESPONSE_PROCESSED event to Dialogflow
        event_params = {"uuid": llm_uuid}

        response = await self.dialogflow_client.detect_intent_event(
            session_id=session.sessionId,
            event_name="LLM_RESPONSE_PROCESSED",
            parameters=event_params,
            language_code=self.settings.dialogflow_default_language,
        )

        # Persist LLM turn
        await self._persist_llm_turn(session, user_message, resolution)

        return response
|
||||
|
||||
async def _full_lookup_and_process(
    self,
    request: ExternalConvRequestDTO,
    telefono: str,
    nickname: str | None,
) -> DetectIntentResponseDTO:
    """
    Perform full lookup from Firestore and process conversation.

    Called when session is not found in Redis. When a Firestore session
    exists, a NEW session is created and the old history is injected as a
    Dialogflow parameter; otherwise a brand-new empty session is started.

    Args:
        request: External request
        telefono: User phone number
        nickname: User nickname

    Returns:
        Dialogflow response
    """
    # Try Firestore (by phone number)
    session = await self.firestore_service.get_session_by_phone(telefono)

    if session:
        # Get conversation history (bounded by the configured message limit)
        old_entries = await self.firestore_service.get_entries(
            session.sessionId,
            limit=self.settings.conversation_context_message_limit,
        )

        # Create new session with history injection
        new_session_id = SessionIdGenerator.generate()
        user_id = nickname or telefono

        new_session = ConversationSessionDTO.create(
            session_id=new_session_id,
            user_id=user_id,
            telefono=telefono,
        )

        logger.info(f"Creating new session {new_session_id} after full lookup.")

        # Apply history limits (note: limits computed against the OLD session)
        conversation_history = self.conversation_mapper.to_text_with_limits(
            session, old_entries
        )

        # Build request with history
        dialogflow_request = self._build_dialogflow_request(
            request, new_session, request.mensaje
        )
        dialogflow_request.query_params.parameters[self.CONV_HISTORY_PARAM] = (
            conversation_history
        )

        return await self._process_dialogflow_request(
            new_session,
            request,
            is_new_session=True,
            dialogflow_request=dialogflow_request,
        )
    else:
        # No session found, create brand new session
        logger.info(
            f"No existing session found for {telefono}. Creating new session."
        )
        return await self._create_new_session_and_process(
            request, telefono, nickname
        )
|
||||
|
||||
async def _create_new_session_and_process(
    self,
    request: ExternalConvRequestDTO,
    telefono: str,
    nickname: str | None,
) -> DetectIntentResponseDTO:
    """Create a brand-new session, persist it, and process the request."""
    session_id = SessionIdGenerator.generate()

    session = ConversationSessionDTO.create(
        session_id=session_id,
        user_id=nickname or telefono,
        telefono=telefono,
    )

    # Persist in both the cache and the durable store before first use.
    await self.redis_service.save_session(session)
    await self.firestore_service.save_session(session)

    logger.info(f"Created new session: {session_id} for phone: {telefono}")

    return await self._process_dialogflow_request(
        session, request, is_new_session=True
    )
|
||||
|
||||
async def _process_dialogflow_request(
    self,
    session: ConversationSessionDTO,
    request: ExternalConvRequestDTO,
    is_new_session: bool,
    dialogflow_request: DetectIntentRequestDTO | None = None,
) -> DetectIntentResponseDTO:
    """
    Send the request to Dialogflow and persist the resulting turn.

    Args:
        session: Conversation session
        request: External request
        is_new_session: Whether this is a new session
        dialogflow_request: Pre-built Dialogflow request (optional); built
            from the raw message when not supplied.

    Returns:
        Dialogflow response
    """
    # Build a default request when the caller did not pre-build one.
    if not dialogflow_request:
        dialogflow_request = self._build_dialogflow_request(
            request, session, request.mensaje
        )

    response = await self.dialogflow_client.detect_intent(
        session.sessionId, dialogflow_request
    )

    # Record the turn (write-back persistence never raises).
    await self._persist_conversation_turn(session, request.mensaje, response)

    logger.info(
        f"Successfully processed conversation for session: {session.sessionId}"
    )
    return response
|
||||
|
||||
def _is_session_valid(self, session: ConversationSessionDTO) -> bool:
    """Return True while the session is younger than the reset threshold."""
    last_modified = session.lastModified
    if not last_modified:
        return False
    threshold = timedelta(minutes=self.SESSION_RESET_THRESHOLD_MINUTES)
    return datetime.now() - last_modified < threshold
|
||||
|
||||
def _build_dialogflow_request(
    self,
    external_request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
    message: str,
) -> DetectIntentRequestDTO:
    """Assemble a Dialogflow detect-intent request for this session."""
    query_input = QueryInputDTO(
        text=TextInputDTO(text=message),
        language_code=self.settings.dialogflow_default_language,
    )

    # Session context travels to Dialogflow as query parameters.
    parameters = {
        "telefono": session.telefono,
        "usuario_id": session.userId,
    }
    if session.pantallaContexto:
        parameters["pantalla_contexto"] = session.pantallaContexto

    return DetectIntentRequestDTO(
        query_input=query_input,
        query_params=QueryParamsDTO(parameters=parameters),
    )
|
||||
|
||||
async def _persist_conversation_turn(
    self,
    session: ConversationSessionDTO,
    user_message: str,
    response: DetectIntentResponseDTO,
) -> None:
    """
    Persist conversation turn using reactive write-back pattern.

    Saves the updated session and both entries to Redis first (blocking),
    then schedules an asynchronous write-back to Firestore. Persistence
    failures are logged and swallowed — they never fail the request.

    Args:
        session: Conversation session being updated
        user_message: Raw text the user sent this turn
        response: Dialogflow response for this turn
    """
    try:
        # Update session with last message.
        # BUG FIX: ``ConversationSessionDTO(**session.model_dump(),
        # lastMessage=..., lastModified=...)`` raised TypeError (duplicate
        # keyword arguments) because model_dump() already contains those
        # keys, so persistence silently failed on every turn.
        # model_copy(update=...) is the pydantic-v2 way to override fields.
        updated_session = session.model_copy(
            update={"lastMessage": user_message, "lastModified": datetime.now()}
        )

        # Extract agent-side data from the Dialogflow response.
        response_text = ""
        intent = None
        parameters = None

        if response.queryResult:
            # NOTE(review): DialogflowClientService builds QueryResultDTO
            # with ``responseText`` — the previous ``queryResult.text`` read
            # likely raised AttributeError. Fall back between both spellings
            # defensively; confirm against the DTO definition.
            response_text = (
                getattr(response.queryResult, "text", None)
                or getattr(response.queryResult, "responseText", None)
                or ""
            )
            intent = getattr(response.queryResult, "intent", None)
            parameters = response.queryResult.parameters

        user_entry = ConversationEntryDTO(
            entity="USUARIO",
            type="CONVERSACION",
            timestamp=datetime.now(),
            text=user_message,
            parameters=None,
            intent=None,
        )

        agent_entry = ConversationEntryDTO(
            entity="AGENTE",
            type="CONVERSACION",
            timestamp=datetime.now(),
            text=response_text,
            parameters=parameters,
            intent=intent,
        )

        # Save to Redis (fast, blocking)
        await self.redis_service.save_session(updated_session)
        await self.redis_service.save_message(session.sessionId, user_entry)
        await self.redis_service.save_message(session.sessionId, agent_entry)

        # Save to Firestore (persistent, non-blocking write-back)
        import asyncio

        async def save_to_firestore():
            try:
                await self.firestore_service.save_session(updated_session)
                await self.firestore_service.save_entry(session.sessionId, user_entry)
                await self.firestore_service.save_entry(session.sessionId, agent_entry)
                logger.debug(
                    f"Asynchronously (Write-Back): Entry successfully saved to Firestore for session: {session.sessionId}"
                )
            except Exception as fs_error:
                logger.error(
                    f"Asynchronously (Write-Back): Failed to save to Firestore for session {session.sessionId}: {str(fs_error)}",
                    exc_info=True,
                )

        # Fire-and-forget, but keep a strong reference: the event loop only
        # holds tasks weakly, so an unreferenced task can be garbage
        # collected before it runs.
        task = asyncio.create_task(save_to_firestore())
        pending = getattr(self, "_pending_writes", None)
        if pending is None:
            pending = set()
            self._pending_writes = pending
        pending.add(task)
        task.add_done_callback(pending.discard)

        logger.debug(f"Entry saved to Redis for session: {session.sessionId}")

    except Exception as e:
        logger.error(
            f"Error persisting conversation turn for session {session.sessionId}: {str(e)}",
            exc_info=True,
        )
        # Don't fail the request if persistence fails
||||
|
||||
async def _persist_llm_turn(
    self,
    session: ConversationSessionDTO,
    user_message: str,
    llm_response: str,
) -> None:
    """
    Persist an LLM-generated conversation turn (user + LLM entries).

    Mirrors _persist_conversation_turn: Redis first (blocking), then an
    asynchronous write-back to Firestore. Failures are logged, never raised.

    Args:
        session: Conversation session being updated
        user_message: Raw text the user sent this turn
        llm_response: Direct answer produced by the LLM
    """
    try:
        # Update session.
        # BUG FIX: re-building the DTO with ``**session.model_dump()`` plus
        # explicit lastMessage/lastModified raised TypeError (duplicate
        # keyword arguments); model_copy(update=...) overrides cleanly.
        updated_session = session.model_copy(
            update={"lastMessage": user_message, "lastModified": datetime.now()}
        )

        # BUG FIX: the user entry referenced ``notification.parametros`` but
        # no ``notification`` exists in this scope — a NameError on every
        # call, silently swallowed by the except below, so LLM turns were
        # never persisted. The user turn carries no parameters, matching
        # _persist_conversation_turn.
        user_entry = ConversationEntryDTO(
            entity="USUARIO",
            type="CONVERSACION",
            timestamp=datetime.now(),
            text=user_message,
            parameters=None,
            intent=None,
        )

        llm_entry = ConversationEntryDTO(
            entity="LLM",
            type="LLM",
            timestamp=datetime.now(),
            text=llm_response,
            parameters=None,
            intent=None,
        )

        # Save to Redis (fast, blocking)
        await self.redis_service.save_session(updated_session)
        await self.redis_service.save_message(session.sessionId, user_entry)
        await self.redis_service.save_message(session.sessionId, llm_entry)

        # Save to Firestore (persistent, non-blocking write-back)
        import asyncio

        async def save_to_firestore():
            try:
                await self.firestore_service.save_session(updated_session)
                await self.firestore_service.save_entry(session.sessionId, user_entry)
                await self.firestore_service.save_entry(session.sessionId, llm_entry)
                logger.debug(
                    f"Asynchronously (Write-Back): LLM entry successfully saved to Firestore for session: {session.sessionId}"
                )
            except Exception as fs_error:
                logger.error(
                    f"Asynchronously (Write-Back): Failed to save LLM entry to Firestore for session {session.sessionId}: {str(fs_error)}",
                    exc_info=True,
                )

        # Keep a strong reference so the fire-and-forget task cannot be
        # garbage collected before it runs (asyncio holds tasks weakly).
        task = asyncio.create_task(save_to_firestore())
        pending = getattr(self, "_pending_writes", None)
        if pending is None:
            pending = set()
            self._pending_writes = pending
        pending.add(task)
        task.add_done_callback(pending.discard)

        logger.debug(f"LLM entry saved to Redis for session: {session.sessionId}")

    except Exception as e:
        logger.error(
            f"Error persisting LLM turn for session {session.sessionId}: {str(e)}",
            exc_info=True,
        )
|
||||
133
src/capa_de_integracion/services/data_purge.py
Normal file
133
src/capa_de_integracion/services/data_purge.py
Normal file
@@ -0,0 +1,133 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Data purge service for Redis and Firestore.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from google.cloud import firestore
|
||||
|
||||
from ..config import Settings
|
||||
from .redis_service import RedisService
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DataPurgeService:
    """Service for purging all data from Redis and Firestore."""

    # Number of documents deleted per Firestore batch commit.
    _BATCH_SIZE = 50

    def __init__(self, settings: Settings, redis_service: RedisService):
        """Initialize data purge service.

        Args:
            settings: Application settings (project / database ids).
            redis_service: Connected Redis service whose database is flushed.
        """
        self.settings = settings
        self.redis_service = redis_service
        self.db = firestore.AsyncClient(
            project=settings.gcp_project_id,
            database=settings.firestore_database_id,
        )

    async def purge_all_data(self) -> None:
        """Purge all data from Redis and Firestore.

        Raises:
            Exception: Re-raises any failure from either backend after logging.
        """
        try:
            await self._purge_redis()
            await self._purge_firestore()
            logger.info("Successfully purged all data from Redis and Firestore")
        except Exception as e:
            logger.error(f"Error purging data: {str(e)}", exc_info=True)
            raise

    async def _purge_redis(self) -> None:
        """Purge all data from Redis via FLUSHDB."""
        logger.info("Starting Redis data purge")
        try:
            if not self.redis_service.redis:
                raise RuntimeError("Redis client not connected")

            await self.redis_service.redis.flushdb()
            logger.info("Successfully purged all data from Redis")
        except Exception as e:
            logger.error(f"Error purging data from Redis: {str(e)}", exc_info=True)
            raise

    async def _purge_firestore(self) -> None:
        """Purge conversation and notification collections from Firestore."""
        logger.info("Starting Firestore data purge")
        try:
            app_id = self.settings.gcp_project_id
            conversations_path = f"artifacts/{app_id}/conversations"
            notifications_path = f"artifacts/{app_id}/notifications"

            # Delete mensajes subcollections first so parent documents can be
            # removed without orphaning children.
            logger.info(
                f"Deleting 'mensajes' sub-collections from '{conversations_path}'"
            )
            try:
                conversations_ref = self.db.collection(conversations_path)
                async for doc in conversations_ref.stream():
                    mensajes_ref = doc.reference.collection("mensajes")
                    await self._delete_collection(mensajes_ref, self._BATCH_SIZE)
            except Exception as e:
                self._skip_if_not_found(e, conversations_path)

            # Top-level collections share the same NOT_FOUND-tolerant delete
            # (previously three near-identical try/except blocks).
            await self._purge_collection(conversations_path)
            await self._purge_collection(notifications_path)

            logger.info("Successfully purged Firestore collections")
        except Exception as e:
            logger.error(
                f"Error purging Firestore collections: {str(e)}", exc_info=True
            )
            raise

    async def _purge_collection(self, path: str) -> None:
        """Delete one top-level collection, tolerating a missing collection."""
        logger.info(f"Deleting collection: {path}")
        try:
            await self._delete_collection(self.db.collection(path), self._BATCH_SIZE)
        except Exception as e:
            self._skip_if_not_found(e, path)

    @staticmethod
    def _skip_if_not_found(error: Exception, path: str) -> None:
        """Swallow NOT_FOUND errors with a warning; re-raise anything else."""
        if "NOT_FOUND" in str(error):
            logger.warning(f"Collection '{path}' not found, skipping")
        else:
            raise error

    async def _delete_collection(self, coll_ref, batch_size: int) -> None:
        """Delete a Firestore collection in batches of ``batch_size``.

        Iterative rewrite of the previous recursive implementation — same
        page-by-page behavior without growing the call stack.
        """
        while True:
            docs = [doc async for doc in coll_ref.limit(batch_size).stream()]
            if not docs:
                return

            # Delete this page in a single batched commit.
            batch = self.db.batch()
            for doc in docs:
                batch.delete(doc.reference)
            await batch.commit()

            # A short page means the collection is exhausted.
            if len(docs) < batch_size:
                return

    async def close(self):
        """Close Firestore client."""
        await self.db.close()
|
||||
285
src/capa_de_integracion/services/dialogflow_client.py
Normal file
285
src/capa_de_integracion/services/dialogflow_client.py
Normal file
@@ -0,0 +1,285 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Dialogflow CX client service for intent detection.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from google.cloud.dialogflowcx_v3 import SessionsAsyncClient
|
||||
from google.cloud.dialogflowcx_v3.types import (
|
||||
DetectIntentRequest,
|
||||
QueryInput,
|
||||
TextInput,
|
||||
EventInput,
|
||||
QueryParameters,
|
||||
)
|
||||
from google.api_core.exceptions import (
|
||||
GoogleAPIError,
|
||||
InternalServerError,
|
||||
ServiceUnavailable,
|
||||
)
|
||||
from tenacity import (
|
||||
retry,
|
||||
stop_after_attempt,
|
||||
wait_exponential,
|
||||
retry_if_exception_type,
|
||||
)
|
||||
|
||||
from ..config import Settings
|
||||
from ..models import DetectIntentRequestDTO, DetectIntentResponseDTO, QueryResultDTO
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DialogflowClientService:
|
||||
"""Service for interacting with Dialogflow CX API."""
|
||||
|
||||
def __init__(self, settings: Settings):
    """Initialize the Dialogflow CX sessions client from settings."""
    self.settings = settings
    self.project_id = settings.dialogflow_project_id
    self.location = settings.dialogflow_location
    self.agent_id = settings.dialogflow_agent_id
    self.default_language = settings.dialogflow_default_language

    # Point the async client at the region-specific API endpoint.
    endpoint = settings.dialogflow_endpoint
    self.client = SessionsAsyncClient(client_options={"api_endpoint": endpoint})

    logger.info(
        f"Dialogflow CX SessionsClient initialized for endpoint: {endpoint}"
    )
    logger.info(f"Agent ID: {self.agent_id}")
|
||||
|
||||
def _build_session_path(self, session_id: str) -> str:
    """Return the fully-qualified session resource path for this agent."""
    path_args = {
        "project": self.project_id,
        "location": self.location,
        "agent": self.agent_id,
        "session": session_id,
    }
    return self.client.session_path(**path_args)
|
||||
|
||||
def _map_query_input(self, query_input_dto) -> QueryInput:
    """Translate a QueryInputDTO into a Dialogflow CX QueryInput proto.

    Raises:
        ValueError: when neither a text nor an event input is present.
    """
    language_code = query_input_dto.language_code or self.default_language

    text_dto = query_input_dto.text
    if text_dto and text_dto.text:
        return QueryInput(
            text=TextInput(text=text_dto.text),
            language_code=language_code,
        )

    event_dto = query_input_dto.event
    if event_dto and event_dto.event:
        return QueryInput(
            event=EventInput(event=event_dto.event),
            language_code=language_code,
        )

    raise ValueError("Either text or event input must be provided")
|
||||
|
||||
def _map_query_params(self, query_params_dto) -> QueryParameters | None:
    """Map QueryParamsDTO to Dialogflow QueryParameters (None when empty)."""
    if query_params_dto and query_params_dto.parameters:
        return QueryParameters(parameters=query_params_dto.parameters)
    return None
|
||||
|
||||
def _extract_response_text(self, response) -> str:
|
||||
"""Extract text from Dialogflow response messages."""
|
||||
texts = []
|
||||
for msg in response.query_result.response_messages:
|
||||
if hasattr(msg, "text") and msg.text.text:
|
||||
texts.extend(msg.text.text)
|
||||
return " ".join(texts) if texts else ""
|
||||
|
||||
@retry(
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    retry=retry_if_exception_type((InternalServerError, ServiceUnavailable)),
    reraise=True,
)
async def detect_intent(
    self, session_id: str, request_dto: DetectIntentRequestDTO
) -> DetectIntentResponseDTO:
    """
    Detect intent from user input using Dialogflow CX.

    Retries up to 3 times with exponential backoff on transient
    InternalServerError / ServiceUnavailable failures (see @retry above).

    Args:
        session_id: Unique session identifier
        request_dto: Detect intent request

    Returns:
        Detect intent response with query results

    Raises:
        ValueError: If session_id or request_dto is missing
        GoogleAPIError: If Dialogflow API call fails
    """
    if not session_id:
        raise ValueError("Session ID cannot be empty")
    if not request_dto:
        raise ValueError("Request DTO cannot be None")

    logger.info(f"Initiating detectIntent for session: {session_id}")

    try:
        # Build request (session path + input/params proto mapping)
        session_path = self._build_session_path(session_id)
        query_input = self._map_query_input(request_dto.query_input)
        query_params = self._map_query_params(request_dto.query_params)

        detect_request = DetectIntentRequest(
            session=session_path,
            query_input=query_input,
            query_params=query_params,
        )

        # Call Dialogflow
        logger.debug(
            f"Calling Dialogflow CX detectIntent for session: {session_id}"
        )
        response = await self.client.detect_intent(request=detect_request)

        # Extract response data
        query_result = response.query_result
        response_text = self._extract_response_text(response)

        # Map to DTO.
        # NOTE(review): only responseText/parameters are populated here —
        # callers that read queryResult.text or queryResult.intent should be
        # checked against the QueryResultDTO definition.
        query_result_dto = QueryResultDTO(
            responseText=response_text,
            parameters=dict(query_result.parameters)
            if query_result.parameters
            else None,
        )

        result = DetectIntentResponseDTO(
            responseId=response.response_id,
            queryResult=query_result_dto,
        )

        logger.info(
            f"Successfully processed detectIntent for session: {session_id}"
        )
        return result

    except GoogleAPIError as e:
        logger.error(
            f"Dialogflow CX API error for session {session_id}: {e.message}",
            exc_info=True,
        )
        raise
    except Exception as e:
        # Unexpected failures are logged with traceback, then propagated.
        logger.error(
            f"Unexpected error in detectIntent for session {session_id}: {str(e)}",
            exc_info=True,
        )
        raise
|
||||
|
||||
@retry(
|
||||
stop=stop_after_attempt(3),
|
||||
wait=wait_exponential(multiplier=1, min=1, max=10),
|
||||
retry=retry_if_exception_type((InternalServerError, ServiceUnavailable)),
|
||||
reraise=True,
|
||||
)
|
||||
async def detect_intent_event(
|
||||
self,
|
||||
session_id: str,
|
||||
event_name: str,
|
||||
parameters: dict | None = None,
|
||||
language_code: str | None = None,
|
||||
) -> DetectIntentResponseDTO:
|
||||
"""
|
||||
Trigger Dialogflow event detection.
|
||||
|
||||
Used for notification events and system-triggered flows.
|
||||
|
||||
Args:
|
||||
session_id: Unique session identifier
|
||||
event_name: Dialogflow event name (e.g., "notificacion")
|
||||
parameters: Event parameters
|
||||
language_code: Language code (defaults to settings)
|
||||
|
||||
Returns:
|
||||
Detect intent response
|
||||
|
||||
Raises:
|
||||
GoogleAPIError: If Dialogflow API call fails
|
||||
"""
|
||||
if not session_id:
|
||||
raise ValueError("Session ID cannot be empty")
|
||||
if not event_name:
|
||||
raise ValueError("Event name cannot be empty")
|
||||
|
||||
lang_code = language_code or self.default_language
|
||||
|
||||
logger.info(
|
||||
f"Triggering Dialogflow event '{event_name}' for session: {session_id}"
|
||||
)
|
||||
|
||||
try:
|
||||
# Build request
|
||||
session_path = self._build_session_path(session_id)
|
||||
|
||||
query_input = QueryInput(
|
||||
event=EventInput(event=event_name),
|
||||
language_code=lang_code,
|
||||
)
|
||||
|
||||
query_params = None
|
||||
if parameters:
|
||||
query_params = QueryParameters(parameters=parameters)
|
||||
|
||||
detect_request = DetectIntentRequest(
|
||||
session=session_path,
|
||||
query_input=query_input,
|
||||
query_params=query_params,
|
||||
)
|
||||
|
||||
# Call Dialogflow
|
||||
logger.debug(
|
||||
f"Calling Dialogflow CX for event '{event_name}' in session: {session_id}"
|
||||
)
|
||||
response = await self.client.detect_intent(request=detect_request)
|
||||
|
||||
# Extract response data
|
||||
query_result = response.query_result
|
||||
response_text = self._extract_response_text(response)
|
||||
|
||||
# Map to DTO
|
||||
query_result_dto = QueryResultDTO(
|
||||
responseText=response_text,
|
||||
parameters=dict(query_result.parameters)
|
||||
if query_result.parameters
|
||||
else None,
|
||||
)
|
||||
|
||||
result = DetectIntentResponseDTO(
|
||||
responseId=response.response_id,
|
||||
queryResult=query_result_dto,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Successfully processed event '{event_name}' for session: {session_id}"
|
||||
)
|
||||
return result
|
||||
|
||||
except GoogleAPIError as e:
|
||||
logger.error(
|
||||
f"Dialogflow CX API error for event '{event_name}' in session {session_id}: {e.message}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Unexpected error triggering event '{event_name}' for session {session_id}: {str(e)}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise
|
||||
|
||||
async def close(self):
|
||||
"""Close the Dialogflow client."""
|
||||
await self.client.transport.close()
|
||||
logger.info("Dialogflow CX SessionsClient closed")
|
||||
199
src/capa_de_integracion/services/dlp_service.py
Normal file
199
src/capa_de_integracion/services/dlp_service.py
Normal file
@@ -0,0 +1,199 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Data Loss Prevention service for obfuscating sensitive information.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from google.cloud import dlp_v2
|
||||
from google.cloud.dlp_v2 import types
|
||||
|
||||
from ..config import Settings
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DLPService:
    """
    Service for detecting and obfuscating sensitive data using Google Cloud DLP.

    Integrates with the DLP API to scan text for PII and other sensitive
    information, then obfuscates findings in-place based on their info type.
    Errors from the DLP API are swallowed: the original text is returned so
    the caller's flow is never blocked by an inspection failure.
    """

    def __init__(self, settings: Settings):
        """
        Initialize DLP service.

        Args:
            settings: Application settings (provides GCP project and location,
                used to resolve the inspect-template resource name).
        """
        self.settings = settings
        self.project_id = settings.gcp_project_id
        self.location = settings.gcp_location
        # Async client; credentials come from the ambient GCP environment.
        self.dlp_client = dlp_v2.DlpServiceAsyncClient()

        logger.info("DLP Service initialized")

    async def get_obfuscated_string(self, text: str, template_id: str) -> str:
        """
        Inspect text for sensitive data and obfuscate findings.

        Args:
            text: Text to inspect and obfuscate
            template_id: DLP inspect template ID (short ID; the full resource
                name is built from the configured project/location)

        Returns:
            Obfuscated text with sensitive data replaced. On any DLP API
            error the ORIGINAL text is returned unchanged — this method
            never raises for API failures.
        """
        # Nothing to inspect for empty/whitespace-only input.
        if not text or not text.strip():
            return text

        try:
            # Build content item
            byte_content_item = types.ByteContentItem(
                type_=types.ByteContentItem.BytesType.TEXT_UTF8,
                data=text.encode("utf-8"),
            )
            content_item = types.ContentItem(byte_item=byte_content_item)

            # Build inspect config
            finding_limits = types.InspectConfig.FindingLimits(
                max_findings_per_item=0  # No limit
            )

            # Low min_likelihood on purpose: we collect everything here and
            # filter by likelihood later in _obfuscate_text. include_quote is
            # required because obfuscation replaces the quoted substrings.
            inspect_config = types.InspectConfig(
                min_likelihood=types.Likelihood.VERY_UNLIKELY,
                limits=finding_limits,
                include_quote=True,
            )

            # Build request
            inspect_template_name = f"projects/{self.project_id}/locations/{self.location}/inspectTemplates/{template_id}"
            parent = f"projects/{self.project_id}/locations/{self.location}"

            request = types.InspectContentRequest(
                parent=parent,
                inspect_template_name=inspect_template_name,
                inspect_config=inspect_config,
                item=content_item,
            )

            # Call DLP API
            response = await self.dlp_client.inspect_content(request=request)

            findings_count = len(response.result.findings)
            logger.info(f"DLP {template_id} Findings: {findings_count}")

            if findings_count > 0:
                return self._obfuscate_text(response, text)
            else:
                return text

        except Exception as e:
            # Deliberate best-effort: never fail the caller on DLP errors.
            logger.error(
                f"Error during DLP inspection: {e}. Returning original text.",
                exc_info=True,
            )
            return text

    def _obfuscate_text(self, response: types.InspectContentResponse, text: str) -> str:
        """
        Obfuscate sensitive findings in text.

        Args:
            response: DLP inspect content response with findings
            text: Original text

        Returns:
            Text with sensitive data obfuscated
        """
        # Keep only findings strictly above POSSIBLE (enum value 3), i.e.
        # LIKELY and VERY_LIKELY.
        findings = [
            finding
            for finding in response.result.findings
            if finding.likelihood.value > 3
        ]

        # Sort by likelihood (descending) so the most confident matches are
        # replaced first; str.replace below substitutes ALL occurrences of
        # each quote.
        findings.sort(key=lambda f: f.likelihood.value, reverse=True)

        for finding in findings:
            quote = finding.quote
            info_type = finding.info_type.name

            logger.info(
                f"InfoType: {info_type} | Likelihood: {finding.likelihood.value}"
            )

            # Obfuscate based on info type; unknown types are left untouched.
            replacement = self._get_replacement(info_type, quote)
            if replacement:
                text = text.replace(quote, replacement)

        # Clean up consecutive DIRECCION tags
        text = self._clean_direccion(text)

        return text

    def _get_replacement(self, info_type: str, quote: str) -> str | None:
        """
        Get replacement text for a given info type.

        Args:
            info_type: DLP info type name
            quote: Original sensitive text (used to preserve the last 4
                digits of card and account numbers)

        Returns:
            Replacement text or None to skip (info type not in the map)
        """
        replacements = {
            "CREDIT_CARD_NUMBER": f"**** **** **** {self._get_last4(quote)}",
            "CREDIT_CARD_EXPIRATION_DATE": "[FECHA_VENCIMIENTO_TARJETA]",
            "FECHA_VENCIMIENTO": "[FECHA_VENCIMIENTO_TARJETA]",
            "CVV_NUMBER": "[CVV]",
            "CVV": "[CVV]",
            "EMAIL_ADDRESS": "[CORREO]",
            "PERSON_NAME": "[NOMBRE]",
            "PHONE_NUMBER": "[TELEFONO]",
            "DIRECCION": "[DIRECCION]",
            "DIR_COLONIA": "[DIRECCION]",
            "DIR_DEL_MUN": "[DIRECCION]",
            "DIR_INTERIOR": "[DIRECCION]",
            "DIR_ESQUINA": "[DIRECCION]",
            "DIR_CIUDAD_EDO": "[DIRECCION]",
            "DIR_CP": "[DIRECCION]",
            "CLABE_INTERBANCARIA": "[CLABE]",
            "CLAVE_RASTREO_SPEI": "[CLAVE_RASTREO]",
            "NIP": "[NIP]",
            "SALDO": "[SALDO]",
            "CUENTA": f"**************{self._get_last4(quote)}",
            "NUM_ACLARACION": "[NUM_ACLARACION]",
        }

        return replacements.get(info_type)

    def _get_last4(self, quote: str) -> str:
        """Extract last 4 characters from quote (removing spaces).

        Returns the whole cleaned quote when it is shorter than 4 chars.
        """
        clean_quote = quote.strip().replace(" ", "")
        if len(clean_quote) >= 4:
            return clean_quote[-4:]
        return clean_quote

    def _clean_direccion(self, text: str) -> str:
        """Clean up consecutive [DIRECCION] tags."""
        # Replace multiple [DIRECCION] tags separated by commas or spaces with single tag
        pattern = r"\[DIRECCION\](?:(?:,\s*|\s+)\[DIRECCION\])*"
        return re.sub(pattern, "[DIRECCION]", text).strip()

    async def close(self):
        """Close DLP client."""
        await self.dlp_client.transport.close()
        logger.info("DLP client closed")
|
||||
324
src/capa_de_integracion/services/firestore_service.py
Normal file
324
src/capa_de_integracion/services/firestore_service.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Firestore service for persistent conversation storage.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from google.cloud import firestore
|
||||
|
||||
from ..config import Settings
|
||||
from ..models import ConversationSessionDTO, ConversationEntryDTO
|
||||
from ..models.notification import NotificationDTO
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FirestoreService:
    """Service for Firestore operations on conversations and notifications.

    All read/write methods are best-effort: errors are logged and converted
    into a neutral return value (None / False / []) instead of raising,
    except for the notification write methods which propagate failures.
    """

    def __init__(self, settings: Settings):
        """Initialize the async Firestore client and collection paths."""
        self.settings = settings
        self.db = firestore.AsyncClient(
            project=settings.gcp_project_id,
            database=settings.firestore_database_id,
        )
        # Conversations live at artifacts/<project>/conversations/<sessionId>,
        # with messages in the "mensajes" subcollection of each session doc.
        self.conversations_collection = (
            f"artifacts/{settings.gcp_project_id}/conversations"
        )
        self.entries_subcollection = "mensajes"
        self.notifications_collection = (
            f"artifacts/{settings.gcp_project_id}/notifications"
        )
        logger.info(
            f"Firestore client initialized for project: {settings.gcp_project_id}"
        )

    async def close(self):
        """Close Firestore client."""
        await self.db.close()
        logger.info("Firestore client closed")

    def _session_ref(self, session_id: str):
        """Get Firestore document reference for a conversation session."""
        return self.db.collection(self.conversations_collection).document(session_id)

    async def get_session(self, session_id: str) -> ConversationSessionDTO | None:
        """Retrieve conversation session from Firestore by session ID.

        Returns:
            The validated session DTO, or None when the document does not
            exist or any error occurs.
        """
        try:
            doc_ref = self._session_ref(session_id)
            doc = await doc_ref.get()

            if not doc.exists:
                logger.debug(f"Session not found in Firestore: {session_id}")
                return None

            data = doc.to_dict()
            session = ConversationSessionDTO.model_validate(data)
            logger.debug(f"Retrieved session from Firestore: {session_id}")
            return session

        except Exception as e:
            logger.error(
                f"Error retrieving session {session_id} from Firestore: {str(e)}"
            )
            return None

    async def get_session_by_phone(
        self, telefono: str
    ) -> ConversationSessionDTO | None:
        """
        Retrieve most recent conversation session from Firestore by phone number.

        Args:
            telefono: User phone number

        Returns:
            Most recent session for this phone, or None if not found
        """
        try:
            # FIX: the original used self.sessions_collection, an attribute
            # that is never defined (__init__ sets conversations_collection),
            # so this method always raised AttributeError and returned None.
            # NOTE(review): this query requires a composite index on
            # (telefono, lastModified) — confirm it exists in the database.
            query = (
                self.db.collection(self.conversations_collection)
                .where("telefono", "==", telefono)
                .order_by("lastModified", direction=firestore.Query.DESCENDING)
                .limit(1)
            )

            docs = query.stream()
            # limit(1) yields at most one document; return on the first.
            async for doc in docs:
                data = doc.to_dict()
                session = ConversationSessionDTO.model_validate(data)
                logger.debug(
                    f"Retrieved session from Firestore for phone {telefono}: {session.sessionId}"
                )
                return session

            logger.debug(f"No session found in Firestore for phone: {telefono}")
            return None

        except Exception as e:
            logger.error(
                f"Error querying session by phone {telefono} from Firestore: {str(e)}"
            )
            return None

    async def save_session(self, session: ConversationSessionDTO) -> bool:
        """Save (merge-upsert) a conversation session document.

        Returns:
            True on success, False on any error.
        """
        try:
            doc_ref = self._session_ref(session.sessionId)
            data = session.model_dump()
            # merge=True preserves fields written by other code paths.
            await doc_ref.set(data, merge=True)
            logger.debug(f"Saved session to Firestore: {session.sessionId}")
            return True

        except Exception as e:
            logger.error(
                f"Error saving session {session.sessionId} to Firestore: {str(e)}"
            )
            return False

    async def save_entry(self, session_id: str, entry: ConversationEntryDTO) -> bool:
        """Save a conversation entry in the session's message subcollection.

        Returns:
            True on success, False on any error.
        """
        try:
            doc_ref = self._session_ref(session_id)
            entries_ref = doc_ref.collection(self.entries_subcollection)

            # Use timestamp as document ID for chronological ordering.
            # NOTE(review): two entries with identical timestamps would
            # overwrite each other — confirm timestamps are unique per session.
            entry_id = entry.timestamp.isoformat()
            entry_doc = entries_ref.document(entry_id)

            data = entry.model_dump()
            await entry_doc.set(data)
            logger.debug(f"Saved entry to Firestore for session: {session_id}")
            return True

        except Exception as e:
            logger.error(
                f"Error saving entry for session {session_id} to Firestore: {str(e)}"
            )
            return False

    async def get_entries(
        self, session_id: str, limit: int = 10
    ) -> list[ConversationEntryDTO]:
        """Retrieve the most recent conversation entries, oldest first.

        Args:
            session_id: Session whose messages to fetch.
            limit: Maximum number of entries to return.

        Returns:
            Up to *limit* entries in chronological order; [] on error.
        """
        try:
            doc_ref = self._session_ref(session_id)
            entries_ref = doc_ref.collection(self.entries_subcollection)

            # Fetch newest-first so the limit keeps the most recent messages.
            query = entries_ref.order_by(
                "timestamp", direction=firestore.Query.DESCENDING
            ).limit(limit)

            docs = query.stream()
            entries = []

            async for doc in docs:
                entry_data = doc.to_dict()
                entry = ConversationEntryDTO.model_validate(entry_data)
                entries.append(entry)

            # Reverse to get chronological order for the caller.
            entries.reverse()
            logger.debug(f"Retrieved {len(entries)} entries for session: {session_id}")
            return entries

        except Exception as e:
            logger.error(
                f"Error retrieving entries for session {session_id} from Firestore: {str(e)}"
            )
            return []

    async def delete_session(self, session_id: str) -> bool:
        """Delete a conversation session and all of its entries.

        Returns:
            True on success, False on any error.
        """
        try:
            doc_ref = self._session_ref(session_id)

            # Delete all entries first — deleting the parent document does
            # not delete its subcollections in Firestore.
            entries_ref = doc_ref.collection(self.entries_subcollection)
            async for doc in entries_ref.stream():
                await doc.reference.delete()

            # Delete session document
            await doc_ref.delete()
            logger.debug(f"Deleted session from Firestore: {session_id}")
            return True

        except Exception as e:
            logger.error(
                f"Error deleting session {session_id} from Firestore: {str(e)}"
            )
            return False

    # ====== Notification Methods ======

    def _notification_ref(self, notification_id: str):
        """Get Firestore document reference for a notification session."""
        return self.db.collection(self.notifications_collection).document(
            notification_id
        )

    async def save_or_append_notification(self, new_entry: NotificationDTO) -> None:
        """
        Save or append notification entry to Firestore.

        The phone number doubles as the notification-session document ID:
        the first notification creates the document, later ones are appended
        to its "notificaciones" array.

        Args:
            new_entry: Notification entry to save

        Raises:
            ValueError: If phone number is missing
            Exception: Firestore failures are re-raised after logging.
        """
        phone_number = new_entry.telefono
        if not phone_number or not phone_number.strip():
            raise ValueError("Phone number is required to manage notification entries")

        # Use phone number as document ID
        notification_session_id = phone_number

        try:
            doc_ref = self._notification_ref(notification_session_id)
            doc = await doc_ref.get()

            entry_dict = new_entry.model_dump()

            if doc.exists:
                # Append to existing session; ArrayUnion is a server-side
                # atomic append (identical dicts are de-duplicated).
                await doc_ref.update(
                    {
                        "notificaciones": firestore.ArrayUnion([entry_dict]),
                        "ultimaActualizacion": datetime.now(),
                    }
                )
                logger.info(
                    f"Successfully appended notification entry to session {notification_session_id} in Firestore"
                )
            else:
                # Create new notification session
                new_session_data = {
                    "sessionId": notification_session_id,
                    "telefono": phone_number,
                    "fechaCreacion": datetime.now(),
                    "ultimaActualizacion": datetime.now(),
                    "notificaciones": [entry_dict],
                }
                await doc_ref.set(new_session_data)
                logger.info(
                    f"Successfully created new notification session {notification_session_id} in Firestore"
                )

        except Exception as e:
            logger.error(
                f"Error saving notification to Firestore for phone {phone_number}: {str(e)}",
                exc_info=True,
            )
            raise

    async def update_notification_status(self, session_id: str, status: str) -> None:
        """
        Update the status of all notifications in a session.

        Args:
            session_id: Notification session ID (phone number)
            status: New status value

        Raises:
            Exception: Firestore failures are re-raised after logging.
        """
        try:
            doc_ref = self._notification_ref(session_id)
            doc = await doc_ref.get()

            if not doc.exists:
                logger.warning(
                    f"Notification session {session_id} not found in Firestore. Cannot update status"
                )
                return

            session_data = doc.to_dict()
            notifications = session_data.get("notificaciones", [])

            # Rewrite the whole array with the new status on every element.
            updated_notifications = [
                {**notif, "status": status} for notif in notifications
            ]

            await doc_ref.update(
                {
                    "notificaciones": updated_notifications,
                    "ultimaActualizacion": datetime.now(),
                }
            )

            logger.info(
                f"Successfully updated notification status to '{status}' for session {session_id} in Firestore"
            )

        except Exception as e:
            logger.error(
                f"Error updating notification status in Firestore for session {session_id}: {str(e)}",
                exc_info=True,
            )
            raise

    async def delete_notification(self, notification_id: str) -> bool:
        """Delete a notification session document from Firestore.

        Returns:
            True on success, False on any error.
        """
        try:
            logger.info(
                f"Deleting notification session {notification_id} from Firestore"
            )
            doc_ref = self._notification_ref(notification_id)
            await doc_ref.delete()
            logger.info(
                f"Successfully deleted notification session {notification_id} from Firestore"
            )
            return True

        except Exception as e:
            logger.error(
                f"Error deleting notification session {notification_id} from Firestore: {str(e)}",
                exc_info=True,
            )
            return False
|
||||
100
src/capa_de_integracion/services/gemini_client.py
Normal file
100
src/capa_de_integracion/services/gemini_client.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Gemini client service for LLM operations.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import google.generativeai as genai
|
||||
|
||||
from ..config import Settings
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GeminiClientException(Exception):
    """Raised when a Gemini API call fails or yields no usable content."""
|
||||
|
||||
|
||||
class GeminiClientService:
    """Service for interacting with Google Gemini API."""

    def __init__(self, settings: Settings):
        """Initialize Gemini client."""
        self.settings = settings

        # Configure the SDK; no arguments are passed, so credentials are
        # presumably resolved from the environment — confirm deployment env.
        genai.configure()

        logger.info("Gemini client initialized successfully")

    async def generate_content(
        self,
        prompt: str,
        temperature: float,
        max_output_tokens: int,
        model_name: str,
        top_p: float,
    ) -> str:
        """
        Generate text from *prompt* with the given Gemini model.

        Args:
            prompt: The prompt text to send to Gemini.
            temperature: Sampling temperature (0.0 to 1.0).
            max_output_tokens: Maximum number of tokens to generate.
            model_name: Gemini model name (e.g., "gemini-2.0-flash-exp").
            top_p: Top-p sampling parameter.

        Returns:
            Generated text response from Gemini.

        Raises:
            GeminiClientException: If the API call fails or produces no text.
        """
        try:
            logger.debug(f"Sending request to Gemini model '{model_name}'")

            # Build the model with its generation config inline.
            llm = genai.GenerativeModel(
                model_name=model_name,
                generation_config=genai.GenerationConfig(
                    temperature=temperature,
                    max_output_tokens=max_output_tokens,
                    top_p=top_p,
                ),
            )

            result = await llm.generate_content_async(prompt)

            # Guard clause: an empty/blocked result is treated as an error
            # (the raise below is re-wrapped by the except handler).
            if not (result and result.text):
                logger.warning(
                    f"Gemini returned no content or unexpected response structure for model '{model_name}'"
                )
                raise GeminiClientException(
                    "No content generated or unexpected response structure"
                )

            logger.debug(
                f"Received response from Gemini: {len(result.text)} characters"
            )
            return result.text

        except Exception as e:
            logger.error(
                f"Error during Gemini content generation for model '{model_name}': {e}",
                exc_info=True,
            )
            raise GeminiClientException(
                f"An error occurred during content generation: {str(e)}"
            ) from e
|
||||
105
src/capa_de_integracion/services/llm_response_tuner.py
Normal file
105
src/capa_de_integracion/services/llm_response_tuner.py
Normal file
@@ -0,0 +1,105 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
LLM Response Tuner service for storing/retrieving pre-generated responses.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from redis.asyncio import Redis
|
||||
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LlmResponseTunerService:
    """
    Redis-backed store for pre-generated LLM responses.

    Acts as a webhook bridge:
    1. LLM responses are pre-generated and stored under a UUID.
    2. The Dialogflow webhook calls this service with that UUID.
    3. The service retrieves and returns the stored response.
    """

    def __init__(self, redis: Redis):
        """
        Initialize LLM response tuner service.

        Args:
            redis: Redis client instance used as the backing store.
        """
        self.redis = redis
        self.collection_prefix = "llm-pre-response:"
        self.ttl = 3600  # entries expire after one hour

        logger.info("LlmResponseTunerService initialized")

    def _get_key(self, uuid: str) -> str:
        """Build the namespaced Redis key for a UUID."""
        return f"{self.collection_prefix}{uuid}"

    async def get_value(self, uuid: str) -> str | None:
        """
        Fetch the pre-generated response stored under *uuid*.

        Args:
            uuid: Unique identifier for the response.

        Returns:
            The stored text, or None when the UUID is blank, the key is
            missing, or Redis fails.
        """
        if not uuid or not uuid.strip():
            logger.warning("UUID is null or blank")
            return None

        key = self._get_key(uuid)

        try:
            stored = await self.redis.get(key)
            # Inverted check vs. a value branch: bail out early on a miss.
            if not stored:
                logger.warning(f"No response found for UUID: {uuid}")
                return None
            logger.info(f"Retrieved LLM response for UUID: {uuid}")
            return stored

        except Exception as e:
            logger.error(
                f"Error retrieving LLM response for UUID {uuid}: {e}", exc_info=True
            )
            return None

    async def set_value(self, uuid: str, value: str) -> bool:
        """
        Store a pre-generated response under *uuid* with the service TTL.

        Args:
            uuid: Unique identifier for the response.
            value: Response text to store.

        Returns:
            True on success, False on invalid input or Redis failure.
        """
        if not uuid or not uuid.strip():
            logger.warning("UUID is null or blank")
            return False

        if not value:
            logger.warning("Value is null or empty")
            return False

        key = self._get_key(uuid)

        try:
            # SETEX writes the value and its expiry in a single command.
            await self.redis.setex(key, self.ttl, value)
            logger.info(f"Stored LLM response for UUID: {uuid} with TTL: {self.ttl}s")
            return True

        except Exception as e:
            logger.error(
                f"Error storing LLM response for UUID {uuid}: {e}", exc_info=True
            )
            return False
|
||||
229
src/capa_de_integracion/services/mappers.py
Normal file
229
src/capa_de_integracion/services/mappers.py
Normal file
@@ -0,0 +1,229 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Mappers for converting DTOs to text format for Gemini API.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from ..models import (
|
||||
ConversationSessionDTO,
|
||||
ConversationEntryDTO,
|
||||
)
|
||||
from ..models.notification import NotificationDTO
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NotificationContextMapper:
    """Maps notifications to text format for Gemini classification."""

    @staticmethod
    def to_text(notification: NotificationDTO) -> str:
        """
        Return the notification's raw text.

        Args:
            notification: Notification DTO

        Returns:
            The notification text, or "" when the DTO or its text is missing.
        """
        if notification and notification.texto:
            return notification.texto
        return ""

    @staticmethod
    def to_text_multiple(notifications: list[NotificationDTO]) -> str:
        """
        Join the texts of several notifications.

        Args:
            notifications: List of notification DTOs

        Returns:
            Non-blank texts joined by newlines; "" for an empty list.
        """
        if not notifications:
            return ""

        return "\n".join(
            n.texto for n in notifications if n.texto and n.texto.strip()
        )

    @staticmethod
    def to_json(notification: NotificationDTO) -> str:
        """
        Serialize a notification to a JSON string for Gemini.

        Args:
            notification: Notification DTO

        Returns:
            JSON with "texto", "parametros" (defaults to {}) and an ISO-8601
            "timestamp"; the literal "{}" when *notification* is None.
        """
        if not notification:
            return "{}"

        payload = {
            "texto": notification.texto,
            "parametros": notification.parametros or {},
            "timestamp": notification.timestampCreacion.isoformat(),
        }
        return json.dumps(payload, ensure_ascii=False)
|
||||
|
||||
|
||||
class ConversationContextMapper:
    """Maps conversation history to text format for Gemini."""

    # Business rules for conversation history limits
    MESSAGE_LIMIT = 60  # Maximum 60 messages
    DAYS_LIMIT = 30  # Maximum 30 days
    MAX_HISTORY_BYTES = 50 * 1024  # 50 KB maximum size

    # Key under which SISTEMA entries may carry notification text.
    NOTIFICATION_TEXT_PARAM = "notification_text"

    def __init__(self, message_limit: int = MESSAGE_LIMIT, days_limit: int = DAYS_LIMIT):
        """
        Initialize conversation context mapper.

        CONSISTENCY FIX: the defaults now reference the class constants
        instead of repeating the literals 60/30, so the business rules are
        defined in exactly one place (same values — backward compatible).

        Args:
            message_limit: Maximum number of messages to include
            days_limit: Maximum age of messages in days
        """
        self.message_limit = message_limit
        self.days_limit = days_limit

    def to_text_from_entries(self, entries: list[ConversationEntryDTO]) -> str:
        """
        Convert conversation entries to text format (no limits applied).

        Args:
            entries: List of conversation entries

        Returns:
            One formatted line per entry, joined by newlines; "" if empty.
        """
        if not entries:
            return ""

        formatted = [self._format_entry(entry) for entry in entries]
        return "\n".join(formatted)

    def to_text_with_limits(
        self,
        session: ConversationSessionDTO,
        entries: list[ConversationEntryDTO],
    ) -> str:
        """
        Convert conversation to text with business rule limits applied.

        Applies:
            - Days limit (default 30 days)
            - Message limit (default 60 messages)
            - Size limit (50 KB)

        Args:
            session: Conversation session (currently unused by the body;
                kept for API compatibility)
            entries: List of conversation entries

        Returns:
            Formatted conversation history with limits applied
        """
        if not entries:
            return ""

        # Filter by age; entries without a timestamp are dropped.
        # NOTE(review): datetime.now() is naive — assumes entry timestamps
        # are also naive and in the same zone; confirm against the writers.
        cutoff_date = datetime.now() - timedelta(days=self.days_limit)
        recent_entries = [
            e for e in entries if e.timestamp and e.timestamp >= cutoff_date
        ]

        # Sort oldest-first, then keep only the newest N messages.
        recent_entries.sort(key=lambda e: e.timestamp)
        limited_entries = recent_entries[-self.message_limit :]

        # Apply size truncation (50 KB)
        return self._to_text_with_truncation(limited_entries)

    def _to_text_with_truncation(self, entries: list[ConversationEntryDTO]) -> str:
        """
        Format entries while keeping the output under MAX_HISTORY_BYTES.

        When the history is too large, the OLDEST messages are dropped —
        messages are accumulated newest-to-oldest until the budget is hit.

        Args:
            entries: Conversation entries in chronological order

        Returns:
            Formatted text, truncated if necessary
        """
        if not entries:
            return ""

        formatted_messages = [self._format_entry(entry) for entry in entries]

        text_block = []
        current_size = 0

        # Walk from newest to oldest, prepending so chronological order is
        # preserved in the final output.
        for message in reversed(formatted_messages):
            message_line = message + "\n"
            message_bytes = len(message_line.encode("utf-8"))

            if current_size + message_bytes > self.MAX_HISTORY_BYTES:
                break

            text_block.insert(0, message_line)
            current_size += message_bytes

        return "".join(text_block).strip()

    def _format_entry(self, entry: ConversationEntryDTO) -> str:
        """
        Format a single conversation entry.

        Entities map to prefixes: AGENTE -> "Agent: " (with JSON artifacts
        stripped), SISTEMA and LLM -> "System: ", anything else -> "User: ".

        Args:
            entry: Conversation entry

        Returns:
            Formatted string (e.g., "User: hello", "Agent: hi there")
        """
        prefix = "User: "
        content = entry.text

        if entry.entity == "AGENTE":
            prefix = "Agent: "
            # Clean JSON artifacts from agent messages
            content = self._clean_agent_message(content)
        elif entry.entity == "SISTEMA":
            prefix = "System: "
            # A system entry may carry its real text in the notification
            # parameter; prefer that over entry.text when present.
            if entry.parameters and self.NOTIFICATION_TEXT_PARAM in entry.parameters:
                param_text = entry.parameters[self.NOTIFICATION_TEXT_PARAM]
                if param_text and str(param_text).strip():
                    content = str(param_text)
        elif entry.entity == "LLM":
            prefix = "System: "

        return prefix + content

    def _clean_agent_message(self, message: str) -> str:
        """
        Clean agent message by removing a trailing JSON-like {...} artifact.

        Args:
            message: Original message

        Returns:
            Cleaned, stripped message
        """
        # Local import: the module does not import re at top level.
        import re

        return re.sub(r"\s*\{.*\}\s*$", "", message).strip()
|
||||
156
src/capa_de_integracion/services/message_filter.py
Normal file
156
src/capa_de_integracion/services/message_filter.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Message classification service using Gemini LLM.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from ..config import Settings
|
||||
from .gemini_client import GeminiClientService, GeminiClientException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MessageEntryFilter:
    """
    Classifies a user's text input into a predefined category using Gemini.

    Analyzes user queries in the context of conversation history and
    notifications to determine if the message is part of ongoing dialogue
    or an interruption. Classification is used to route requests to the
    appropriate handler.
    """

    # Classification categories returned by classify_message().
    CATEGORY_CONVERSATION = "CONVERSATION"
    CATEGORY_NOTIFICATION = "NOTIFICATION"
    CATEGORY_UNKNOWN = "UNKNOWN"
    # NOTE(review): CATEGORY_ERROR is declared but never returned by the
    # code below — confirm whether any caller still relies on it.
    CATEGORY_ERROR = "ERROR"

    def __init__(self, settings: Settings, gemini_service: GeminiClientService):
        """
        Initialize message filter.

        Args:
            settings: Application settings
            gemini_service: Gemini client service

        Raises:
            RuntimeError: If the prompt template file cannot be loaded.
        """
        self.settings = settings
        self.gemini_service = gemini_service
        # Loaded once at construction; a missing template is fatal.
        self.prompt_template = self._load_prompt_template()

        logger.info("MessageEntryFilter initialized successfully")

    def _load_prompt_template(self) -> str:
        """Load the prompt template from resources.

        Returns:
            Raw template text (used below with %-formatting, so it is
            expected to contain three %s slots).

        Raises:
            RuntimeError: If the file cannot be read.
        """
        prompt_path = self.settings.base_path / self.settings.message_filter_prompt_path

        try:
            with open(prompt_path, "r", encoding="utf-8") as f:
                prompt_template = f.read()
            logger.info(f"Successfully loaded prompt template from '{prompt_path}'")
            return prompt_template
        except Exception as e:
            logger.error(
                f"Failed to load prompt template from '{prompt_path}': {e}",
                exc_info=True,
            )
            raise RuntimeError("Could not load prompt template") from e

    async def classify_message(
        self,
        query_input_text: str,
        notifications_json: str | None = None,
        conversation_json: str | None = None,
    ) -> str:
        """
        Classify a user message as CONVERSATION, NOTIFICATION, or UNKNOWN.

        Args:
            query_input_text: The user's input text to classify
            notifications_json: JSON string of interrupting notifications (optional)
            conversation_json: JSON string of conversation history (optional)

        Returns:
            Classification category (CONVERSATION, NOTIFICATION, or UNKNOWN).
            UNKNOWN is the safe fallback on blank input, unrecognized model
            output, or any Gemini error.
        """
        if not query_input_text or not query_input_text.strip():
            logger.warning(
                f"Query input text for classification is null or blank. Returning {self.CATEGORY_UNKNOWN}"
            )
            return self.CATEGORY_UNKNOWN

        # Prepare context strings; placeholder sentences keep the prompt
        # well-formed when no history/notifications are available.
        interrupting_notification = (
            notifications_json
            if notifications_json and notifications_json.strip()
            else "No interrupting notification."
        )

        conversation_history = (
            conversation_json
            if conversation_json and conversation_json.strip()
            else "No conversation history."
        )

        # Format the classification prompt (slot order: history,
        # notification, query).
        # NOTE(review): %-formatting raises if the template contains a
        # literal '%' that is not a placeholder — confirm template contents.
        classification_prompt = self.prompt_template % (
            conversation_history,
            interrupting_notification,
            query_input_text,
        )

        logger.debug(
            f"Sending classification request to Gemini for input (first 100 chars): "
            f"'{query_input_text[:100]}...'"
        )

        try:
            # Call Gemini API
            gemini_response = await self.gemini_service.generate_content(
                prompt=classification_prompt,
                temperature=self.settings.message_filter_temperature,
                max_output_tokens=self.settings.message_filter_max_tokens,
                model_name=self.settings.message_filter_model,
                top_p=self.settings.message_filter_top_p,
            )

            # Parse and validate response
            if not gemini_response:
                logger.warning(
                    f"Gemini returned null/blank response. Returning {self.CATEGORY_UNKNOWN}"
                )
                return self.CATEGORY_UNKNOWN

            # Comparison is case-insensitive; surrounding whitespace ignored.
            response_upper = gemini_response.strip().upper()

            if response_upper == self.CATEGORY_CONVERSATION:
                logger.info(f"Classified as {self.CATEGORY_CONVERSATION}")
                return self.CATEGORY_CONVERSATION
            elif response_upper == self.CATEGORY_NOTIFICATION:
                logger.info(f"Classified as {self.CATEGORY_NOTIFICATION}")
                return self.CATEGORY_NOTIFICATION
            else:
                logger.warning(
                    f"Gemini returned unrecognized classification: '{gemini_response}'. "
                    f"Expected '{self.CATEGORY_CONVERSATION}' or '{self.CATEGORY_NOTIFICATION}'. "
                    f"Returning {self.CATEGORY_UNKNOWN}"
                )
                return self.CATEGORY_UNKNOWN

        except GeminiClientException as e:
            logger.error(
                f"Error during Gemini content generation for message classification: {e}",
                exc_info=True,
            )
            return self.CATEGORY_UNKNOWN
        except Exception as e:
            # Broad catch is deliberate: classification is best-effort and
            # must never break the request flow.
            logger.error(
                f"Unexpected error during message classification: {e}",
                exc_info=True,
            )
            return self.CATEGORY_UNKNOWN
|
||||
@@ -0,0 +1,192 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Notification context resolver using Gemini LLM.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from ..config import Settings
|
||||
from .gemini_client import GeminiClientService, GeminiClientException
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NotificationContextResolver:
    """
    Resolves conversational context using LLM to answer notification-related questions.

    Evaluates a user's question in the context of a notification and conversation
    history. Decides if the query can be answered by the LLM (using notification
    metadata) or should be delegated to Dialogflow.
    """

    # Sentinel response meaning "delegate to the standard Dialogflow flow".
    CATEGORY_DIALOGFLOW = "DIALOGFLOW"

    def __init__(self, settings: Settings, gemini_service: GeminiClientService):
        """
        Initialize notification context resolver.

        Args:
            settings: Application settings
            gemini_service: Gemini client service

        Raises:
            RuntimeError: If the prompt template cannot be loaded.
        """
        self.settings = settings
        self.gemini_service = gemini_service

        # Load settings (with defaults matching Java). getattr is used so
        # Settings objects without these fields still work with the
        # documented defaults.
        self.model_name = getattr(
            settings, "notification_context_model", "gemini-2.0-flash-001"
        )
        self.temperature = getattr(settings, "notification_context_temperature", 0.1)
        self.max_tokens = getattr(settings, "notification_context_max_tokens", 1024)
        self.top_p = getattr(settings, "notification_context_top_p", 0.1)
        self.prompt_path = getattr(
            settings,
            "notification_context_prompt_path",
            "prompts/notification_context_resolver.txt",
        )

        self.prompt_template = self._load_prompt_template()

        logger.info("NotificationContextResolver initialized successfully")

    def _load_prompt_template(self) -> str:
        """Load the prompt template from resources.

        Returns:
            Raw template text (used below with %-formatting; expected to
            contain four %s slots).

        Raises:
            RuntimeError: If the file cannot be read.
        """
        prompt_path = self.settings.base_path / self.prompt_path

        try:
            with open(prompt_path, "r", encoding="utf-8") as f:
                prompt_template = f.read()
            logger.info(f"Successfully loaded prompt template from '{prompt_path}'")
            return prompt_template
        except Exception as e:
            logger.error(
                f"Failed to load prompt template from '{prompt_path}': {e}",
                exc_info=True,
            )
            raise RuntimeError("Could not load prompt template") from e

    async def resolve_context(
        self,
        query_input_text: str,
        notifications_json: str | None = None,
        conversation_json: str | None = None,
        metadata: str | None = None,
        user_id: str | None = None,
        session_id: str | None = None,
        user_phone_number: str | None = None,
    ) -> str:
        """
        Resolve context and generate response for notification-related question.

        Uses Gemini to analyze the question against notification metadata and
        conversation history. Returns either:
        - A direct answer generated by the LLM
        - "DIALOGFLOW" to delegate to standard Dialogflow flow

        Priority order for finding answers:
        1. METADATOS_NOTIFICACION (highest authority)
        2. HISTORIAL_CONVERSACION parameters with "notification_po_" prefix
        3. If not found, return "DIALOGFLOW"

        Args:
            query_input_text: User's question
            notifications_json: JSON string of notifications
            conversation_json: JSON string of conversation history
            metadata: Structured notification metadata
            user_id: User identifier (optional, for logging)
            session_id: Session identifier (optional, for logging)
            user_phone_number: User phone number (optional, for logging)

        Returns:
            Either a direct LLM-generated answer or "DIALOGFLOW"
        """
        # NOTE(review): user_id, session_id and user_phone_number are
        # accepted "for logging" but never referenced below — confirm
        # whether they should appear in the log lines.
        logger.debug(
            f"resolveContext -> queryInputText: {query_input_text}, "
            f"notificationsJson: {notifications_json}, "
            f"conversationJson: {conversation_json}, "
            f"metadata: {metadata}"
        )

        if not query_input_text or not query_input_text.strip():
            logger.warning(
                f"Query input text for context resolution is null or blank. "
                f"Returning {self.CATEGORY_DIALOGFLOW}"
            )
            return self.CATEGORY_DIALOGFLOW

        # Prepare context strings; placeholder sentences keep the prompt
        # well-formed when inputs are absent.
        notification_content = (
            notifications_json
            if notifications_json and notifications_json.strip()
            else "No metadata in notification."
        )

        conversation_history = (
            conversation_json
            if conversation_json and conversation_json.strip()
            else "No conversation history."
        )

        # Empty JSON object when no structured metadata was provided.
        notification_metadata = metadata if metadata and metadata.strip() else "{}"

        # Format the context resolution prompt (slot order: history,
        # notification, metadata, query).
        context_prompt = self.prompt_template % (
            conversation_history,
            notification_content,
            notification_metadata,
            query_input_text,
        )

        logger.debug(
            f"Sending context resolution request to Gemini for input (first 100 chars): "
            f"'{query_input_text[:100]}...'"
        )

        try:
            # Call Gemini API
            gemini_response = await self.gemini_service.generate_content(
                prompt=context_prompt,
                temperature=self.temperature,
                max_output_tokens=self.max_tokens,
                model_name=self.model_name,
                top_p=self.top_p,
            )

            if gemini_response and gemini_response.strip():
                # Check if response is delegation to Dialogflow
                if gemini_response.strip().upper() == self.CATEGORY_DIALOGFLOW:
                    logger.debug(
                        f"Resolved to {self.CATEGORY_DIALOGFLOW}. Input: '{query_input_text}'"
                    )
                    return self.CATEGORY_DIALOGFLOW
                else:
                    # LLM provided a direct answer; returned verbatim
                    # (not stripped).
                    logger.debug(
                        f"Resolved to a specific response. Input: '{query_input_text}'"
                    )
                    return gemini_response
            else:
                logger.warning(
                    f"Gemini returned a null or blank response. "
                    f"Returning {self.CATEGORY_DIALOGFLOW}"
                )
                return self.CATEGORY_DIALOGFLOW

        except GeminiClientException as e:
            logger.error(
                f"Error during Gemini content generation for context resolution: {e}",
                exc_info=True,
            )
            return self.CATEGORY_DIALOGFLOW
        except Exception as e:
            # Broad catch is deliberate: resolution is best-effort; any
            # failure falls back to the Dialogflow flow.
            logger.error(
                f"Unexpected error during context resolution: {e}",
                exc_info=True,
            )
            return self.CATEGORY_DIALOGFLOW
|
||||
259
src/capa_de_integracion/services/notification_manager.py
Normal file
259
src/capa_de_integracion/services/notification_manager.py
Normal file
@@ -0,0 +1,259 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Notification manager service for processing push notifications.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
from ..config import Settings
|
||||
from ..models import DetectIntentResponseDTO
|
||||
from ..models.notification import ExternalNotRequestDTO, NotificationDTO
|
||||
from ..models.conversation import ConversationSessionDTO, ConversationEntryDTO
|
||||
from ..utils.session_id import generate_session_id
|
||||
from .dialogflow_client import DialogflowClientService
|
||||
from .redis_service import RedisService
|
||||
from .firestore_service import FirestoreService
|
||||
from .dlp_service import DLPService
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
PREFIX_PO_PARAM = "notification_po_"
|
||||
|
||||
|
||||
class NotificationManagerService:
    """
    Manages notification processing and integration with conversations.

    Handles push notifications from external systems, stores them in
    Redis/Firestore, and triggers Dialogflow event detection.
    """

    def __init__(
        self,
        settings: Settings,
        dialogflow_client: DialogflowClientService,
        redis_service: RedisService,
        firestore_service: FirestoreService,
        dlp_service: DLPService,
    ):
        """
        Initialize notification manager.

        Args:
            settings: Application settings
            dialogflow_client: Dialogflow CX client
            redis_service: Redis caching service
            firestore_service: Firestore persistence service
            dlp_service: Data Loss Prevention service
        """
        self.settings = settings
        self.dialogflow_client = dialogflow_client
        self.redis_service = redis_service
        self.firestore_service = firestore_service
        self.dlp_service = dlp_service
        self.default_language_code = settings.dialogflow_default_language
        # Dialogflow event name fired for every incoming notification.
        self.event_name = "notificacion"

        logger.info("NotificationManagerService initialized")

    async def process_notification(
        self, external_request: ExternalNotRequestDTO
    ) -> DetectIntentResponseDTO:
        """
        Process a push notification from external system.

        Flow:
        1. Validate phone number
        2. Obfuscate sensitive data (DLP)
        3. Create notification entry
        4. Save to Redis and Firestore
        5. Get or create conversation session
        6. Add notification to conversation history
        7. Trigger Dialogflow event

        Args:
            external_request: External notification request

        Returns:
            Dialogflow detect intent response

        Raises:
            ValueError: If phone number is missing
        """
        telefono = external_request.telefono

        if not telefono or not telefono.strip():
            logger.warning("No phone number provided in notification request")
            raise ValueError("Phone number is required")

        # Obfuscate sensitive data using DLP
        obfuscated_text = await self.dlp_service.get_obfuscated_string(
            external_request.texto,
            self.settings.dlp_template_complete_flow,
        )

        # Prefix hidden parameters so they are recognizable downstream
        # (e.g. "notification_po_<key>").
        parameters = {}
        if external_request.parametros_ocultos:
            for key, value in external_request.parametros_ocultos.items():
                parameters[f"{PREFIX_PO_PARAM}{key}"] = value

        # Create notification entry
        new_notification_id = generate_session_id()
        new_notification_entry = NotificationDTO(
            idNotificacion=new_notification_id,
            telefono=telefono,
            # NOTE(review): naive local time — confirm UTC expectations.
            timestampCreacion=datetime.now(),
            texto=obfuscated_text,
            nombreEventoDialogflow=self.event_name,
            codigoIdiomaDialogflow=self.default_language_code,
            parametros=parameters,
            status="active",
        )

        # Save notification to Redis (with async Firestore write-back)
        await self.redis_service.save_or_append_notification(new_notification_entry)
        logger.info(
            f"Notification for phone {telefono} cached. Kicking off async Firestore write-back"
        )

        # Firestore write is awaited inline despite the "write-back" intent;
        # errors are logged and swallowed so Redis remains authoritative.
        # In production, consider using asyncio.create_task() with proper
        # error handling.
        try:
            await self.firestore_service.save_or_append_notification(
                new_notification_entry
            )
            logger.debug(
                f"Notification entry persisted to Firestore for phone {telefono}"
            )
        except Exception as e:
            logger.error(
                f"Background: Error during notification persistence to Firestore for phone {telefono}: {e}",
                exc_info=True,
            )

        # Get or create conversation session
        session = await self._get_or_create_conversation_session(
            telefono, obfuscated_text, parameters
        )

        # Send notification event to Dialogflow
        logger.info(
            f"Sending notification text to Dialogflow using conversation session: {session.sessionId}"
        )

        response = await self.dialogflow_client.detect_intent_event(
            session_id=session.sessionId,
            event_name=self.event_name,
            parameters=parameters,
            language_code=self.default_language_code,
        )

        logger.info(
            f"Finished processing notification. Dialogflow response received for phone {telefono}"
        )
        return response

    async def _get_or_create_conversation_session(
        self, telefono: str, notification_text: str, parameters: dict
    ) -> ConversationSessionDTO:
        """
        Get existing conversation session or create a new one.

        Also persists a system entry for the notification.

        NOTE(review): currently ALWAYS creates a new session — lookup by
        phone is not implemented yet (see TODO below).

        Args:
            telefono: User phone number
            notification_text: Notification text content
            parameters: Notification parameters

        Returns:
            Conversation session
        """
        # Try to get existing session by phone
        # TODO: Need to implement get_session_by_telefono in Redis service
        # For now, we'll create a new session

        new_session_id = generate_session_id()
        user_id = f"user_by_phone_{telefono}"

        logger.info(
            f"Creating new conversation session {new_session_id} for notification (phone: {telefono})"
        )

        # Create system entry for notification
        system_entry = ConversationEntryDTO(
            entity="SISTEMA",
            type="SISTEMA",
            timestamp=datetime.now(),
            text=notification_text,
            parameters=parameters,
            intent=None,
        )

        # Create new session
        new_session = ConversationSessionDTO(
            sessionId=new_session_id,
            userId=user_id,
            telefono=telefono,
            createdAt=datetime.now(),
            lastModified=datetime.now(),
            lastMessage=notification_text,
            pantallaContexto=None,
        )

        # Persist conversation turn (session + system entry)
        await self._persist_conversation_turn(new_session, system_entry)

        return new_session

    async def _persist_conversation_turn(
        self, session: ConversationSessionDTO, entry: ConversationEntryDTO
    ) -> None:
        """
        Persist conversation turn to Redis and Firestore.

        Uses write-through caching: writes to Redis first, then to Firestore
        (errors there are logged, not raised).

        NOTE(review): only the session document is written to Redis here;
        the entry itself goes to Firestore only — confirm that is intended.

        Args:
            session: Conversation session
            entry: Conversation entry to persist
        """
        logger.debug(
            f"Starting Write-Back persistence for notification session {session.sessionId}. "
            f"Type: {entry.type}. Writing to Redis first"
        )

        # Update session with last message.
        # BUGFIX: the previous code passed **session.model_dump() together
        # with explicit lastMessage=/lastModified= kwargs, which raises
        # TypeError ("got multiple values for keyword argument") because
        # model_dump() already contains those keys. Merge into the dict
        # first, then construct.
        session_data = session.model_dump()
        session_data["lastMessage"] = entry.text
        session_data["lastModified"] = datetime.now()
        updated_session = ConversationSessionDTO(**session_data)

        # Save to Redis
        await self.redis_service.save_session(updated_session)
        logger.info(
            f"Entry saved to Redis for notification session {session.sessionId}. "
            f"Type: {entry.type}. Kicking off async Firestore write-back"
        )

        # Firestore writes: failures are logged and swallowed.
        try:
            await self.firestore_service.save_session(updated_session)
            await self.firestore_service.save_entry(session.sessionId, entry)
            logger.debug(
                f"Asynchronously (Write-Back): Entry successfully saved to Firestore "
                f"for notification session {session.sessionId}. Type: {entry.type}"
            )
        except Exception as e:
            logger.error(
                f"Asynchronously (Write-Back): Failed to save entry to Firestore "
                f"for notification session {session.sessionId}. Type: {entry.type}: {e}",
                exc_info=True,
            )
|
||||
98
src/capa_de_integracion/services/quick_reply_content.py
Normal file
98
src/capa_de_integracion/services/quick_reply_content.py
Normal file
@@ -0,0 +1,98 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Quick Reply content service for loading FAQ screens.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
from ..config import Settings
|
||||
from ..models.quick_replies import QuickReplyDTO, QuestionDTO
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class QuickReplyContentService:
    """Loads quick-reply (FAQ) screen content from per-screen JSON files."""

    def __init__(self, settings: Settings):
        """
        Initialize quick reply content service.

        Args:
            settings: Application settings
        """
        self.settings = settings
        # Screen files live under <base_path>/quick_replies/<screen_id>.json
        self.quick_replies_path = settings.base_path / "quick_replies"

        logger.info(
            f"QuickReplyContentService initialized with path: {self.quick_replies_path}"
        )

    async def get_quick_replies(self, screen_id: str) -> QuickReplyDTO | None:
        """
        Load quick reply screen content by ID.

        Args:
            screen_id: Screen identifier (e.g., "pagos", "home")

        Returns:
            Quick reply DTO, an "empty" placeholder DTO for a blank
            screen_id, or None if the file is missing or unparseable
        """
        if not screen_id or not screen_id.strip():
            logger.warning("screen_id is null or empty. Returning empty quick replies")
            return QuickReplyDTO(
                header="empty",
                body=None,
                button=None,
                header_section=None,
                preguntas=[],
            )

        file_path = self.quick_replies_path / f"{screen_id}.json"

        try:
            if not file_path.exists():
                logger.warning(f"Quick reply file not found: {file_path}")
                return None

            with open(file_path, "r", encoding="utf-8") as f:
                data = json.load(f)

            # Build question DTOs; missing fields default to empty/None.
            questions = [
                QuestionDTO(
                    titulo=item.get("titulo", ""),
                    descripcion=item.get("descripcion"),
                    respuesta=item.get("respuesta", ""),
                )
                for item in data.get("preguntas", [])
            ]

            result = QuickReplyDTO(
                header=data.get("header"),
                body=data.get("body"),
                button=data.get("button"),
                header_section=data.get("header_section"),
                preguntas=questions,
            )
        except json.JSONDecodeError as e:
            logger.error(f"Error parsing JSON file {file_path}: {e}", exc_info=True)
            return None
        except Exception as e:
            logger.error(
                f"Error loading quick replies for screen {screen_id}: {e}",
                exc_info=True,
            )
            return None

        logger.info(
            f"Successfully loaded {len(questions)} quick replies for screen: {screen_id}"
        )
        return result
|
||||
373
src/capa_de_integracion/services/redis_service.py
Normal file
373
src/capa_de_integracion/services/redis_service.py
Normal file
@@ -0,0 +1,373 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Redis service for caching conversation sessions.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from redis.asyncio import Redis
|
||||
|
||||
from ..config import Settings
|
||||
from ..models import ConversationSessionDTO
|
||||
from ..models.notification import NotificationSessionDTO, NotificationDTO
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RedisService:
|
||||
"""Service for Redis operations on conversation sessions."""
|
||||
|
||||
def __init__(self, settings: Settings):
|
||||
"""Initialize Redis client."""
|
||||
self.settings = settings
|
||||
self.redis: Redis | None = None
|
||||
self.session_ttl = 2592000 # 30 days in seconds
|
||||
self.notification_ttl = 2592000 # 30 days in seconds
|
||||
|
||||
async def connect(self):
|
||||
"""Connect to Redis."""
|
||||
self.redis = Redis(
|
||||
host=self.settings.redis_host,
|
||||
port=self.settings.redis_port,
|
||||
password=self.settings.redis_password,
|
||||
ssl=self.settings.redis_ssl,
|
||||
decode_responses=True,
|
||||
)
|
||||
logger.info(
|
||||
f"Connected to Redis at {self.settings.redis_host}:{self.settings.redis_port}"
|
||||
)
|
||||
|
||||
async def close(self):
|
||||
"""Close Redis connection."""
|
||||
if self.redis:
|
||||
await self.redis.close()
|
||||
logger.info("Redis connection closed")
|
||||
|
||||
def _session_key(self, session_id: str) -> str:
|
||||
"""Generate Redis key for conversation session."""
|
||||
return f"conversation:session:{session_id}"
|
||||
|
||||
def _phone_to_session_key(self, phone: str) -> str:
|
||||
"""Generate Redis key for phone-to-session mapping."""
|
||||
return f"conversation:phone:{phone}"
|
||||
|
||||
async def get_session(
|
||||
self, session_id_or_phone: str
|
||||
) -> ConversationSessionDTO | None:
|
||||
"""
|
||||
Retrieve conversation session from Redis by session ID or phone number.
|
||||
|
||||
Args:
|
||||
session_id_or_phone: Either a session ID or phone number
|
||||
|
||||
Returns:
|
||||
Conversation session or None if not found
|
||||
"""
|
||||
if not self.redis:
|
||||
raise RuntimeError("Redis client not connected")
|
||||
|
||||
# First try as phone number (lookup session ID)
|
||||
phone_key = self._phone_to_session_key(session_id_or_phone)
|
||||
mapped_session_id = await self.redis.get(phone_key)
|
||||
|
||||
if mapped_session_id:
|
||||
# Found phone mapping, get the actual session
|
||||
session_id = mapped_session_id
|
||||
else:
|
||||
# Try as direct session ID
|
||||
session_id = session_id_or_phone
|
||||
|
||||
# Get session by ID
|
||||
key = self._session_key(session_id)
|
||||
data = await self.redis.get(key)
|
||||
|
||||
if not data:
|
||||
logger.debug(f"Session not found in Redis: {session_id_or_phone}")
|
||||
return None
|
||||
|
||||
try:
|
||||
session_dict = json.loads(data)
|
||||
session = ConversationSessionDTO.model_validate(session_dict)
|
||||
logger.debug(f"Retrieved session from Redis: {session_id}")
|
||||
return session
|
||||
except Exception as e:
|
||||
logger.error(f"Error deserializing session {session_id}: {str(e)}")
|
||||
return None
|
||||
|
||||
async def save_session(self, session: ConversationSessionDTO) -> bool:
|
||||
"""
|
||||
Save conversation session to Redis with TTL.
|
||||
|
||||
Also stores phone-to-session mapping for lookup by phone number.
|
||||
"""
|
||||
if not self.redis:
|
||||
raise RuntimeError("Redis client not connected")
|
||||
|
||||
key = self._session_key(session.sessionId)
|
||||
phone_key = self._phone_to_session_key(session.telefono)
|
||||
|
||||
try:
|
||||
# Save session data
|
||||
data = session.model_dump_json(by_alias=False)
|
||||
await self.redis.setex(key, self.session_ttl, data)
|
||||
|
||||
# Save phone-to-session mapping
|
||||
await self.redis.setex(phone_key, self.session_ttl, session.sessionId)
|
||||
|
||||
logger.debug(
|
||||
f"Saved session to Redis: {session.sessionId} for phone: {session.telefono}"
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving session {session.sessionId} to Redis: {str(e)}")
|
||||
return False
|
||||
|
||||
async def delete_session(self, session_id: str) -> bool:
|
||||
"""Delete conversation session from Redis."""
|
||||
if not self.redis:
|
||||
raise RuntimeError("Redis client not connected")
|
||||
|
||||
key = self._session_key(session_id)
|
||||
|
||||
try:
|
||||
result = await self.redis.delete(key)
|
||||
logger.debug(f"Deleted session from Redis: {session_id}")
|
||||
return result > 0
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting session {session_id} from Redis: {str(e)}")
|
||||
return False
|
||||
|
||||
async def exists(self, session_id: str) -> bool:
|
||||
"""Check if session exists in Redis."""
|
||||
if not self.redis:
|
||||
raise RuntimeError("Redis client not connected")
|
||||
|
||||
key = self._session_key(session_id)
|
||||
return await self.redis.exists(key) > 0
|
||||
|
||||
# ====== Message Methods ======
|
||||
|
||||
def _messages_key(self, session_id: str) -> str:
|
||||
"""Generate Redis key for conversation messages."""
|
||||
return f"conversation:messages:{session_id}"
|
||||
|
||||
async def save_message(self, session_id: str, message) -> bool:
    """
    Save a conversation message to Redis sorted set.

    The message's timestamp (in milliseconds) is used as the sorted-set
    score, so retrieval yields chronological order. The messages key's
    TTL is refreshed to match the session TTL on every write.

    Args:
        session_id: The session ID
        message: ConversationMessageDTO or ConversationEntryDTO

    Returns:
        True if successful, False otherwise
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    messages_key = self._messages_key(session_id)

    try:
        # Score first: milliseconds since the epoch, taken from the message.
        ts_ms = message.timestamp.timestamp() * 1000
        serialized = message.model_dump_json(by_alias=False)

        await self.redis.zadd(messages_key, {serialized: ts_ms})
        # Refresh the TTL so messages expire together with the session.
        await self.redis.expire(messages_key, self.session_ttl)

        logger.debug(f"Saved message to Redis: {session_id}")
        return True
    except Exception as exc:
        logger.error(f"Error saving message to Redis for session {session_id}: {str(exc)}")
        return False
|
||||
|
||||
async def get_messages(self, session_id: str) -> list:
    """
    Retrieve all conversation messages for a session from Redis.

    Entries live in a sorted set scored by timestamp, so they come back
    ordered oldest first. Entries that fail to parse as JSON are logged
    and skipped rather than aborting the whole read.

    Args:
        session_id: The session ID

    Returns:
        List of message dictionaries (parsed from JSON)
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    messages_key = self._messages_key(session_id)

    try:
        # zrange over the full set returns entries ordered by score.
        raw_entries = await self.redis.zrange(messages_key, 0, -1)

        if not raw_entries:
            logger.debug(f"No messages found in Redis for session: {session_id}")
            return []

        parsed = []
        for raw in raw_entries:
            try:
                parsed.append(json.loads(raw))
            except json.JSONDecodeError as decode_err:
                # Skip corrupt entries; keep the rest of the history usable.
                logger.error(f"Error parsing message JSON: {str(decode_err)}")

        logger.debug(f"Retrieved {len(parsed)} messages from Redis for session: {session_id}")
        return parsed
    except Exception as exc:
        logger.error(f"Error retrieving messages from Redis for session {session_id}: {str(exc)}")
        return []
|
||||
|
||||
# ====== Notification Methods ======
|
||||
|
||||
def _notification_key(self, session_id: str) -> str:
|
||||
"""Generate Redis key for notification session."""
|
||||
return f"notification:{session_id}"
|
||||
|
||||
def _phone_to_notification_key(self, phone: str) -> str:
|
||||
"""Generate Redis key for phone-to-notification mapping."""
|
||||
return f"notification:phone_to_notification:{phone}"
|
||||
|
||||
async def save_or_append_notification(self, new_entry: NotificationDTO) -> None:
    """
    Save or append notification entry to session.

    Notification sessions are keyed by phone number: if a session already
    exists for the entry's phone the notification is appended to it,
    otherwise a fresh session is created.

    Args:
        new_entry: Notification entry to save

    Raises:
        ValueError: If phone number is missing
        RuntimeError: If the Redis client is not connected
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    phone = new_entry.telefono
    if not phone or not phone.strip():
        raise ValueError("Phone number is required to manage notification entries")

    # The phone number doubles as the notification session ID.
    current = await self.get_notification_session(phone)

    if current:
        # Existing session: append and bump the update timestamp,
        # preserving the original creation time.
        session_to_store = NotificationSessionDTO(
            session_id=phone,
            telefono=phone,
            fecha_creacion=current.fecha_creacion,
            ultima_actualizacion=datetime.now(),
            notificaciones=current.notificaciones + [new_entry],
        )
    else:
        # First notification for this phone: start a new session.
        # NOTE(review): datetime.now() is naive local time — confirm
        # whether UTC-aware timestamps are expected downstream.
        session_to_store = NotificationSessionDTO(
            session_id=phone,
            telefono=phone,
            fecha_creacion=datetime.now(),
            ultima_actualizacion=datetime.now(),
            notificaciones=[new_entry],
        )

    await self._cache_notification_session(session_to_store)
|
||||
|
||||
async def _cache_notification_session(
    self, session: NotificationSessionDTO
) -> bool:
    """
    Cache a notification session in Redis.

    Writes the serialized session plus a phone-to-session mapping, both
    under the notification TTL. Returns False on any Redis failure.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    notification_key = self._notification_key(session.sessionId)
    phone_mapping_key = self._phone_to_notification_key(session.telefono)

    try:
        # Store the session payload under the notification TTL.
        payload = session.model_dump_json(by_alias=False)
        await self.redis.setex(notification_key, self.notification_ttl, payload)

        # Mirror the TTL on the phone lookup key so both expire together.
        await self.redis.setex(phone_mapping_key, self.notification_ttl, session.sessionId)

        logger.debug(f"Cached notification session: {session.sessionId}")
        return True
    except Exception as exc:
        logger.error(
            f"Error caching notification session {session.sessionId}: {str(exc)}"
        )
        return False
|
||||
|
||||
async def get_notification_session(
    self, session_id: str
) -> NotificationSessionDTO | None:
    """
    Retrieve notification session from Redis.

    Returns None when the key is absent or the stored payload cannot be
    deserialized into a NotificationSessionDTO.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    raw = await self.redis.get(self._notification_key(session_id))

    if not raw:
        logger.debug(f"Notification session not found in Redis: {session_id}")
        return None

    try:
        restored = NotificationSessionDTO.model_validate(json.loads(raw))
    except Exception as exc:
        # A corrupt or schema-incompatible payload is treated as a miss.
        logger.error(
            f"Error deserializing notification session {session_id}: {str(exc)}"
        )
        return None

    logger.info(f"Notification session {session_id} retrieved from Redis")
    return restored
|
||||
|
||||
async def get_notification_id_for_phone(self, phone: str) -> str | None:
    """Get notification session ID for a phone number."""
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    mapped_id = await self.redis.get(self._phone_to_notification_key(phone))

    if mapped_id:
        logger.info(f"Session ID {mapped_id} found for phone")
    else:
        logger.debug("Session ID not found for phone")

    return mapped_id
|
||||
|
||||
async def delete_notification_session(self, phone_number: str) -> bool:
    """
    Delete a notification session and its phone mapping from Redis.

    Returns True when both DEL commands complete (regardless of whether
    the keys existed), False on any Redis error.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")

    keys_to_remove = (
        self._notification_key(phone_number),
        self._phone_to_notification_key(phone_number),
    )

    try:
        # NOTE(review): the phone number is PII — confirm logging it at
        # info level is acceptable for this deployment.
        logger.info(f"Deleting notification session for phone {phone_number}")
        for redis_key in keys_to_remove:
            await self.redis.delete(redis_key)
        return True
    except Exception as exc:
        logger.error(
            f"Error deleting notification session for phone {phone_number}: {str(exc)}"
        )
        return False
|
||||
5
src/capa_de_integracion/utils/__init__.py
Normal file
5
src/capa_de_integracion/utils/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Utilities module."""
|
||||
|
||||
from .session_id import SessionIdGenerator
|
||||
|
||||
__all__ = ["SessionIdGenerator"]
|
||||
23
src/capa_de_integracion/utils/session_id.py
Normal file
23
src/capa_de_integracion/utils/session_id.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Copyright 2025 Google. This software is provided as-is, without warranty or
|
||||
representation for any use or purpose. Your use of it is subject to your
|
||||
agreement with Google.
|
||||
|
||||
Session ID generator utility.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
|
||||
|
||||
class SessionIdGenerator:
    """Stateless factory for unique session identifiers."""

    @staticmethod
    def generate() -> str:
        """Return a freshly generated UUID4 rendered as its canonical string."""
        return str(uuid.uuid4())
|
||||
|
||||
|
||||
def generate_session_id() -> str:
    """Module-level convenience wrapper around SessionIdGenerator.generate()."""
    # Delegate so both entry points share a single implementation.
    return SessionIdGenerator.generate()
|
||||
Reference in New Issue
Block a user