This commit is contained in:
2026-02-19 17:50:14 +00:00
parent da95a64fb7
commit 6f629c53a6
171 changed files with 7281 additions and 1144 deletions

View File

@@ -0,0 +1,25 @@
"""Services module."""
from .dialogflow_client import DialogflowClientService
from .gemini_client import GeminiClientService, GeminiClientException
from .conversation_manager import ConversationManagerService
from .message_filter import MessageEntryFilter
from .notification_manager import NotificationManagerService
from .notification_context_resolver import NotificationContextResolver
from .dlp_service import DLPService
from .llm_response_tuner import LlmResponseTunerService
from .mappers import NotificationContextMapper, ConversationContextMapper
__all__ = [
"DialogflowClientService",
"GeminiClientService",
"GeminiClientException",
"ConversationManagerService",
"MessageEntryFilter",
"NotificationManagerService",
"NotificationContextResolver",
"DLPService",
"LlmResponseTunerService",
"NotificationContextMapper",
"ConversationContextMapper",
]

View File

@@ -0,0 +1,847 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Conversation manager service - central orchestrator for conversations.
"""
import logging
import uuid
from datetime import datetime, timedelta
from ..config import Settings
from ..models import (
ExternalConvRequestDTO,
DetectIntentRequestDTO,
DetectIntentResponseDTO,
ConversationSessionDTO,
ConversationEntryDTO,
QueryInputDTO,
TextInputDTO,
QueryParamsDTO,
)
from ..utils import SessionIdGenerator
from .dialogflow_client import DialogflowClientService
from .redis_service import RedisService
from .firestore_service import FirestoreService
from .dlp_service import DLPService
from .message_filter import MessageEntryFilter
from .notification_context_resolver import NotificationContextResolver
from .llm_response_tuner import LlmResponseTunerService
from .mappers import NotificationContextMapper, ConversationContextMapper
from .quick_reply_content import QuickReplyContentService
logger = logging.getLogger(__name__)
class ConversationManagerService:
    """Central orchestrator for user conversations.

    Routes every incoming message based on session state:
      1. A fresh ``pantallaContexto`` on the session delegates to the
         quick-replies flow.
      2. Otherwise MessageEntryFilter (Gemini) classifies the message:
         CONVERSATION -> standard Dialogflow flow with conversation history;
         NOTIFICATION -> NotificationContextResolver (Gemini) either answers
         directly or delegates to Dialogflow.

    Every conversation turn is persisted with a reactive write-back pattern:
    Redis first (fast, awaited), then an async fire-and-forget mirror to
    Firestore (persistent).
    """

    # Sessions older than this threshold are replaced by a new session
    # seeded with the previous conversation history.
    SESSION_RESET_THRESHOLD_MINUTES = 30
    # A pantallaContexto older than this is considered stale and ignored.
    SCREEN_CONTEXT_TIMEOUT_MINUTES = 10
    CONV_HISTORY_PARAM = "conversation_history"
    HISTORY_PARAM = "historial"

    def __init__(
        self,
        settings: Settings,
        dialogflow_client: DialogflowClientService,
        redis_service: RedisService,
        firestore_service: FirestoreService,
        dlp_service: DLPService,
        message_filter: MessageEntryFilter,
        notification_context_resolver: NotificationContextResolver,
        llm_response_tuner: LlmResponseTunerService,
    ):
        """Wire up all collaborating services and internal mappers."""
        self.settings = settings
        self.dialogflow_client = dialogflow_client
        self.redis_service = redis_service
        self.firestore_service = firestore_service
        self.dlp_service = dlp_service
        self.message_filter = message_filter
        self.notification_context_resolver = notification_context_resolver
        self.llm_response_tuner = llm_response_tuner
        # Mappers turn notifications / history into text for the LLM prompts.
        self.notification_mapper = NotificationContextMapper()
        self.conversation_mapper = ConversationContextMapper(
            message_limit=settings.conversation_context_message_limit,
            days_limit=settings.conversation_context_days_limit,
        )
        # Content provider for the quick-reply (FAQ) flow.
        self.quick_reply_service = QuickReplyContentService(settings)
        logger.info("ConversationManagerService initialized successfully")
async def manage_conversation(
    self, request: ExternalConvRequestDTO
) -> DetectIntentResponseDTO:
    """Main entry point for managing a conversation turn.

    Flow: DLP-obfuscate the message, then either delegate to the
    quick-replies flow (fresh ``pantallaContexto`` on the session) or
    continue with the standard classification/routing flow.

    Args:
        request: External conversation request from the client.

    Returns:
        Detect intent response from Dialogflow.
    """
    try:
        # DLP obfuscation happens before anything else touches the text.
        masked_text = await self.dlp_service.get_obfuscated_string(
            request.mensaje,
            self.settings.dlp_template_complete_flow,
        )
        safe_request = ExternalConvRequestDTO(
            mensaje=masked_text,
            usuario=request.usuario,
            canal=request.canal,
            tipo=request.tipo,
            pantalla_contexto=request.pantalla_contexto,
        )
        # Quick-replies mode only applies when the stored screen context
        # is still fresh.
        phone = request.usuario.telefono
        stored_session = await self.redis_service.get_session(phone)
        if stored_session and stored_session.pantallaContexto:
            if self._is_pantalla_context_valid(stored_session):
                logger.info(
                    f"Detected 'pantallaContexto' in session: {stored_session.pantallaContexto}. "
                    f"Delegating to QuickReplies flow."
                )
                return await self._manage_quick_reply_conversation(
                    safe_request, stored_session
                )
            logger.info(
                "Detected STALE 'pantallaContexto'. Ignoring and proceeding with normal flow."
            )
        return await self._continue_managing_conversation(safe_request)
    except Exception as e:
        logger.error(f"Error managing conversation: {str(e)}", exc_info=True)
        raise
def _is_pantalla_context_valid(self, session: ConversationSessionDTO) -> bool:
"""Check if pantallaContexto is still valid (not stale)."""
if not session.lastModified:
return False
time_diff = datetime.now() - session.lastModified
return time_diff < timedelta(minutes=self.SCREEN_CONTEXT_TIMEOUT_MINUTES)
async def _manage_quick_reply_conversation(
    self,
    request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
) -> DetectIntentResponseDTO:
    """Answer a message sent from a quick-reply screen.

    The screen identifier stored on the session is forwarded to Dialogflow
    as the ``pantalla_contexto`` parameter so the message is answered as a
    FAQ for that screen.

    Args:
        request: External request (already DLP-obfuscated).
        session: Existing session carrying ``pantallaContexto``.

    Returns:
        Dialogflow response.
    """
    dialogflow_request = self._build_dialogflow_request(
        request, session, request.mensaje
    )
    if dialogflow_request.query_params:
        dialogflow_request.query_params.parameters["pantalla_contexto"] = (
            session.pantallaContexto
        )
    response = await self.dialogflow_client.detect_intent(
        session.sessionId, dialogflow_request
    )
    # Quick-reply turns are persisted the same way as regular turns.
    await self._persist_conversation_turn(session, request.mensaje, response)
    return response
async def _continue_managing_conversation(
    self, request: ExternalConvRequestDTO
) -> DetectIntentResponseDTO:
    """Standard flow: resolve the session, then classify and route.

    Redis is the primary session store; on a miss the flow falls back to a
    full Firestore lookup.

    Args:
        request: External conversation request (already DLP-obfuscated).

    Returns:
        Dialogflow response.

    Raises:
        ValueError: If the request carries no usable phone number.
    """
    telefono = request.usuario.telefono
    # getattr(..., None) mirrors the original hasattr-based fallback.
    nickname = getattr(request.usuario, "nickname", None)
    if not telefono or not telefono.strip():
        raise ValueError("Phone number is required to manage conversation sessions")
    logger.info(f"Primary Check (Redis): Looking up session for phone: {telefono}")
    session = await self.redis_service.get_session(telefono)
    if session:
        return await self._handle_message_classification(request, session)
    logger.info(
        "No session found in Redis. Performing full lookup to Firestore."
    )
    return await self._full_lookup_and_process(request, telefono, nickname)
async def _handle_message_classification(
    self,
    request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
) -> DetectIntentResponseDTO:
    """Classify the message and route to the notification or standard flow.

    Without an active notification for this phone the message always takes
    the standard conversation path; otherwise MessageEntryFilter (Gemini)
    decides whether the message is about the notification.

    Args:
        request: External request.
        session: Existing conversation session.

    Returns:
        Dialogflow response.
    """
    telefono = request.usuario.telefono
    user_message = request.mensaje

    notification_id = await self.redis_service.get_notification_id_for_phone(
        telefono
    )
    if not notification_id:
        return await self._proceed_with_conversation(request, session)

    notification_session = await self.redis_service.get_notification_session(
        notification_id
    )
    if not notification_session or not notification_session.notificaciones:
        return await self._proceed_with_conversation(request, session)

    # Most recent notification still marked "active", if any.
    newest_first = sorted(
        notification_session.notificaciones,
        key=lambda n: n.timestampCreacion,
        reverse=True,
    )
    active_notification = next(
        (n for n in newest_first if n.status == "active"), None
    )
    if active_notification is None:
        return await self._proceed_with_conversation(request, session)

    # Conversation history comes from the fast Redis cache.
    messages_data = await self.redis_service.get_messages(session.sessionId)
    conversation_entries = [
        ConversationEntryDTO.model_validate(msg) for msg in messages_data
    ]
    conversation_history = (
        self.conversation_mapper.to_text_from_entries(conversation_entries) or ""
    )

    # Let Gemini decide which flow this message belongs to.
    notification_text = self.notification_mapper.to_text(active_notification)
    classification = await self.message_filter.classify_message(
        query_input_text=user_message,
        notifications_json=notification_text,
        conversation_json=conversation_history,
    )
    logger.info(f"Message classified as: {classification}")

    if classification == self.message_filter.CATEGORY_NOTIFICATION:
        return await self._start_notification_conversation(
            request, active_notification, session, conversation_entries
        )
    return await self._proceed_with_conversation(request, session)
async def _proceed_with_conversation(
    self,
    request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
) -> DetectIntentResponseDTO:
    """Run the standard Dialogflow flow, resetting stale sessions.

    Sessions younger than SESSION_RESET_THRESHOLD_MINUTES are reused as-is;
    older ones are replaced by a fresh session whose first request carries
    the previous conversation history as a parameter.

    Fix vs. the original: removed a stray no-op ``datetime.now()``
    expression whose result was discarded.

    Args:
        request: External request.
        session: Existing session.

    Returns:
        Dialogflow response.
    """
    if self._is_session_valid(session):
        logger.info(
            f"Recent Session Found: Session {session.sessionId} is within "
            f"the {self.SESSION_RESET_THRESHOLD_MINUTES}-minute threshold. "
            f"Proceeding to Dialogflow."
        )
        return await self._process_dialogflow_request(
            session, request, is_new_session=False
        )

    # Session expired: create a replacement and inject history.
    logger.info(
        f"Old Session Found: Session {session.sessionId} is older than "
        f"the {self.SESSION_RESET_THRESHOLD_MINUTES}-minute threshold."
    )
    new_session_id = SessionIdGenerator.generate()
    telefono = request.usuario.telefono
    nickname = (
        request.usuario.nickname
        if hasattr(request.usuario, "nickname")
        else None
    )
    new_session = ConversationSessionDTO.create(
        session_id=new_session_id,
        user_id=nickname or telefono,
        telefono=telefono,
    )
    logger.info(
        f"Creating new session {new_session_id} from old session "
        f"{session.sessionId} due to timeout."
    )
    # History from the expired session, trimmed by the mapper's configured
    # limits (message count / age / size) before injection.
    old_entries = await self.firestore_service.get_entries(
        session.sessionId,
        limit=self.settings.conversation_context_message_limit,
    )
    conversation_history = self.conversation_mapper.to_text_with_limits(
        session, old_entries
    )
    dialogflow_request = self._build_dialogflow_request(
        request, new_session, request.mensaje
    )
    dialogflow_request.query_params.parameters[self.CONV_HISTORY_PARAM] = (
        conversation_history
    )
    return await self._process_dialogflow_request(
        new_session,
        request,
        is_new_session=True,
        dialogflow_request=dialogflow_request,
    )
async def _start_notification_conversation(
    self,
    request: ExternalConvRequestDTO,
    notification,
    session: ConversationSessionDTO,
    conversation_entries: list[ConversationEntryDTO],
) -> DetectIntentResponseDTO:
    """Handle a message classified as being about an active notification.

    NotificationContextResolver (Gemini) either answers directly from the
    notification metadata or instructs us to delegate to Dialogflow.

    Fixes vs. the original: removed a discarded
    ``self.notification_mapper.to_text(notification)`` call, a redundant
    function-local ``from datetime import datetime, timedelta`` (both names
    are already module-level imports), and the misuse of the builtin ``any``
    as a type annotation for ``notification``.

    Args:
        request: External request.
        notification: Active notification DTO (has ``parametros``).
        session: Conversation session.
        conversation_entries: Recent conversation history.

    Returns:
        Dialogflow response.
    """
    import json

    user_message = request.mensaje
    telefono = request.usuario.telefono

    notification_json = self.notification_mapper.to_json(notification)
    conversation_history = self.conversation_mapper.to_text_from_entries(
        conversation_entries
    )
    # Only "notification_po_"-prefixed parameters are exposed as metadata.
    metadata = ""
    if notification.parametros:
        filtered_params = {
            key: value
            for key, value in notification.parametros.items()
            if key.startswith("notification_po_")
        }
        metadata = json.dumps(filtered_params, ensure_ascii=False)

    resolution = await self.notification_context_resolver.resolve_context(
        query_input_text=user_message,
        notifications_json=notification_json,
        conversation_json=conversation_history,
        metadata=metadata,
        user_id=session.userId,
        session_id=session.sessionId,
        user_phone_number=telefono,
    )

    if resolution == self.notification_context_resolver.CATEGORY_DIALOGFLOW:
        logger.info(
            "NotificationContextResolver returned DIALOGFLOW. Sending to Dialogflow."
        )
        dialogflow_request = self._build_dialogflow_request(
            request, session, user_message
        )
        # Old sessions get the conversation history injected.
        time_diff = datetime.now() - session.lastModified
        if time_diff >= timedelta(minutes=self.SESSION_RESET_THRESHOLD_MINUTES):
            logger.info(
                f"Session is older than {self.SESSION_RESET_THRESHOLD_MINUTES} minutes. "
                "Injecting conversation history."
            )
            firestore_entries = await self.firestore_service.get_entries(
                session.sessionId
            )
            conversation_history = self.conversation_mapper.to_text_with_limits(
                session, firestore_entries
            )
            dialogflow_request.query_params.parameters[self.CONV_HISTORY_PARAM] = (
                conversation_history
            )
        # Notification parameters always accompany the delegated request.
        if notification.parametros:
            dialogflow_request.query_params.parameters.update(notification.parametros)
        response = await self.dialogflow_client.detect_intent(
            session.sessionId, dialogflow_request
        )
        await self._persist_conversation_turn(session, user_message, response)
        return response

    # Direct LLM answer: stash it under a UUID and fire a Dialogflow event
    # that picks it up.
    logger.info(
        "NotificationContextResolver provided direct answer. Storing in Redis."
    )
    llm_uuid = str(uuid.uuid4())
    await self.llm_response_tuner.set_value(llm_uuid, resolution)
    response = await self.dialogflow_client.detect_intent_event(
        session_id=session.sessionId,
        event_name="LLM_RESPONSE_PROCESSED",
        parameters={"uuid": llm_uuid},
        language_code=self.settings.dialogflow_default_language,
    )
    await self._persist_llm_turn(session, user_message, resolution)
    return response
async def _full_lookup_and_process(
    self,
    request: ExternalConvRequestDTO,
    telefono: str,
    nickname: str | None,
) -> DetectIntentResponseDTO:
    """Fall back to Firestore when Redis has no session for this phone.

    A Firestore hit spawns a new session seeded with the old conversation
    history; a miss creates a brand-new session.

    Args:
        request: External request.
        telefono: User phone number.
        nickname: User nickname, if any.

    Returns:
        Dialogflow response.
    """
    session = await self.firestore_service.get_session_by_phone(telefono)
    if not session:
        logger.info(
            f"No existing session found for {telefono}. Creating new session."
        )
        return await self._create_new_session_and_process(
            request, telefono, nickname
        )

    # Pull the old history, then start a fresh session carrying it.
    old_entries = await self.firestore_service.get_entries(
        session.sessionId,
        limit=self.settings.conversation_context_message_limit,
    )
    new_session_id = SessionIdGenerator.generate()
    new_session = ConversationSessionDTO.create(
        session_id=new_session_id,
        user_id=nickname or telefono,
        telefono=telefono,
    )
    logger.info(f"Creating new session {new_session_id} after full lookup.")
    conversation_history = self.conversation_mapper.to_text_with_limits(
        session, old_entries
    )
    dialogflow_request = self._build_dialogflow_request(
        request, new_session, request.mensaje
    )
    dialogflow_request.query_params.parameters[self.CONV_HISTORY_PARAM] = (
        conversation_history
    )
    return await self._process_dialogflow_request(
        new_session,
        request,
        is_new_session=True,
        dialogflow_request=dialogflow_request,
    )
async def _create_new_session_and_process(
    self,
    request: ExternalConvRequestDTO,
    telefono: str,
    nickname: str | None,
) -> DetectIntentResponseDTO:
    """Create a brand-new session, persist it, then serve the request."""
    session_id = SessionIdGenerator.generate()
    session = ConversationSessionDTO.create(
        session_id=session_id,
        user_id=nickname or telefono,
        telefono=telefono,
    )
    # New sessions are written to both stores up front.
    await self.redis_service.save_session(session)
    await self.firestore_service.save_session(session)
    logger.info(f"Created new session: {session_id} for phone: {telefono}")
    return await self._process_dialogflow_request(
        session, request, is_new_session=True
    )
async def _process_dialogflow_request(
    self,
    session: ConversationSessionDTO,
    request: ExternalConvRequestDTO,
    is_new_session: bool,
    dialogflow_request: DetectIntentRequestDTO | None = None,
) -> DetectIntentResponseDTO:
    """Send a request to Dialogflow and persist the resulting turn.

    Args:
        session: Conversation session to use.
        request: External request being served.
        is_new_session: Whether the session was just created.
        dialogflow_request: Pre-built request; built from ``request`` when None.

    Returns:
        Dialogflow response.
    """
    dialogflow_request = dialogflow_request or self._build_dialogflow_request(
        request, session, request.mensaje
    )
    response = await self.dialogflow_client.detect_intent(
        session.sessionId, dialogflow_request
    )
    await self._persist_conversation_turn(session, request.mensaje, response)
    logger.info(
        f"Successfully processed conversation for session: {session.sessionId}"
    )
    return response
def _is_session_valid(self, session: ConversationSessionDTO) -> bool:
"""Check if session is within 30-minute threshold."""
if not session.lastModified:
return False
time_diff = datetime.now() - session.lastModified
return time_diff < timedelta(minutes=self.SESSION_RESET_THRESHOLD_MINUTES)
def _build_dialogflow_request(
    self,
    external_request: ExternalConvRequestDTO,
    session: ConversationSessionDTO,
    message: str,
) -> DetectIntentRequestDTO:
    """Assemble a detect-intent request carrying the session's context."""
    # Session context always travels as query parameters.
    parameters = {
        "telefono": session.telefono,
        "usuario_id": session.userId,
    }
    if session.pantallaContexto:
        parameters["pantalla_contexto"] = session.pantallaContexto
    return DetectIntentRequestDTO(
        query_input=QueryInputDTO(
            text=TextInputDTO(text=message),
            language_code=self.settings.dialogflow_default_language,
        ),
        query_params=QueryParamsDTO(parameters=parameters),
    )
async def _persist_conversation_turn(
    self,
    session: ConversationSessionDTO,
    user_message: str,
    response: DetectIntentResponseDTO,
) -> None:
    """Persist one user/agent turn using the reactive write-back pattern.

    Saves to Redis first (fast, awaited), then schedules a fire-and-forget
    task that mirrors the data to Firestore. Persistence failures are
    logged but never fail the caller's request.

    Fix vs. the original: the updated session was built as
    ``ConversationSessionDTO(**session.model_dump(), lastMessage=...,
    lastModified=...)``, which raises TypeError ("got multiple values for
    keyword argument") because ``model_dump()`` already contains those
    fields. ``model_copy(update=...)`` applies the changes safely.
    """
    try:
        updated_session = session.model_copy(
            update={"lastMessage": user_message, "lastModified": datetime.now()}
        )
        # Extract the agent-side data from the Dialogflow response.
        response_text = ""
        intent = None
        parameters = None
        if response.queryResult:
            response_text = response.queryResult.text or ""
            intent = response.queryResult.intent
            parameters = response.queryResult.parameters
        user_entry = ConversationEntryDTO(
            entity="USUARIO",
            type="CONVERSACION",
            timestamp=datetime.now(),
            text=user_message,
            parameters=None,
            intent=None,
        )
        agent_entry = ConversationEntryDTO(
            entity="AGENTE",
            type="CONVERSACION",
            timestamp=datetime.now(),
            text=response_text,
            parameters=parameters,
            intent=intent,
        )
        # Redis first: fast and awaited so the hot path stays consistent.
        await self.redis_service.save_session(updated_session)
        await self.redis_service.save_message(session.sessionId, user_entry)
        await self.redis_service.save_message(session.sessionId, agent_entry)

        import asyncio

        async def save_to_firestore():
            """Mirror the turn to Firestore; failures are logged, not raised."""
            try:
                await self.firestore_service.save_session(updated_session)
                await self.firestore_service.save_entry(session.sessionId, user_entry)
                await self.firestore_service.save_entry(session.sessionId, agent_entry)
                logger.debug(
                    f"Asynchronously (Write-Back): Entry successfully saved to Firestore for session: {session.sessionId}"
                )
            except Exception as fs_error:
                logger.error(
                    f"Asynchronously (Write-Back): Failed to save to Firestore for session {session.sessionId}: {str(fs_error)}",
                    exc_info=True,
                )

        # Fire and forget - never block the user's response on Firestore.
        asyncio.create_task(save_to_firestore())
        logger.debug(f"Entry saved to Redis for session: {session.sessionId}")
    except Exception as e:
        # Persistence must never fail the request itself.
        logger.error(
            f"Error persisting conversation turn for session {session.sessionId}: {str(e)}",
            exc_info=True,
        )
async def _persist_llm_turn(
    self,
    session: ConversationSessionDTO,
    user_message: str,
    llm_response: str,
) -> None:
    """Persist one user/LLM turn (write-back: Redis now, Firestore async).

    Fixes vs. the original:
    - the user entry referenced the undefined name ``notification``
      (``parameters=notification.parametros``), a guaranteed NameError at
      runtime; user entries carry no parameters, matching
      ``_persist_conversation_turn``;
    - the updated session is built with ``model_copy(update=...)`` instead
      of ``ConversationSessionDTO(**session.model_dump(), ...)``, which
      raises TypeError for duplicated keyword arguments.
    """
    try:
        updated_session = session.model_copy(
            update={"lastMessage": user_message, "lastModified": datetime.now()}
        )
        user_entry = ConversationEntryDTO(
            entity="USUARIO",
            type="CONVERSACION",
            timestamp=datetime.now(),
            text=user_message,
            parameters=None,
            intent=None,
        )
        llm_entry = ConversationEntryDTO(
            entity="LLM",
            type="LLM",
            timestamp=datetime.now(),
            text=llm_response,
            parameters=None,
            intent=None,
        )
        # Redis first: fast and awaited.
        await self.redis_service.save_session(updated_session)
        await self.redis_service.save_message(session.sessionId, user_entry)
        await self.redis_service.save_message(session.sessionId, llm_entry)

        import asyncio

        async def save_to_firestore():
            """Mirror the turn to Firestore; failures are logged, not raised."""
            try:
                await self.firestore_service.save_session(updated_session)
                await self.firestore_service.save_entry(session.sessionId, user_entry)
                await self.firestore_service.save_entry(session.sessionId, llm_entry)
                logger.debug(
                    f"Asynchronously (Write-Back): LLM entry successfully saved to Firestore for session: {session.sessionId}"
                )
            except Exception as fs_error:
                logger.error(
                    f"Asynchronously (Write-Back): Failed to save LLM entry to Firestore for session {session.sessionId}: {str(fs_error)}",
                    exc_info=True,
                )

        # Fire and forget - never block the user's response on Firestore.
        asyncio.create_task(save_to_firestore())
        logger.debug(f"LLM entry saved to Redis for session: {session.sessionId}")
    except Exception as e:
        # Persistence must never fail the request itself.
        logger.error(
            f"Error persisting LLM turn for session {session.sessionId}: {str(e)}",
            exc_info=True,
        )

View File

@@ -0,0 +1,133 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Data purge service for Redis and Firestore.
"""
import logging
from google.cloud import firestore
from ..config import Settings
from .redis_service import RedisService
logger = logging.getLogger(__name__)
class DataPurgeService:
    """Service for purging all data from Redis and Firestore."""

    def __init__(self, settings: Settings, redis_service: RedisService):
        """Set up the service with its own async Firestore client."""
        self.settings = settings
        self.redis_service = redis_service
        self.db = firestore.AsyncClient(
            project=settings.gcp_project_id,
            database=settings.firestore_database_id,
        )
async def purge_all_data(self) -> None:
    """Wipe Redis, then Firestore; log and re-raise on any failure."""
    try:
        await self._purge_redis()
        await self._purge_firestore()
    except Exception as e:
        logger.error(f"Error purging data: {str(e)}", exc_info=True)
        raise
    else:
        logger.info("Successfully purged all data from Redis and Firestore")
async def _purge_redis(self) -> None:
    """Flush the entire Redis database.

    Raises:
        RuntimeError: If the Redis client is not connected.
    """
    logger.info("Starting Redis data purge")
    try:
        client = self.redis_service.redis
        if not client:
            raise RuntimeError("Redis client not connected")
        await client.flushdb()
        logger.info("Successfully purged all data from Redis")
    except Exception as e:
        logger.error(f"Error purging data from Redis: {str(e)}", exc_info=True)
        raise
async def _purge_firestore(self) -> None:
    """Purge Firestore: message subcollections, conversations, notifications.

    Improvement vs. the original: the three near-identical
    try/except-NOT_FOUND blocks are consolidated — top-level collections go
    through the ``_purge_top_level_collection`` helper.
    """
    logger.info("Starting Firestore data purge")
    try:
        app_id = self.settings.gcp_project_id
        conversations_path = f"artifacts/{app_id}/conversations"
        notifications_path = f"artifacts/{app_id}/notifications"
        # 'mensajes' subcollections must be deleted explicitly: deleting a
        # parent document does not cascade to its subcollections.
        logger.info(
            f"Deleting 'mensajes' sub-collections from '{conversations_path}'"
        )
        try:
            conversations_ref = self.db.collection(conversations_path)
            async for doc in conversations_ref.stream():
                mensajes_ref = doc.reference.collection("mensajes")
                await self._delete_collection(mensajes_ref, 50)
        except Exception as e:
            if "NOT_FOUND" in str(e):
                logger.warning(
                    f"Collection '{conversations_path}' not found, skipping"
                )
            else:
                raise
        await self._purge_top_level_collection(conversations_path)
        await self._purge_top_level_collection(notifications_path)
        logger.info("Successfully purged Firestore collections")
    except Exception as e:
        logger.error(
            f"Error purging Firestore collections: {str(e)}", exc_info=True
        )
        raise

async def _purge_top_level_collection(self, path: str) -> None:
    """Delete one top-level collection; a missing collection is tolerated."""
    logger.info(f"Deleting collection: {path}")
    try:
        await self._delete_collection(self.db.collection(path), 50)
    except Exception as e:
        if "NOT_FOUND" in str(e):
            logger.warning(f"Collection '{path}' not found, skipping")
        else:
            raise
async def _delete_collection(self, coll_ref, batch_size: int) -> None:
    """Delete every document in ``coll_ref``, ``batch_size`` at a time.

    Improvement vs. the original: iterative instead of recursive — the
    original recursed once per batch, which can exceed Python's recursion
    limit on large collections (e.g. >50k documents at batch size 50).

    Args:
        coll_ref: Async Firestore collection reference.
        batch_size: Maximum documents deleted per batch commit.
    """
    while True:
        docs = [doc async for doc in coll_ref.limit(batch_size).stream()]
        if not docs:
            return
        batch = self.db.batch()
        for doc in docs:
            batch.delete(doc.reference)
        await batch.commit()
        # A short batch means the collection is exhausted.
        if len(docs) < batch_size:
            return
async def close(self) -> None:
    """Release the underlying Firestore async client."""
    await self.db.close()

View File

@@ -0,0 +1,285 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Dialogflow CX client service for intent detection.
"""
import logging
from google.cloud.dialogflowcx_v3 import SessionsAsyncClient
from google.cloud.dialogflowcx_v3.types import (
DetectIntentRequest,
QueryInput,
TextInput,
EventInput,
QueryParameters,
)
from google.api_core.exceptions import (
GoogleAPIError,
InternalServerError,
ServiceUnavailable,
)
from tenacity import (
retry,
stop_after_attempt,
wait_exponential,
retry_if_exception_type,
)
from ..config import Settings
from ..models import DetectIntentRequestDTO, DetectIntentResponseDTO, QueryResultDTO
logger = logging.getLogger(__name__)
class DialogflowClientService:
    """Service for interacting with Dialogflow CX API."""

    def __init__(self, settings: Settings):
        """Initialize the async Dialogflow CX sessions client from settings."""
        self.settings = settings
        self.project_id = settings.dialogflow_project_id
        self.location = settings.dialogflow_location
        self.agent_id = settings.dialogflow_agent_id
        self.default_language = settings.dialogflow_default_language
        # The configured endpoint is passed via client options.
        endpoint = settings.dialogflow_endpoint
        self.client = SessionsAsyncClient(
            client_options={"api_endpoint": endpoint}
        )
        logger.info(
            f"Dialogflow CX SessionsClient initialized for endpoint: {endpoint}"
        )
        logger.info(f"Agent ID: {self.agent_id}")
def _build_session_path(self, session_id: str) -> str:
"""Build Dialogflow session path."""
return self.client.session_path(
project=self.project_id,
location=self.location,
agent=self.agent_id,
session=session_id,
)
def _map_query_input(self, query_input_dto) -> QueryInput:
    """Translate a QueryInputDTO into a Dialogflow QueryInput.

    Raises:
        ValueError: If the DTO carries neither text nor event input.
    """
    language_code = query_input_dto.language_code or self.default_language
    text_dto = query_input_dto.text
    if text_dto and text_dto.text:
        return QueryInput(
            text=TextInput(text=text_dto.text),
            language_code=language_code,
        )
    event_dto = query_input_dto.event
    if event_dto and event_dto.event:
        return QueryInput(
            event=EventInput(event=event_dto.event),
            language_code=language_code,
        )
    raise ValueError("Either text or event input must be provided")
def _map_query_params(self, query_params_dto) -> QueryParameters | None:
    """Translate a QueryParamsDTO into Dialogflow QueryParameters (or None)."""
    if query_params_dto and query_params_dto.parameters:
        return QueryParameters(parameters=query_params_dto.parameters)
    return None
def _extract_response_text(self, response) -> str:
"""Extract text from Dialogflow response messages."""
texts = []
for msg in response.query_result.response_messages:
if hasattr(msg, "text") and msg.text.text:
texts.extend(msg.text.text)
return " ".join(texts) if texts else ""
@retry(
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, min=1, max=10),
    retry=retry_if_exception_type((InternalServerError, ServiceUnavailable)),
    reraise=True,
)
async def detect_intent(
    self, session_id: str, request_dto: DetectIntentRequestDTO
) -> DetectIntentResponseDTO:
    """Detect intent from user input using Dialogflow CX.

    Transient server errors are retried up to 3 times with exponential
    backoff (via the decorator).

    Args:
        session_id: Unique session identifier.
        request_dto: Detect intent request.

    Returns:
        Detect intent response with query results.

    Raises:
        ValueError: If session_id or request_dto is missing.
        GoogleAPIError: If the Dialogflow API call fails.
    """
    if not session_id:
        raise ValueError("Session ID cannot be empty")
    if not request_dto:
        raise ValueError("Request DTO cannot be None")
    logger.info(f"Initiating detectIntent for session: {session_id}")
    try:
        # Assemble the native Dialogflow request from the DTOs.
        detect_request = DetectIntentRequest(
            session=self._build_session_path(session_id),
            query_input=self._map_query_input(request_dto.query_input),
            query_params=self._map_query_params(request_dto.query_params),
        )
        logger.debug(
            f"Calling Dialogflow CX detectIntent for session: {session_id}"
        )
        response = await self.client.detect_intent(request=detect_request)
        # Map the native response back to DTOs.
        query_result = response.query_result
        raw_params = (
            dict(query_result.parameters) if query_result.parameters else None
        )
        result = DetectIntentResponseDTO(
            responseId=response.response_id,
            queryResult=QueryResultDTO(
                responseText=self._extract_response_text(response),
                parameters=raw_params,
            ),
        )
        logger.info(
            f"Successfully processed detectIntent for session: {session_id}"
        )
        return result
    except GoogleAPIError as e:
        logger.error(
            f"Dialogflow CX API error for session {session_id}: {e.message}",
            exc_info=True,
        )
        raise
    except Exception as e:
        logger.error(
            f"Unexpected error in detectIntent for session {session_id}: {str(e)}",
            exc_info=True,
        )
        raise
@retry(
stop=stop_after_attempt(3),
wait=wait_exponential(multiplier=1, min=1, max=10),
retry=retry_if_exception_type((InternalServerError, ServiceUnavailable)),
reraise=True,
)
async def detect_intent_event(
self,
session_id: str,
event_name: str,
parameters: dict | None = None,
language_code: str | None = None,
) -> DetectIntentResponseDTO:
"""
Trigger Dialogflow event detection.
Used for notification events and system-triggered flows.
Args:
session_id: Unique session identifier
event_name: Dialogflow event name (e.g., "notificacion")
parameters: Event parameters
language_code: Language code (defaults to settings)
Returns:
Detect intent response
Raises:
GoogleAPIError: If Dialogflow API call fails
"""
if not session_id:
raise ValueError("Session ID cannot be empty")
if not event_name:
raise ValueError("Event name cannot be empty")
lang_code = language_code or self.default_language
logger.info(
f"Triggering Dialogflow event '{event_name}' for session: {session_id}"
)
try:
# Build request
session_path = self._build_session_path(session_id)
query_input = QueryInput(
event=EventInput(event=event_name),
language_code=lang_code,
)
query_params = None
if parameters:
query_params = QueryParameters(parameters=parameters)
detect_request = DetectIntentRequest(
session=session_path,
query_input=query_input,
query_params=query_params,
)
# Call Dialogflow
logger.debug(
f"Calling Dialogflow CX for event '{event_name}' in session: {session_id}"
)
response = await self.client.detect_intent(request=detect_request)
# Extract response data
query_result = response.query_result
response_text = self._extract_response_text(response)
# Map to DTO
query_result_dto = QueryResultDTO(
responseText=response_text,
parameters=dict(query_result.parameters)
if query_result.parameters
else None,
)
result = DetectIntentResponseDTO(
responseId=response.response_id,
queryResult=query_result_dto,
)
logger.info(
f"Successfully processed event '{event_name}' for session: {session_id}"
)
return result
except GoogleAPIError as e:
logger.error(
f"Dialogflow CX API error for event '{event_name}' in session {session_id}: {e.message}",
exc_info=True,
)
raise
except Exception as e:
logger.error(
f"Unexpected error triggering event '{event_name}' for session {session_id}: {str(e)}",
exc_info=True,
)
raise
async def close(self):
"""Close the Dialogflow client."""
await self.client.transport.close()
logger.info("Dialogflow CX SessionsClient closed")

View File

@@ -0,0 +1,199 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Data Loss Prevention service for obfuscating sensitive information.
"""
import logging
import re
from google.cloud import dlp_v2
from google.cloud.dlp_v2 import types
from ..config import Settings
logger = logging.getLogger(__name__)
class DLPService:
    """
    Service for detecting and obfuscating sensitive data using Google Cloud DLP.

    Integrates with the DLP API to scan text for PII and other sensitive
    information, then obfuscates findings based on their info type.
    """

    # Findings must be MORE likely than this (POSSIBLE == 3) to be obfuscated.
    _LIKELIHOOD_THRESHOLD = 3

    # Static replacement tags keyed by DLP info type name. Hoisted to the
    # class so the mapping is built once instead of on every finding.
    _STATIC_REPLACEMENTS = {
        "CREDIT_CARD_EXPIRATION_DATE": "[FECHA_VENCIMIENTO_TARJETA]",
        "FECHA_VENCIMIENTO": "[FECHA_VENCIMIENTO_TARJETA]",
        "CVV_NUMBER": "[CVV]",
        "CVV": "[CVV]",
        "EMAIL_ADDRESS": "[CORREO]",
        "PERSON_NAME": "[NOMBRE]",
        "PHONE_NUMBER": "[TELEFONO]",
        "DIRECCION": "[DIRECCION]",
        "DIR_COLONIA": "[DIRECCION]",
        "DIR_DEL_MUN": "[DIRECCION]",
        "DIR_INTERIOR": "[DIRECCION]",
        "DIR_ESQUINA": "[DIRECCION]",
        "DIR_CIUDAD_EDO": "[DIRECCION]",
        "DIR_CP": "[DIRECCION]",
        "CLABE_INTERBANCARIA": "[CLABE]",
        "CLAVE_RASTREO_SPEI": "[CLAVE_RASTREO]",
        "NIP": "[NIP]",
        "SALDO": "[SALDO]",
        "NUM_ACLARACION": "[NUM_ACLARACION]",
    }

    # Collapses runs of [DIRECCION] tags (comma/space separated) into a
    # single tag. Precompiled once instead of per call.
    _DIRECCION_RUN_RE = re.compile(r"\[DIRECCION\](?:(?:,\s*|\s+)\[DIRECCION\])*")

    def __init__(self, settings: Settings):
        """
        Initialize DLP service.

        Args:
            settings: Application settings
        """
        self.settings = settings
        self.project_id = settings.gcp_project_id
        self.location = settings.gcp_location
        self.dlp_client = dlp_v2.DlpServiceAsyncClient()
        logger.info("DLP Service initialized")

    async def get_obfuscated_string(self, text: str, template_id: str) -> str:
        """
        Inspect text for sensitive data and obfuscate findings.

        Args:
            text: Text to inspect and obfuscate
            template_id: DLP inspect template ID

        Returns:
            Obfuscated text with sensitive data replaced. The original text
            is returned unchanged when empty, when there are no findings,
            or on any DLP API error (best-effort behavior).
        """
        if not text or not text.strip():
            return text
        try:
            # Build content item
            byte_content_item = types.ByteContentItem(
                type_=types.ByteContentItem.BytesType.TEXT_UTF8,
                data=text.encode("utf-8"),
            )
            content_item = types.ContentItem(byte_item=byte_content_item)
            # Build inspect config; max_findings_per_item=0 means "no limit".
            finding_limits = types.InspectConfig.FindingLimits(
                max_findings_per_item=0
            )
            inspect_config = types.InspectConfig(
                min_likelihood=types.Likelihood.VERY_UNLIKELY,
                limits=finding_limits,
                include_quote=True,  # quotes are needed to locate replacements
            )
            # Build request against the configured inspect template
            inspect_template_name = f"projects/{self.project_id}/locations/{self.location}/inspectTemplates/{template_id}"
            parent = f"projects/{self.project_id}/locations/{self.location}"
            request = types.InspectContentRequest(
                parent=parent,
                inspect_template_name=inspect_template_name,
                inspect_config=inspect_config,
                item=content_item,
            )
            # Call DLP API
            response = await self.dlp_client.inspect_content(request=request)
            findings_count = len(response.result.findings)
            logger.info(f"DLP {template_id} Findings: {findings_count}")
            if findings_count > 0:
                return self._obfuscate_text(response, text)
            return text
        except Exception as e:
            # Best-effort: never fail the caller because of a DLP problem.
            logger.error(
                f"Error during DLP inspection: {e}. Returning original text.",
                exc_info=True,
            )
            return text

    def _obfuscate_text(self, response: types.InspectContentResponse, text: str) -> str:
        """
        Obfuscate sensitive findings in text.

        Args:
            response: DLP inspect content response with findings
            text: Original text

        Returns:
            Text with sensitive data obfuscated
        """
        # Keep only findings more likely than POSSIBLE, most likely first.
        findings = [
            finding
            for finding in response.result.findings
            if finding.likelihood.value > self._LIKELIHOOD_THRESHOLD
        ]
        findings.sort(key=lambda f: f.likelihood.value, reverse=True)
        for finding in findings:
            quote = finding.quote
            info_type = finding.info_type.name
            logger.info(
                f"InfoType: {info_type} | Likelihood: {finding.likelihood.value}"
            )
            # str.replace substitutes every occurrence of the quoted value.
            replacement = self._get_replacement(info_type, quote)
            if replacement:
                text = text.replace(quote, replacement)
        # Clean up consecutive DIRECCION tags
        text = self._clean_direccion(text)
        return text

    def _get_replacement(self, info_type: str, quote: str) -> str | None:
        """
        Get replacement text for a given info type.

        Args:
            info_type: DLP info type name
            quote: Original sensitive text

        Returns:
            Replacement text or None to skip
        """
        # Masked card/account numbers keep their last 4 digits; computed
        # lazily so _get_last4 only runs for the matching info types.
        if info_type == "CREDIT_CARD_NUMBER":
            return f"**** **** **** {self._get_last4(quote)}"
        if info_type == "CUENTA":
            return f"**************{self._get_last4(quote)}"
        return self._STATIC_REPLACEMENTS.get(info_type)

    def _get_last4(self, quote: str) -> str:
        """Extract last 4 characters from quote (removing spaces)."""
        clean_quote = quote.strip().replace(" ", "")
        return clean_quote[-4:] if len(clean_quote) >= 4 else clean_quote

    def _clean_direccion(self, text: str) -> str:
        """Collapse consecutive [DIRECCION] tags into a single tag."""
        return self._DIRECCION_RUN_RE.sub("[DIRECCION]", text).strip()

    async def close(self):
        """Close DLP client."""
        await self.dlp_client.transport.close()
        logger.info("DLP client closed")

View File

@@ -0,0 +1,324 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Firestore service for persistent conversation storage.
"""
import logging
from datetime import datetime
from google.cloud import firestore
from ..config import Settings
from ..models import ConversationSessionDTO, ConversationEntryDTO
from ..models.notification import NotificationDTO
logger = logging.getLogger(__name__)
class FirestoreService:
    """Service for Firestore operations on conversations and notifications."""

    def __init__(self, settings: Settings):
        """
        Initialize the async Firestore client and collection paths.

        Args:
            settings: Application settings (project id, database id)
        """
        self.settings = settings
        self.db = firestore.AsyncClient(
            project=settings.gcp_project_id,
            database=settings.firestore_database_id,
        )
        # Conversations live under artifacts/<project>/conversations, each
        # conversation document holding a "mensajes" subcollection.
        self.conversations_collection = (
            f"artifacts/{settings.gcp_project_id}/conversations"
        )
        self.entries_subcollection = "mensajes"
        self.notifications_collection = (
            f"artifacts/{settings.gcp_project_id}/notifications"
        )
        logger.info(
            f"Firestore client initialized for project: {settings.gcp_project_id}"
        )

    async def close(self):
        """Close Firestore client."""
        await self.db.close()
        logger.info("Firestore client closed")

    def _session_ref(self, session_id: str):
        """Get Firestore document reference for session."""
        return self.db.collection(self.conversations_collection).document(session_id)

    async def get_session(self, session_id: str) -> ConversationSessionDTO | None:
        """
        Retrieve conversation session from Firestore by session ID.

        Args:
            session_id: Session document ID

        Returns:
            Session DTO, or None when missing or on error
        """
        try:
            doc_ref = self._session_ref(session_id)
            doc = await doc_ref.get()
            if not doc.exists:
                logger.debug(f"Session not found in Firestore: {session_id}")
                return None
            data = doc.to_dict()
            session = ConversationSessionDTO.model_validate(data)
            logger.debug(f"Retrieved session from Firestore: {session_id}")
            return session
        except Exception as e:
            logger.error(
                f"Error retrieving session {session_id} from Firestore: {str(e)}"
            )
            return None

    async def get_session_by_phone(
        self, telefono: str
    ) -> ConversationSessionDTO | None:
        """
        Retrieve most recent conversation session from Firestore by phone number.

        Args:
            telefono: User phone number

        Returns:
            Most recent session for this phone, or None if not found
        """
        try:
            # BUGFIX: previously queried self.sessions_collection, an attribute
            # that is never defined; the resulting AttributeError was swallowed
            # by the except below, so every phone lookup silently returned None.
            query = (
                self.db.collection(self.conversations_collection)
                .where("telefono", "==", telefono)
                .order_by("lastModified", direction=firestore.Query.DESCENDING)
                .limit(1)
            )
            docs = query.stream()
            async for doc in docs:
                data = doc.to_dict()
                session = ConversationSessionDTO.model_validate(data)
                logger.debug(
                    f"Retrieved session from Firestore for phone {telefono}: {session.sessionId}"
                )
                return session
            logger.debug(f"No session found in Firestore for phone: {telefono}")
            return None
        except Exception as e:
            logger.error(
                f"Error querying session by phone {telefono} from Firestore: {str(e)}"
            )
            return None

    async def save_session(self, session: ConversationSessionDTO) -> bool:
        """
        Save (merge) conversation session to Firestore.

        Returns:
            True on success, False on error
        """
        try:
            doc_ref = self._session_ref(session.sessionId)
            data = session.model_dump()
            # merge=True keeps fields already stored but absent from this dump.
            await doc_ref.set(data, merge=True)
            logger.debug(f"Saved session to Firestore: {session.sessionId}")
            return True
        except Exception as e:
            logger.error(
                f"Error saving session {session.sessionId} to Firestore: {str(e)}"
            )
            return False

    async def save_entry(self, session_id: str, entry: ConversationEntryDTO) -> bool:
        """
        Save conversation entry to the session's "mensajes" subcollection.

        Returns:
            True on success, False on error
        """
        try:
            doc_ref = self._session_ref(session_id)
            entries_ref = doc_ref.collection(self.entries_subcollection)
            # Use timestamp as document ID for chronological ordering
            entry_id = entry.timestamp.isoformat()
            entry_doc = entries_ref.document(entry_id)
            data = entry.model_dump()
            await entry_doc.set(data)
            logger.debug(f"Saved entry to Firestore for session: {session_id}")
            return True
        except Exception as e:
            logger.error(
                f"Error saving entry for session {session_id} to Firestore: {str(e)}"
            )
            return False

    async def get_entries(
        self, session_id: str, limit: int = 10
    ) -> list[ConversationEntryDTO]:
        """
        Retrieve the most recent conversation entries, oldest first.

        Args:
            session_id: Session document ID
            limit: Maximum number of entries to return

        Returns:
            Entries in chronological order; empty list on error
        """
        try:
            doc_ref = self._session_ref(session_id)
            entries_ref = doc_ref.collection(self.entries_subcollection)
            # Fetch the newest `limit` entries...
            query = entries_ref.order_by(
                "timestamp", direction=firestore.Query.DESCENDING
            ).limit(limit)
            docs = query.stream()
            entries = []
            async for doc in docs:
                entry_data = doc.to_dict()
                entry = ConversationEntryDTO.model_validate(entry_data)
                entries.append(entry)
            # ...then reverse so callers receive chronological order.
            entries.reverse()
            logger.debug(f"Retrieved {len(entries)} entries for session: {session_id}")
            return entries
        except Exception as e:
            logger.error(
                f"Error retrieving entries for session {session_id} from Firestore: {str(e)}"
            )
            return []

    async def delete_session(self, session_id: str) -> bool:
        """
        Delete conversation session and all its entries from Firestore.

        Returns:
            True on success, False on error
        """
        try:
            doc_ref = self._session_ref(session_id)
            # Delete all entries first (subcollections are not removed when
            # the parent document is deleted).
            entries_ref = doc_ref.collection(self.entries_subcollection)
            async for doc in entries_ref.stream():
                await doc.reference.delete()
            # Delete session document
            await doc_ref.delete()
            logger.debug(f"Deleted session from Firestore: {session_id}")
            return True
        except Exception as e:
            logger.error(
                f"Error deleting session {session_id} from Firestore: {str(e)}"
            )
            return False

    # ====== Notification Methods ======

    def _notification_ref(self, notification_id: str):
        """Get Firestore document reference for notification."""
        return self.db.collection(self.notifications_collection).document(
            notification_id
        )

    async def save_or_append_notification(self, new_entry: NotificationDTO) -> None:
        """
        Save or append notification entry to Firestore.

        The user's phone number doubles as the notification-session document ID.

        Args:
            new_entry: Notification entry to save

        Raises:
            ValueError: If phone number is missing
            Exception: Propagates Firestore errors after logging
        """
        phone_number = new_entry.telefono
        if not phone_number or not phone_number.strip():
            raise ValueError("Phone number is required to manage notification entries")
        # Use phone number as document ID
        notification_session_id = phone_number
        try:
            doc_ref = self._notification_ref(notification_session_id)
            doc = await doc_ref.get()
            entry_dict = new_entry.model_dump()
            # NOTE(review): datetime.now() is naive local time — confirm
            # whether UTC-aware timestamps are expected in Firestore.
            if doc.exists:
                # Append to existing session
                await doc_ref.update(
                    {
                        "notificaciones": firestore.ArrayUnion([entry_dict]),
                        "ultimaActualizacion": datetime.now(),
                    }
                )
                logger.info(
                    f"Successfully appended notification entry to session {notification_session_id} in Firestore"
                )
            else:
                # Create new notification session
                new_session_data = {
                    "sessionId": notification_session_id,
                    "telefono": phone_number,
                    "fechaCreacion": datetime.now(),
                    "ultimaActualizacion": datetime.now(),
                    "notificaciones": [entry_dict],
                }
                await doc_ref.set(new_session_data)
                logger.info(
                    f"Successfully created new notification session {notification_session_id} in Firestore"
                )
        except Exception as e:
            logger.error(
                f"Error saving notification to Firestore for phone {phone_number}: {str(e)}",
                exc_info=True,
            )
            raise

    async def update_notification_status(self, session_id: str, status: str) -> None:
        """
        Update the status of all notifications in a session.

        Args:
            session_id: Notification session ID (phone number)
            status: New status value

        Raises:
            Exception: Propagates Firestore errors after logging
        """
        try:
            doc_ref = self._notification_ref(session_id)
            doc = await doc_ref.get()
            if not doc.exists:
                logger.warning(
                    f"Notification session {session_id} not found in Firestore. Cannot update status"
                )
                return
            session_data = doc.to_dict()
            notifications = session_data.get("notificaciones", [])
            # Rewrite every notification with the new status.
            updated_notifications = [
                {**notif, "status": status} for notif in notifications
            ]
            await doc_ref.update(
                {
                    "notificaciones": updated_notifications,
                    "ultimaActualizacion": datetime.now(),
                }
            )
            logger.info(
                f"Successfully updated notification status to '{status}' for session {session_id} in Firestore"
            )
        except Exception as e:
            logger.error(
                f"Error updating notification status in Firestore for session {session_id}: {str(e)}",
                exc_info=True,
            )
            raise

    async def delete_notification(self, notification_id: str) -> bool:
        """
        Delete notification session from Firestore.

        Returns:
            True on success, False on error
        """
        try:
            logger.info(
                f"Deleting notification session {notification_id} from Firestore"
            )
            doc_ref = self._notification_ref(notification_id)
            await doc_ref.delete()
            logger.info(
                f"Successfully deleted notification session {notification_id} from Firestore"
            )
            return True
        except Exception as e:
            logger.error(
                f"Error deleting notification session {notification_id} from Firestore: {str(e)}",
                exc_info=True,
            )
            return False

View File

@@ -0,0 +1,100 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Gemini client service for LLM operations.
"""
import logging
import google.generativeai as genai
from ..config import Settings
logger = logging.getLogger(__name__)
class GeminiClientException(Exception):
    """Raised when a Gemini API call fails or yields no usable content."""
class GeminiClientService:
    """Async wrapper around the Google Gemini generative API."""

    def __init__(self, settings: Settings):
        """Store settings and configure the Gemini SDK."""
        self.settings = settings
        # Configure the Gemini API
        genai.configure()
        logger.info("Gemini client initialized successfully")

    async def generate_content(
        self,
        prompt: str,
        temperature: float,
        max_output_tokens: int,
        model_name: str,
        top_p: float,
    ) -> str:
        """
        Generate content using Gemini API.

        Args:
            prompt: The prompt text to send to Gemini
            temperature: Sampling temperature (0.0 to 1.0)
            max_output_tokens: Maximum number of tokens to generate
            model_name: Gemini model name (e.g., "gemini-2.0-flash-exp")
            top_p: Top-p sampling parameter

        Returns:
            Generated text response from Gemini

        Raises:
            GeminiClientException: If the API call fails or no content comes back
        """
        try:
            logger.debug(f"Sending request to Gemini model '{model_name}'")
            # Build the model with its per-call generation settings.
            model = genai.GenerativeModel(
                model_name=model_name,
                generation_config=genai.GenerationConfig(
                    temperature=temperature,
                    max_output_tokens=max_output_tokens,
                    top_p=top_p,
                ),
            )
            response = await model.generate_content_async(prompt)
            if not response or not response.text:
                logger.warning(
                    f"Gemini returned no content or unexpected response structure for model '{model_name}'"
                )
                # Raised inside the try so it is wrapped below, matching the
                # single error surface callers expect.
                raise GeminiClientException(
                    "No content generated or unexpected response structure"
                )
            logger.debug(
                f"Received response from Gemini: {len(response.text)} characters"
            )
            return response.text
        except Exception as e:
            logger.error(
                f"Error during Gemini content generation for model '{model_name}': {e}",
                exc_info=True,
            )
            raise GeminiClientException(
                f"An error occurred during content generation: {str(e)}"
            ) from e

View File

@@ -0,0 +1,105 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
LLM Response Tuner service for storing/retrieving pre-generated responses.
"""
import logging
from redis.asyncio import Redis
logger = logging.getLogger(__name__)
class LlmResponseTunerService:
    """
    Service for managing pre-generated LLM responses in Redis.

    Used as a webhook bridge where:
    1. LLM responses are pre-generated and stored under a UUID
    2. The Dialogflow webhook calls this service with that UUID
    3. The service retrieves and returns the stored response
    """

    def __init__(self, redis: Redis):
        """
        Initialize LLM response tuner service.

        Args:
            redis: Redis client instance
        """
        self.redis = redis
        self.collection_prefix = "llm-pre-response:"
        self.ttl = 3600  # 1 hour in seconds
        logger.info("LlmResponseTunerService initialized")

    def _get_key(self, uuid: str) -> str:
        """Build the namespaced Redis key for a response UUID."""
        return f"{self.collection_prefix}{uuid}"

    async def get_value(self, uuid: str) -> str | None:
        """
        Retrieve pre-generated response by UUID.

        Args:
            uuid: Unique identifier for the response

        Returns:
            Response text, or None when missing or on Redis error
        """
        if not uuid or not uuid.strip():
            logger.warning("UUID is null or blank")
            return None
        try:
            stored = await self.redis.get(self._get_key(uuid))
        except Exception as e:
            logger.error(
                f"Error retrieving LLM response for UUID {uuid}: {e}", exc_info=True
            )
            return None
        if stored:
            logger.info(f"Retrieved LLM response for UUID: {uuid}")
            return stored
        logger.warning(f"No response found for UUID: {uuid}")
        return None

    async def set_value(self, uuid: str, value: str) -> bool:
        """
        Store pre-generated response with UUID.

        Args:
            uuid: Unique identifier for the response
            value: Response text to store

        Returns:
            True if successful, False otherwise
        """
        if not uuid or not uuid.strip():
            logger.warning("UUID is null or blank")
            return False
        if not value:
            logger.warning("Value is null or empty")
            return False
        try:
            # SETEX applies the TTL atomically with the write.
            await self.redis.setex(self._get_key(uuid), self.ttl, value)
        except Exception as e:
            logger.error(
                f"Error storing LLM response for UUID {uuid}: {e}", exc_info=True
            )
            return False
        logger.info(f"Stored LLM response for UUID: {uuid} with TTL: {self.ttl}s")
        return True

View File

@@ -0,0 +1,229 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Mappers for converting DTOs to text format for Gemini API.
"""
import json
import logging
from datetime import datetime, timedelta
from ..models import (
ConversationSessionDTO,
ConversationEntryDTO,
)
from ..models.notification import NotificationDTO
logger = logging.getLogger(__name__)
class NotificationContextMapper:
    """Maps notifications to text format for Gemini classification."""

    @staticmethod
    def to_text(notification: NotificationDTO) -> str:
        """Return the notification's text, or "" when absent."""
        if notification and notification.texto:
            return notification.texto
        return ""

    @staticmethod
    def to_text_multiple(notifications: list[NotificationDTO]) -> str:
        """Join the non-blank notification texts with newlines."""
        if not notifications:
            return ""
        return "\n".join(
            n.texto for n in notifications if n.texto and n.texto.strip()
        )

    @staticmethod
    def to_json(notification: NotificationDTO) -> str:
        """
        Serialize a notification to a JSON string for Gemini prompts.

        Args:
            notification: Notification DTO

        Returns:
            JSON representation ("{}" when the notification is missing)
        """
        if not notification:
            return "{}"
        payload = {
            "texto": notification.texto,
            "parametros": notification.parametros or {},
            "timestamp": notification.timestampCreacion.isoformat(),
        }
        return json.dumps(payload, ensure_ascii=False)
class ConversationContextMapper:
    """Maps conversation history to text format for Gemini."""

    # Business-rule caps on how much history is sent to the model.
    MESSAGE_LIMIT = 60  # Maximum 60 messages
    DAYS_LIMIT = 30  # Maximum 30 days
    MAX_HISTORY_BYTES = 50 * 1024  # 50 KB maximum size
    NOTIFICATION_TEXT_PARAM = "notification_text"

    def __init__(self, message_limit: int = 60, days_limit: int = 30):
        """
        Initialize conversation context mapper.

        Args:
            message_limit: Maximum number of messages to include
            days_limit: Maximum age of messages in days
        """
        self.message_limit = message_limit
        self.days_limit = days_limit

    def to_text_from_entries(self, entries: list[ConversationEntryDTO]) -> str:
        """Render entries as newline-separated "<Role>: <text>" lines."""
        if not entries:
            return ""
        return "\n".join(self._format_entry(e) for e in entries)

    def to_text_with_limits(
        self,
        session: ConversationSessionDTO,
        entries: list[ConversationEntryDTO],
    ) -> str:
        """
        Render conversation history with the business limits applied:
        age (days_limit), count (message_limit), and size (50 KB).

        Args:
            session: Conversation session
            entries: List of conversation entries

        Returns:
            Formatted conversation history with limits applied
        """
        if not entries:
            return ""
        # Drop anything older than the configured window.
        # NOTE(review): datetime.now() is naive — assumes entry timestamps are
        # naive as well; confirm against stored data.
        cutoff = datetime.now() - timedelta(days=self.days_limit)
        eligible = [e for e in entries if e.timestamp and e.timestamp >= cutoff]
        # Chronological order, keeping only the newest `message_limit` entries.
        eligible.sort(key=lambda e: e.timestamp)
        return self._to_text_with_truncation(eligible[-self.message_limit :])

    def _to_text_with_truncation(self, entries: list[ConversationEntryDTO]) -> str:
        """
        Format entries, keeping only the newest lines that fit in 50 KB.

        Args:
            entries: List of conversation entries

        Returns:
            Formatted text, truncated (oldest-first) if necessary
        """
        if not entries:
            return ""
        lines = [self._format_entry(e) for e in entries]
        kept: list[str] = []
        used = 0
        # Walk newest -> oldest so the most recent context survives truncation.
        for line in reversed(lines):
            candidate = line + "\n"
            size = len(candidate.encode("utf-8"))
            if used + size > self.MAX_HISTORY_BYTES:
                break
            kept.insert(0, candidate)
            used += size
        return "".join(kept).strip()

    def _format_entry(self, entry: ConversationEntryDTO) -> str:
        """
        Format one conversation entry based on its entity.

        Returns:
            Formatted string (e.g., "User: hello", "Agent: hi there")
        """
        entity = entry.entity
        if entity == "AGENTE":
            # Agent messages may carry trailing JSON payloads; strip them.
            return "Agent: " + self._clean_agent_message(entry.text)
        if entity == "SISTEMA":
            content = entry.text
            # Prefer the notification text parameter when present and non-blank.
            params = entry.parameters
            if params and self.NOTIFICATION_TEXT_PARAM in params:
                candidate = params[self.NOTIFICATION_TEXT_PARAM]
                if candidate and str(candidate).strip():
                    content = str(candidate)
            return "System: " + content
        if entity == "LLM":
            return "System: " + entry.text
        return "User: " + entry.text

    def _clean_agent_message(self, message: str) -> str:
        """Strip a trailing {...} JSON artifact from an agent message."""
        import re

        return re.sub(r"\s*\{.*\}\s*$", "", message).strip()

View File

@@ -0,0 +1,156 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Message classification service using Gemini LLM.
"""
import logging
from ..config import Settings
from .gemini_client import GeminiClientService, GeminiClientException
logger = logging.getLogger(__name__)
class MessageEntryFilter:
    """
    Classifies a user's text input into a predefined category using Gemini.

    The user query is evaluated against the conversation history and any
    pending notifications to decide whether the message continues the ongoing
    dialogue or interrupts it; the resulting category is used to route the
    request to the appropriate handler.
    """

    # Classification categories
    CATEGORY_CONVERSATION = "CONVERSATION"
    CATEGORY_NOTIFICATION = "NOTIFICATION"
    CATEGORY_UNKNOWN = "UNKNOWN"
    CATEGORY_ERROR = "ERROR"

    def __init__(self, settings: Settings, gemini_service: GeminiClientService):
        """
        Initialize message filter.

        Args:
            settings: Application settings
            gemini_service: Gemini client service
        """
        self.settings = settings
        self.gemini_service = gemini_service
        self.prompt_template = self._load_prompt_template()
        logger.info("MessageEntryFilter initialized successfully")

    def _load_prompt_template(self) -> str:
        """Read the classification prompt template from resources."""
        prompt_path = self.settings.base_path / self.settings.message_filter_prompt_path
        try:
            with open(prompt_path, "r", encoding="utf-8") as f:
                template = f.read()
        except Exception as e:
            logger.error(
                f"Failed to load prompt template from '{prompt_path}': {e}",
                exc_info=True,
            )
            raise RuntimeError("Could not load prompt template") from e
        logger.info(f"Successfully loaded prompt template from '{prompt_path}'")
        return template

    async def classify_message(
        self,
        query_input_text: str,
        notifications_json: str | None = None,
        conversation_json: str | None = None,
    ) -> str:
        """
        Classify a user message as CONVERSATION, NOTIFICATION, or UNKNOWN.

        Args:
            query_input_text: The user's input text to classify
            notifications_json: JSON string of interrupting notifications (optional)
            conversation_json: JSON string of conversation history (optional)

        Returns:
            Classification category (CONVERSATION, NOTIFICATION, or UNKNOWN)
        """
        if not query_input_text or not query_input_text.strip():
            logger.warning(
                f"Query input text for classification is null or blank. Returning {self.CATEGORY_UNKNOWN}"
            )
            return self.CATEGORY_UNKNOWN
        # Fall back to sentinel strings when either context is missing.
        notifications_block = (
            notifications_json
            if notifications_json and notifications_json.strip()
            else "No interrupting notification."
        )
        history_block = (
            conversation_json
            if conversation_json and conversation_json.strip()
            else "No conversation history."
        )
        # Template placeholders are positional: history, notification, query.
        prompt = self.prompt_template % (
            history_block,
            notifications_block,
            query_input_text,
        )
        logger.debug(
            f"Sending classification request to Gemini for input (first 100 chars): "
            f"'{query_input_text[:100]}...'"
        )
        try:
            gemini_response = await self.gemini_service.generate_content(
                prompt=prompt,
                temperature=self.settings.message_filter_temperature,
                max_output_tokens=self.settings.message_filter_max_tokens,
                model_name=self.settings.message_filter_model,
                top_p=self.settings.message_filter_top_p,
            )
        except GeminiClientException as e:
            logger.error(
                f"Error during Gemini content generation for message classification: {e}",
                exc_info=True,
            )
            return self.CATEGORY_UNKNOWN
        except Exception as e:
            logger.error(
                f"Unexpected error during message classification: {e}",
                exc_info=True,
            )
            return self.CATEGORY_UNKNOWN
        if not gemini_response:
            logger.warning(
                f"Gemini returned null/blank response. Returning {self.CATEGORY_UNKNOWN}"
            )
            return self.CATEGORY_UNKNOWN
        normalized = gemini_response.strip().upper()
        if normalized in (self.CATEGORY_CONVERSATION, self.CATEGORY_NOTIFICATION):
            logger.info(f"Classified as {normalized}")
            return normalized
        logger.warning(
            f"Gemini returned unrecognized classification: '{gemini_response}'. "
            f"Expected '{self.CATEGORY_CONVERSATION}' or '{self.CATEGORY_NOTIFICATION}'. "
            f"Returning {self.CATEGORY_UNKNOWN}"
        )
        return self.CATEGORY_UNKNOWN

View File

@@ -0,0 +1,192 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Notification context resolver using Gemini LLM.
"""
import logging
from ..config import Settings
from .gemini_client import GeminiClientService, GeminiClientException
logger = logging.getLogger(__name__)
class NotificationContextResolver:
    """
    Resolves conversational context using LLM to answer notification-related questions.

    Evaluates the user's question against notification metadata and the
    conversation history, then either produces a direct LLM answer or the
    sentinel ``DIALOGFLOW`` telling the caller to fall back to Dialogflow.
    """

    # Sentinel meaning "delegate this query to the standard Dialogflow flow".
    CATEGORY_DIALOGFLOW = "DIALOGFLOW"

    def __init__(self, settings: Settings, gemini_service: GeminiClientService):
        """
        Initialize notification context resolver.

        Args:
            settings: Application settings
            gemini_service: Gemini client service

        Raises:
            RuntimeError: If the prompt template file cannot be read.
        """
        self.settings = settings
        self.gemini_service = gemini_service
        # Generation parameters, with defaults matching the Java implementation.
        self.model_name = getattr(
            settings, "notification_context_model", "gemini-2.0-flash-001"
        )
        self.temperature = getattr(settings, "notification_context_temperature", 0.1)
        self.max_tokens = getattr(settings, "notification_context_max_tokens", 1024)
        self.top_p = getattr(settings, "notification_context_top_p", 0.1)
        self.prompt_path = getattr(
            settings,
            "notification_context_prompt_path",
            "prompts/notification_context_resolver.txt",
        )
        self.prompt_template = self._load_prompt_template()
        logger.info("NotificationContextResolver initialized successfully")

    def _load_prompt_template(self) -> str:
        """Read the prompt template from disk; raise RuntimeError on any failure."""
        prompt_path = self.settings.base_path / self.prompt_path
        try:
            template = prompt_path.read_text(encoding="utf-8")
        except Exception as e:
            logger.error(
                f"Failed to load prompt template from '{prompt_path}': {e}",
                exc_info=True,
            )
            raise RuntimeError("Could not load prompt template") from e
        logger.info(f"Successfully loaded prompt template from '{prompt_path}'")
        return template

    async def resolve_context(
        self,
        query_input_text: str,
        notifications_json: str | None = None,
        conversation_json: str | None = None,
        metadata: str | None = None,
        user_id: str | None = None,
        session_id: str | None = None,
        user_phone_number: str | None = None,
    ) -> str:
        """
        Answer a notification-related question, or delegate to Dialogflow.

        The prompt instructs Gemini to answer from (in priority order)
        METADATOS_NOTIFICACION, then "notification_po_"-prefixed parameters
        in HISTORIAL_CONVERSACION, and otherwise to reply "DIALOGFLOW".

        Args:
            query_input_text: User's question
            notifications_json: JSON string of notifications
            conversation_json: JSON string of conversation history
            metadata: Structured notification metadata
            user_id: User identifier (optional, for logging)
            session_id: Session identifier (optional, for logging)
            user_phone_number: User phone number (optional, for logging)

        Returns:
            A direct LLM-generated answer, or "DIALOGFLOW" to delegate.
        """
        logger.debug(
            f"resolveContext -> queryInputText: {query_input_text}, "
            f"notificationsJson: {notifications_json}, "
            f"conversationJson: {conversation_json}, "
            f"metadata: {metadata}"
        )
        if not (query_input_text and query_input_text.strip()):
            logger.warning(
                f"Query input text for context resolution is null or blank. "
                f"Returning {self.CATEGORY_DIALOGFLOW}"
            )
            return self.CATEGORY_DIALOGFLOW

        def non_blank(value: str | None, fallback: str) -> str:
            # Substitute a fallback marker when a context piece is missing.
            return value if value and value.strip() else fallback

        # Template slots: history, notification content, metadata, question.
        context_prompt = self.prompt_template % (
            non_blank(conversation_json, "No conversation history."),
            non_blank(notifications_json, "No metadata in notification."),
            non_blank(metadata, "{}"),
            query_input_text,
        )
        logger.debug(
            f"Sending context resolution request to Gemini for input (first 100 chars): "
            f"'{query_input_text[:100]}...'"
        )
        try:
            llm_output = await self.gemini_service.generate_content(
                prompt=context_prompt,
                temperature=self.temperature,
                max_output_tokens=self.max_tokens,
                model_name=self.model_name,
                top_p=self.top_p,
            )
            if not (llm_output and llm_output.strip()):
                logger.warning(
                    f"Gemini returned a null or blank response. "
                    f"Returning {self.CATEGORY_DIALOGFLOW}"
                )
                return self.CATEGORY_DIALOGFLOW
            if llm_output.strip().upper() == self.CATEGORY_DIALOGFLOW:
                logger.debug(
                    f"Resolved to {self.CATEGORY_DIALOGFLOW}. Input: '{query_input_text}'"
                )
                return self.CATEGORY_DIALOGFLOW
            # The model produced a direct answer from the notification context.
            logger.debug(
                f"Resolved to a specific response. Input: '{query_input_text}'"
            )
            return llm_output
        except GeminiClientException as e:
            logger.error(
                f"Error during Gemini content generation for context resolution: {e}",
                exc_info=True,
            )
            return self.CATEGORY_DIALOGFLOW
        except Exception as e:
            logger.error(
                f"Unexpected error during context resolution: {e}",
                exc_info=True,
            )
            return self.CATEGORY_DIALOGFLOW

View File

@@ -0,0 +1,259 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Notification manager service for processing push notifications.
"""
import logging
from datetime import datetime
from ..config import Settings
from ..models import DetectIntentResponseDTO
from ..models.notification import ExternalNotRequestDTO, NotificationDTO
from ..models.conversation import ConversationSessionDTO, ConversationEntryDTO
from ..utils.session_id import generate_session_id
from .dialogflow_client import DialogflowClientService
from .redis_service import RedisService
from .firestore_service import FirestoreService
from .dlp_service import DLPService
logger = logging.getLogger(__name__)
# Prefix applied to each hidden notification parameter ("parametros_ocultos")
# key before it is forwarded to Dialogflow and stored with the conversation.
PREFIX_PO_PARAM = "notification_po_"
class NotificationManagerService:
    """
    Manages notification processing and integration with conversations.

    Handles push notifications from external systems, stores them in
    Redis/Firestore, and triggers Dialogflow event detection.
    """

    def __init__(
        self,
        settings: Settings,
        dialogflow_client: DialogflowClientService,
        redis_service: RedisService,
        firestore_service: FirestoreService,
        dlp_service: DLPService,
    ):
        """
        Initialize notification manager.

        Args:
            settings: Application settings
            dialogflow_client: Dialogflow CX client
            redis_service: Redis caching service
            firestore_service: Firestore persistence service
            dlp_service: Data Loss Prevention service
        """
        self.settings = settings
        self.dialogflow_client = dialogflow_client
        self.redis_service = redis_service
        self.firestore_service = firestore_service
        self.dlp_service = dlp_service
        self.default_language_code = settings.dialogflow_default_language
        # Name of the Dialogflow CX event triggered for every notification.
        self.event_name = "notificacion"
        logger.info("NotificationManagerService initialized")

    async def process_notification(
        self, external_request: ExternalNotRequestDTO
    ) -> DetectIntentResponseDTO:
        """
        Process a push notification from an external system.

        Flow:
        1. Validate phone number
        2. Obfuscate sensitive data via DLP
        3. Create the notification entry and save it to Redis, then Firestore
        4. Get or create a conversation session with a SISTEMA entry
        5. Trigger the Dialogflow notification event

        Args:
            external_request: External notification request

        Returns:
            Dialogflow detect intent response

        Raises:
            ValueError: If phone number is missing
        """
        telefono = external_request.telefono
        if not telefono or not telefono.strip():
            logger.warning("No phone number provided in notification request")
            raise ValueError("Phone number is required")
        # Obfuscate sensitive data using DLP
        obfuscated_text = await self.dlp_service.get_obfuscated_string(
            external_request.texto,
            self.settings.dlp_template_complete_flow,
        )
        # Hidden parameters are forwarded to Dialogflow under a reserved prefix.
        parameters = {}
        if external_request.parametros_ocultos:
            for key, value in external_request.parametros_ocultos.items():
                parameters[f"{PREFIX_PO_PARAM}{key}"] = value
        # Create notification entry
        new_notification_id = generate_session_id()
        new_notification_entry = NotificationDTO(
            idNotificacion=new_notification_id,
            telefono=telefono,
            timestampCreacion=datetime.now(),
            texto=obfuscated_text,
            nombreEventoDialogflow=self.event_name,
            codigoIdiomaDialogflow=self.default_language_code,
            parametros=parameters,
            status="active",
        )
        # Save notification to Redis (write-through cache)
        await self.redis_service.save_or_append_notification(new_notification_entry)
        logger.info(
            f"Notification for phone {telefono} cached. Kicking off async Firestore write-back"
        )
        # Firestore write-back; failures are logged but never block the flow.
        # NOTE: this is awaited sequentially today — consider asyncio.create_task()
        # with proper error handling for true fire-and-forget semantics.
        try:
            await self.firestore_service.save_or_append_notification(
                new_notification_entry
            )
            logger.debug(
                f"Notification entry persisted to Firestore for phone {telefono}"
            )
        except Exception as e:
            logger.error(
                f"Background: Error during notification persistence to Firestore for phone {telefono}: {e}",
                exc_info=True,
            )
        # Get or create conversation session
        session = await self._get_or_create_conversation_session(
            telefono, obfuscated_text, parameters
        )
        # Send notification event to Dialogflow
        logger.info(
            f"Sending notification text to Dialogflow using conversation session: {session.sessionId}"
        )
        response = await self.dialogflow_client.detect_intent_event(
            session_id=session.sessionId,
            event_name=self.event_name,
            parameters=parameters,
            language_code=self.default_language_code,
        )
        logger.info(
            f"Finished processing notification. Dialogflow response received for phone {telefono}"
        )
        return response

    async def _get_or_create_conversation_session(
        self, telefono: str, notification_text: str, parameters: dict
    ) -> ConversationSessionDTO:
        """
        Get existing conversation session or create a new one.

        Also persists a SISTEMA entry recording the notification text.

        Args:
            telefono: User phone number
            notification_text: Notification text content
            parameters: Notification parameters

        Returns:
            Conversation session
        """
        # TODO: look up an existing session by phone (needs Redis support);
        # until then a fresh session is created per notification.
        new_session_id = generate_session_id()
        user_id = f"user_by_phone_{telefono}"
        logger.info(
            f"Creating new conversation session {new_session_id} for notification (phone: {telefono})"
        )
        # System entry documenting the notification inside the conversation.
        system_entry = ConversationEntryDTO(
            entity="SISTEMA",
            type="SISTEMA",
            timestamp=datetime.now(),
            text=notification_text,
            parameters=parameters,
            intent=None,
        )
        # Create new session
        new_session = ConversationSessionDTO(
            sessionId=new_session_id,
            userId=user_id,
            telefono=telefono,
            createdAt=datetime.now(),
            lastModified=datetime.now(),
            lastMessage=notification_text,
            pantallaContexto=None,
        )
        # Persist conversation turn (session + system entry)
        await self._persist_conversation_turn(new_session, system_entry)
        return new_session

    async def _persist_conversation_turn(
        self, session: ConversationSessionDTO, entry: ConversationEntryDTO
    ) -> None:
        """
        Persist a conversation turn to Redis and Firestore.

        Write-through caching: writes to Redis first, then to Firestore
        (Firestore failures are logged, never raised).

        Args:
            session: Conversation session
            entry: Conversation entry to persist
        """
        logger.debug(
            f"Starting Write-Back persistence for notification session {session.sessionId}. "
            f"Type: {entry.type}. Writing to Redis first"
        )
        # Refresh last-message bookkeeping on the session.
        # FIX: the previous code did
        #   ConversationSessionDTO(**session.model_dump(), lastMessage=..., lastModified=...)
        # which raises TypeError ("got multiple values for keyword argument")
        # because model_dump() already contains lastMessage/lastModified.
        # model_copy(update=...) builds the updated copy without the collision.
        updated_session = session.model_copy(
            update={"lastMessage": entry.text, "lastModified": datetime.now()}
        )
        # Save to Redis
        await self.redis_service.save_session(updated_session)
        logger.info(
            f"Entry saved to Redis for notification session {session.sessionId}. "
            f"Type: {entry.type}. Kicking off async Firestore write-back"
        )
        # Firestore write-back; errors are logged and swallowed.
        try:
            await self.firestore_service.save_session(updated_session)
            await self.firestore_service.save_entry(session.sessionId, entry)
            logger.debug(
                f"Asynchronously (Write-Back): Entry successfully saved to Firestore "
                f"for notification session {session.sessionId}. Type: {entry.type}"
            )
        except Exception as e:
            logger.error(
                f"Asynchronously (Write-Back): Failed to save entry to Firestore "
                f"for notification session {session.sessionId}. Type: {entry.type}: {e}",
                exc_info=True,
            )

View File

@@ -0,0 +1,98 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Quick Reply content service for loading FAQ screens.
"""
import json
import logging
from ..config import Settings
from ..models.quick_replies import QuickReplyDTO, QuestionDTO
logger = logging.getLogger(__name__)
class QuickReplyContentService:
    """Service for loading quick reply screen content from JSON files."""

    def __init__(self, settings: Settings):
        """
        Initialize quick reply content service.

        Args:
            settings: Application settings
        """
        self.settings = settings
        # Screen definitions live as <screen_id>.json under this directory.
        self.quick_replies_path = settings.base_path / "quick_replies"
        logger.info(
            f"QuickReplyContentService initialized with path: {self.quick_replies_path}"
        )

    async def get_quick_replies(self, screen_id: str) -> QuickReplyDTO | None:
        """
        Load quick reply screen content by ID.

        Args:
            screen_id: Screen identifier (e.g., "pagos", "home")

        Returns:
            The parsed quick reply DTO, a placeholder "empty" DTO when
            screen_id is blank, or None when the file is missing/unparseable.
        """
        if not (screen_id and screen_id.strip()):
            logger.warning("screen_id is null or empty. Returning empty quick replies")
            return QuickReplyDTO(
                header="empty",
                body=None,
                button=None,
                header_section=None,
                preguntas=[],
            )
        file_path = self.quick_replies_path / f"{screen_id}.json"
        try:
            # EAFP: attempt the read; a missing file maps to the warning below.
            data = json.loads(file_path.read_text(encoding="utf-8"))
            questions = [
                QuestionDTO(
                    titulo=item.get("titulo", ""),
                    descripcion=item.get("descripcion"),
                    respuesta=item.get("respuesta", ""),
                )
                for item in data.get("preguntas", [])
            ]
            quick_reply = QuickReplyDTO(
                header=data.get("header"),
                body=data.get("body"),
                button=data.get("button"),
                header_section=data.get("header_section"),
                preguntas=questions,
            )
            logger.info(
                f"Successfully loaded {len(questions)} quick replies for screen: {screen_id}"
            )
            return quick_reply
        except FileNotFoundError:
            logger.warning(f"Quick reply file not found: {file_path}")
            return None
        except json.JSONDecodeError as e:
            logger.error(f"Error parsing JSON file {file_path}: {e}", exc_info=True)
            return None
        except Exception as e:
            logger.error(
                f"Error loading quick replies for screen {screen_id}: {e}",
                exc_info=True,
            )
            return None

View File

@@ -0,0 +1,373 @@
"""
Copyright 2025 Google. This software is provided as-is, without warranty or
representation for any use or purpose. Your use of it is subject to your
agreement with Google.
Redis service for caching conversation sessions.
"""
import json
import logging
from datetime import datetime
from redis.asyncio import Redis
from ..config import Settings
from ..models import ConversationSessionDTO
from ..models.notification import NotificationSessionDTO, NotificationDTO
logger = logging.getLogger(__name__)
class RedisService:
"""Service for Redis operations on conversation sessions."""
def __init__(self, settings: Settings):
    """Store settings and prepare a disconnected client with 30-day TTLs."""
    self.settings = settings
    # The connection is opened lazily via connect().
    self.redis: Redis | None = None
    # 30 days, expressed in seconds, for both sessions and notifications.
    self.session_ttl = 30 * 24 * 60 * 60
    self.notification_ttl = 30 * 24 * 60 * 60
async def connect(self):
    """Open the Redis connection using host/port/auth from settings."""
    cfg = self.settings
    self.redis = Redis(
        host=cfg.redis_host,
        port=cfg.redis_port,
        password=cfg.redis_password,
        ssl=cfg.redis_ssl,
        decode_responses=True,  # return str instead of bytes
    )
    logger.info(
        f"Connected to Redis at {cfg.redis_host}:{cfg.redis_port}"
    )
async def close(self):
    """Close the Redis connection if one was opened."""
    if self.redis is None:
        return
    await self.redis.close()
    logger.info("Redis connection closed")
def _session_key(self, session_id: str) -> str:
"""Generate Redis key for conversation session."""
return f"conversation:session:{session_id}"
def _phone_to_session_key(self, phone: str) -> str:
"""Generate Redis key for phone-to-session mapping."""
return f"conversation:phone:{phone}"
async def get_session(
    self, session_id_or_phone: str
) -> ConversationSessionDTO | None:
    """
    Look up a conversation session by session ID or phone number.

    The phone-to-session mapping is consulted first; when no mapping
    exists, the argument is treated as a session ID directly.

    Args:
        session_id_or_phone: Either a session ID or phone number

    Returns:
        The cached session, or None when absent or undeserializable.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    # Resolve a possible phone number to its mapped session ID.
    mapped = await self.redis.get(self._phone_to_session_key(session_id_or_phone))
    session_id = mapped or session_id_or_phone
    data = await self.redis.get(self._session_key(session_id))
    if not data:
        logger.debug(f"Session not found in Redis: {session_id_or_phone}")
        return None
    try:
        session = ConversationSessionDTO.model_validate(json.loads(data))
    except Exception as e:
        logger.error(f"Error deserializing session {session_id}: {str(e)}")
        return None
    logger.debug(f"Retrieved session from Redis: {session_id}")
    return session
async def save_session(self, session: ConversationSessionDTO) -> bool:
    """
    Write a session to Redis under its session key with the standard TTL,
    plus a phone-to-session mapping so it can be found by phone number.

    Returns:
        True on success, False when either write failed.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    key = self._session_key(session.sessionId)
    phone_key = self._phone_to_session_key(session.telefono)
    try:
        # Session payload, serialized with the model's declared field names.
        await self.redis.setex(
            key, self.session_ttl, session.model_dump_json(by_alias=False)
        )
        # Phone lookup index pointing back at the session.
        await self.redis.setex(phone_key, self.session_ttl, session.sessionId)
        logger.debug(
            f"Saved session to Redis: {session.sessionId} for phone: {session.telefono}"
        )
        return True
    except Exception as e:
        logger.error(f"Error saving session {session.sessionId} to Redis: {str(e)}")
        return False
async def delete_session(self, session_id: str) -> bool:
    """
    Delete a conversation session (and its phone mapping) from Redis.

    FIX: save_session creates both the session key and a phone-to-session
    mapping, but the previous implementation only deleted the session key,
    leaving a dangling mapping that made phone lookups resolve to a deleted
    session. The mapping is now removed as well.

    Args:
        session_id: The session ID to remove.

    Returns:
        True if the session key existed and was deleted, False otherwise.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    key = self._session_key(session_id)
    try:
        # Fetch the session first so the phone mapping can be cleaned up.
        session = await self.get_session(session_id)
        if session and session.telefono:
            await self.redis.delete(self._phone_to_session_key(session.telefono))
        result = await self.redis.delete(key)
        logger.debug(f"Deleted session from Redis: {session_id}")
        return result > 0
    except Exception as e:
        logger.error(f"Error deleting session {session_id} from Redis: {str(e)}")
        return False
async def exists(self, session_id: str) -> bool:
    """Return True if the session key is present in Redis."""
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    count = await self.redis.exists(self._session_key(session_id))
    return count > 0
# ====== Message Methods ======
def _messages_key(self, session_id: str) -> str:
"""Generate Redis key for conversation messages."""
return f"conversation:messages:{session_id}"
async def save_message(self, session_id: str, message) -> bool:
    """
    Persist one conversation message in the session's sorted set.

    The member is the message's JSON and the score is its timestamp in
    milliseconds, so a range read returns messages chronologically.

    Args:
        session_id: The session ID
        message: ConversationMessageDTO or ConversationEntryDTO

    Returns:
        True if successful, False otherwise
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    key = self._messages_key(session_id)
    try:
        payload = message.model_dump_json(by_alias=False)
        score = message.timestamp.timestamp() * 1000  # epoch milliseconds
        await self.redis.zadd(key, {payload: score})
        # Keep message history alive exactly as long as the session itself.
        await self.redis.expire(key, self.session_ttl)
        logger.debug(f"Saved message to Redis: {session_id}")
        return True
    except Exception as e:
        logger.error(f"Error saving message to Redis for session {session_id}: {str(e)}")
        return False
async def get_messages(self, session_id: str) -> list:
    """
    Fetch all conversation messages for a session, oldest first.

    Args:
        session_id: The session ID

    Returns:
        List of message dicts parsed from JSON; [] when none or on error.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    key = self._messages_key(session_id)
    try:
        # Sorted-set members come back ordered by score (timestamp).
        raw_members = await self.redis.zrange(key, 0, -1)
        if not raw_members:
            logger.debug(f"No messages found in Redis for session: {session_id}")
            return []
        parsed = []
        for raw in raw_members:
            try:
                parsed.append(json.loads(raw))
            except json.JSONDecodeError as e:
                # Skip corrupt members instead of failing the whole read.
                logger.error(f"Error parsing message JSON: {str(e)}")
        logger.debug(f"Retrieved {len(parsed)} messages from Redis for session: {session_id}")
        return parsed
    except Exception as e:
        logger.error(f"Error retrieving messages from Redis for session {session_id}: {str(e)}")
        return []
# ====== Notification Methods ======
def _notification_key(self, session_id: str) -> str:
"""Generate Redis key for notification session."""
return f"notification:{session_id}"
def _phone_to_notification_key(self, phone: str) -> str:
"""Generate Redis key for phone-to-notification mapping."""
return f"notification:phone_to_notification:{phone}"
async def save_or_append_notification(self, new_entry: NotificationDTO) -> None:
    """
    Append a notification to the phone's session, creating it if needed.

    The phone number doubles as the notification session ID.

    Args:
        new_entry: Notification entry to save

    Raises:
        ValueError: If phone number is missing
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    phone_number = new_entry.telefono
    if not (phone_number and phone_number.strip()):
        raise ValueError("Phone number is required to manage notification entries")
    notification_session_id = phone_number
    existing = await self.get_notification_session(notification_session_id)
    now = datetime.now()
    if existing is None:
        created_at = now
        notifications = [new_entry]
    else:
        # Preserve the original creation time; only extend the entry list.
        created_at = existing.fecha_creacion
        notifications = existing.notificaciones + [new_entry]
    # NOTE(review): constructed here with snake_case kwargs, but
    # _cache_notification_session reads camelCase attrs (session.sessionId);
    # verify NotificationSessionDTO declares matching field aliases.
    updated = NotificationSessionDTO(
        session_id=notification_session_id,
        telefono=phone_number,
        fecha_creacion=created_at,
        ultima_actualizacion=now,
        notificaciones=notifications,
    )
    await self._cache_notification_session(updated)
async def _cache_notification_session(
    self, session: NotificationSessionDTO
) -> bool:
    """
    Write a notification session and its phone mapping to Redis with TTL.

    NOTE(review): this reads camelCase attrs (session.sessionId) while the
    DTO is constructed elsewhere with snake_case kwargs (session_id=...);
    verify the DTO declares matching aliases.

    Returns:
        True on success, False when either write failed.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    key = self._notification_key(session.sessionId)
    phone_key = self._phone_to_notification_key(session.telefono)
    try:
        await self.redis.setex(
            key, self.notification_ttl, session.model_dump_json(by_alias=False)
        )
        await self.redis.setex(phone_key, self.notification_ttl, session.sessionId)
        logger.debug(f"Cached notification session: {session.sessionId}")
        return True
    except Exception as e:
        logger.error(
            f"Error caching notification session {session.sessionId}: {str(e)}"
        )
        return False
async def get_notification_session(
    self, session_id: str
) -> NotificationSessionDTO | None:
    """Fetch and deserialize a notification session, or None if absent/corrupt."""
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    data = await self.redis.get(self._notification_key(session_id))
    if not data:
        logger.debug(f"Notification session not found in Redis: {session_id}")
        return None
    try:
        session = NotificationSessionDTO.model_validate(json.loads(data))
    except Exception as e:
        logger.error(
            f"Error deserializing notification session {session_id}: {str(e)}"
        )
        return None
    logger.info(f"Notification session {session_id} retrieved from Redis")
    return session
async def get_notification_id_for_phone(self, phone: str) -> str | None:
    """Resolve a phone number to its notification session ID, if mapped."""
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    session_id = await self.redis.get(self._phone_to_notification_key(phone))
    if not session_id:
        logger.debug("Session ID not found for phone")
        return session_id
    logger.info(f"Session ID {session_id} found for phone")
    return session_id
async def delete_notification_session(self, phone_number: str) -> bool:
    """
    Remove both the notification session and its phone mapping.

    Returns:
        True on success, False when deletion raised an error.
    """
    if not self.redis:
        raise RuntimeError("Redis client not connected")
    try:
        logger.info(f"Deleting notification session for phone {phone_number}")
        await self.redis.delete(self._notification_key(phone_number))
        await self.redis.delete(self._phone_to_notification_key(phone_number))
    except Exception as e:
        logger.error(
            f"Error deleting notification session for phone {phone_number}: {str(e)}"
        )
        return False
    return True