Coverage for src/dataknobs_bots/middleware/logging.py: 24%
55 statements
« prev ^ index » next — coverage.py v7.13.0, created at 2025-12-16 10:13 -0700
"""Logging middleware for conversation tracking."""

import json
import logging
from datetime import datetime, timezone
from typing import Any

from dataknobs_bots.bot.context import BotContext

from .base import Middleware

# Module-level logger; NOTE(review): the class below uses its own child
# logger (ConversationLogger) — this one appears unused in the visible code.
logger = logging.getLogger(__name__)
class LoggingMiddleware(Middleware):
    """Middleware for tracking conversation interactions.

    Logs all user messages and bot responses with context
    for monitoring, debugging, and analytics.

    Attributes:
        log_level: Logging level to use (default: INFO)
        include_metadata: Whether to include full context metadata
        json_format: Whether to output logs in JSON format

    Example:
        ```python
        # Basic usage
        middleware = LoggingMiddleware()

        # With JSON format for log aggregation
        middleware = LoggingMiddleware(
            log_level="INFO",
            include_metadata=True,
            json_format=True
        )
        ```
    """

    def __init__(
        self,
        log_level: str = "INFO",
        include_metadata: bool = True,
        json_format: bool = False,
    ):
        """Initialize logging middleware.

        Args:
            log_level: Logging level (DEBUG, INFO, WARNING, ERROR)
            include_metadata: Whether to log full context metadata
            json_format: Whether to output in JSON format
        """
        self.log_level = log_level
        self.include_metadata = include_metadata
        self.json_format = json_format
        # Dedicated child logger so conversation logs can be routed/filtered
        # separately from the module's own diagnostics.
        self._logger = logging.getLogger(f"{__name__}.ConversationLogger")
        # An invalid level name raises AttributeError here, surfacing
        # misconfiguration at construction time rather than at log time.
        self._logger.setLevel(getattr(logging, log_level.upper()))

    def _base_payload(self, event: str, context: BotContext) -> dict:
        """Build the log fields common to every event type."""
        return {
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "event": event,
            "client_id": context.client_id,
            "user_id": context.user_id,
            "conversation_id": context.conversation_id,
        }

    def _attach_metadata(self, log_data: dict, context: BotContext) -> None:
        """Add session/request metadata in place when include_metadata is set."""
        if self.include_metadata:
            log_data["session_metadata"] = context.session_metadata
            log_data["request_metadata"] = context.request_metadata

    def _emit(self, prefix: str, log_data: dict) -> None:
        """Emit log_data at INFO level, as JSON or as a prefixed repr."""
        if self.json_format:
            self._logger.info(json.dumps(log_data))
        else:
            # Lazy %-args defer message formatting until the record is emitted.
            self._logger.info("%s: %s", prefix, log_data)

    async def before_message(self, message: str, context: BotContext) -> None:
        """Called before processing user message.

        Args:
            message: User's input message
            context: Bot context with conversation and user info
        """
        log_data = self._base_payload("user_message", context)
        log_data["message_length"] = len(message)
        self._attach_metadata(log_data, context)
        self._emit("User message", log_data)

        # Log content at DEBUG level (first 200 chars)
        self._logger.debug("Message content: %s...", message[:200])

    async def after_message(
        self, response: str, context: BotContext, **kwargs: Any
    ) -> None:
        """Called after generating bot response.

        Args:
            response: Bot's generated response
            context: Bot context
            **kwargs: Additional data (e.g., tokens_used, response_time_ms)
        """
        log_data = self._base_payload("bot_response", context)
        log_data["response_length"] = len(response)

        # Copy through optional metrics only when the caller supplied them.
        for key in ("tokens_used", "response_time_ms", "provider", "model"):
            if key in kwargs:
                log_data[key] = kwargs[key]

        self._attach_metadata(log_data, context)
        self._emit("Bot response", log_data)

        # Log content at DEBUG level (first 200 chars)
        self._logger.debug("Response content: %s...", response[:200])

    async def post_stream(
        self, message: str, response: str, context: BotContext
    ) -> None:
        """Called after streaming response completes.

        Args:
            message: Original user message
            response: Complete accumulated response from streaming
            context: Bot context
        """
        log_data = self._base_payload("stream_complete", context)
        log_data["message_length"] = len(message)
        log_data["response_length"] = len(response)
        self._attach_metadata(log_data, context)
        self._emit("Stream complete", log_data)

        # Log content at DEBUG level (first 200 chars each)
        self._logger.debug("Streamed message: %s...", message[:200])
        self._logger.debug("Streamed response: %s...", response[:200])

    async def on_error(
        self, error: Exception, message: str, context: BotContext
    ) -> None:
        """Called when an error occurs during message processing.

        Args:
            error: The exception that occurred
            message: User message that caused the error
            context: Bot context
        """
        log_data = self._base_payload("error", context)
        log_data["error_type"] = type(error).__name__
        log_data["error_message"] = str(error)

        # NOTE: session/request metadata is intentionally not attached to
        # error events, matching the original behavior.
        if self.json_format:
            self._logger.error(json.dumps(log_data), exc_info=error)
        else:
            self._logger.error("Error processing message: %s", log_data, exc_info=error)