Coverage for src / moai_adk / utils / common.py: 0.00%
157 statements
« prev ^ index » next — coverage.py v7.12.0, created at 2025-11-20 20:52 +0900
1"""
2Common Utilities
3Common utility functions
4"""
6import asyncio
7import json
8import logging
9import re
10from dataclasses import dataclass, field
11from datetime import datetime
12from pathlib import Path
13from typing import Dict, List, Optional
14from urllib.parse import urlparse
16import aiohttp
18logger = logging.getLogger(__name__)
@dataclass
class HTTPResponse:
    """HTTP response data.

    Captures the outcome of a single HTTP request: status, timing, and an
    optional error description for failed requests.
    """

    status_code: int  # 0 when the request never produced a response
    url: str
    load_time: float  # seconds spent waiting for the response
    success: bool
    error_message: Optional[str] = None
    # Optional so callers may pass timestamp=None explicitly;
    # __post_init__ normalizes that to the current time, so after
    # construction the field is always a datetime.
    timestamp: Optional[datetime] = field(default_factory=datetime.now)

    def __post_init__(self) -> None:
        # Guard against an explicit timestamp=None from callers.
        if self.timestamp is None:
            self.timestamp = datetime.now()
class HTTPClient:
    """HTTP client utility"""

    def __init__(self, max_concurrent: int = 5, timeout: int = 10):
        self.max_concurrent = max_concurrent
        self.timeout = timeout
        self.session: Optional[aiohttp.ClientSession] = None

    async def __aenter__(self):
        """Open the underlying aiohttp session on context entry."""
        self.session = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(limit=self.max_concurrent),
            timeout=aiohttp.ClientTimeout(total=self.timeout),
            headers={
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
            },
        )
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Close the session on context exit."""
        if self.session is not None:
            await self.session.close()

    async def fetch_url(self, url: str) -> HTTPResponse:
        """Fetch a single URL and wrap the outcome in an HTTPResponse.

        Never raises: timeouts, client errors and unexpected failures are
        all reported through the returned object's error_message.
        """
        if self.session is None:
            return HTTPResponse(
                status_code=0,
                url=url,
                load_time=0,
                success=False,
                error_message="Session not initialized",
            )
        try:
            started = asyncio.get_event_loop().time()
            async with self.session.get(url, allow_redirects=True) as resp:
                elapsed = asyncio.get_event_loop().time() - started
                return HTTPResponse(
                    status_code=resp.status,
                    url=str(resp.url),
                    load_time=elapsed,
                    success=200 <= resp.status < 300,
                )
        except asyncio.TimeoutError:
            # Report the configured timeout as the elapsed time.
            return HTTPResponse(
                status_code=0,
                url=url,
                load_time=self.timeout,
                success=False,
                error_message="Request timeout",
            )
        except aiohttp.ClientError as e:
            return HTTPResponse(
                status_code=0,
                url=url,
                load_time=0.0,
                success=False,
                error_message=f"HTTP client error: {str(e)}",
            )
        except Exception as e:
            return HTTPResponse(
                status_code=0,
                url=url,
                load_time=0.0,
                success=False,
                error_message=f"Unexpected error: {str(e)}",
            )

    async def fetch_urls(self, urls: List[str]) -> List[HTTPResponse]:
        """Fetch multiple URLs concurrently within a fresh session."""
        async with self:
            return await asyncio.gather(*(self.fetch_url(u) for u in urls))
def extract_links_from_text(text: str, base_url: Optional[str] = None) -> List[str]:
    """Extract HTTP(S) links from *text*.

    Finds markdown-style links ``[text](url)`` — resolving relative targets
    against *base_url* when given — plus any bare ``http(s)://`` URLs.

    Args:
        text: Text to scan for links.
        base_url: Base used to absolutize relative markdown targets.

    Returns:
        Unique links in first-seen order.
    """
    links: List[str] = []

    # Markdown link pattern: [text](url)
    for _label, url in re.findall(r"\[([^\]]+)\]\(([^)]+)\)", text):
        # Convert relative URLs to absolute URLs
        if url.startswith(("http://", "https://")):
            links.append(url)
        elif base_url and url.startswith("/"):
            links.append(f"{base_url}{url}")
        elif base_url and not url.startswith(("http://", "https://", "#")):
            links.append(f"{base_url}/{url.rstrip('/')}")

    # General URL pattern (bare URLs anywhere in the text)
    links.extend(re.findall(r'https?://[^\s<>"\'()]+', text))

    logging.getLogger(__name__).info("Found %d links in text", len(links))
    # dict.fromkeys dedupes while preserving first-seen order, unlike the
    # previous set() round-trip which returned links in arbitrary order.
    return list(dict.fromkeys(links))
def is_valid_url(url: str) -> bool:
    """Return True when *url* parses with both a scheme and a network location."""
    try:
        parsed = urlparse(url)
    except Exception:
        return False
    return bool(parsed.scheme) and bool(parsed.netloc)
def create_report_path(base_path: Path, suffix: str = "report") -> Path:
    """Build a timestamped markdown report path under *base_path*."""
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    return base_path / f"{suffix}_{stamp}.md"
def format_duration(seconds: float) -> str:
    """Render a duration in the most natural unit: ms, s, m+s, or h+m."""
    if seconds < 1:
        return f"{seconds * 1000:.0f}ms"
    if seconds < 60:
        return f"{seconds:.1f}s"
    if seconds < 3600:
        whole_minutes, leftover = divmod(seconds, 60)
        return f"{int(whole_minutes)}m {leftover:.0f}s"
    whole_hours, remainder = divmod(seconds, 3600)
    return f"{int(whole_hours)}h {int(remainder // 60)}m"
def calculate_score(
    values: List[float], weights: Optional[List[float]] = None
) -> float:
    """Weighted mean of *values*; plain mean when *weights* is omitted.

    Raises:
        ValueError: if *weights* is given with a different length than *values*.
    """
    if not values:
        return 0.0

    effective = [1.0] * len(values) if weights is None else weights
    if len(effective) != len(values):
        raise ValueError("Values and weights must have the same length")

    denominator = sum(effective)
    if denominator <= 0:
        return 0.0
    return sum(v * w for v, w in zip(values, effective)) / denominator
def get_summary_stats(numbers: List[float]) -> Dict[str, float]:
    """Return mean/min/max and sample standard deviation of *numbers*.

    An empty input yields all zeros; a single value yields std 0.0.
    """
    if not numbers:
        return {"mean": 0.0, "min": 0.0, "max": 0.0, "std": 0.0}

    count = len(numbers)
    average = sum(numbers) / count

    # Sample (Bessel-corrected) standard deviation; undefined for n == 1.
    spread = 0.0
    if count > 1:
        spread = (sum((x - average) ** 2 for x in numbers) / (count - 1)) ** 0.5

    return {
        "mean": average,
        "min": min(numbers),
        "max": max(numbers),
        "std": spread,
    }
class RateLimiter:
    """Request rate limiter"""

    def __init__(self, max_requests: int = 10, time_window: int = 60):
        self.max_requests = max_requests
        self.time_window = time_window
        self.requests: List[datetime] = []

    def _prune(self, now: datetime) -> None:
        """Drop request records older than the sliding time window."""
        self.requests = [
            stamp
            for stamp in self.requests
            if (now - stamp).total_seconds() < self.time_window
        ]

    def can_make_request(self) -> bool:
        """Return True while the current window still has capacity."""
        self._prune(datetime.now())
        return len(self.requests) < self.max_requests

    def add_request(self):
        """Record one request; raise RateLimitError when over the limit."""
        if not self.can_make_request():
            raise RateLimitError(
                f"Rate limit exceeded: {self.max_requests} requests per {self.time_window}s"
            )
        self.requests.append(datetime.now())

    async def wait_if_needed(self):
        """Sleep just long enough for the oldest record to leave the window."""
        if self.can_make_request():
            return
        earliest = min(self.requests)
        pause = self.time_window - (datetime.now() - earliest).total_seconds()
        if pause > 0:
            logger.info(f"Rate limiting: waiting {pause:.1f}s")
            await asyncio.sleep(pause)
class RateLimitError(Exception):
    """Raised when a RateLimiter has exhausted its request budget."""
def load_hook_timeout() -> int:
    """
    Load Hook timeout setting from .moai/config/config.json

    Returns:
        int: timeout value (milliseconds), returns default 5000 if not configured
    """
    default_ms = 5000
    config_path = Path(".moai/config/config.json")
    try:
        if not config_path.exists():
            return default_ms
        config = json.loads(config_path.read_text(encoding="utf-8"))
        # timeout_ms lives under the "hooks" section of the config.
        return int(config.get("hooks", {}).get("timeout_ms", default_ms))
    except (json.JSONDecodeError, FileNotFoundError, KeyError, ValueError):
        logger.warning("Failed to load hook timeout from config, using default 5000ms")
        return default_ms
def get_graceful_degradation() -> bool:
    """
    Load graceful_degradation setting from .moai/config/config.json

    Returns:
        bool: graceful_degradation setting value, returns default True if not configured
    """
    config_path = Path(".moai/config/config.json")
    try:
        if not config_path.exists():
            return True
        settings = json.loads(config_path.read_text(encoding="utf-8"))
        # graceful_degradation lives under the "hooks" section of the config.
        return settings.get("hooks", {}).get("graceful_degradation", True)
    except (json.JSONDecodeError, FileNotFoundError, KeyError):
        logger.warning(
            "Failed to load graceful_degradation from config, using default True"
        )
        return True