Coverage for src / moai_adk / hooks / session_start / core_cleanup.py: 0.00%
98 statements
« prev ^ index » next coverage.py v7.12.0, created at 2025-11-20 20:52 +0900
"""Core cleanup module for session_start hook.

Handles file system cleanup operations including old file removal and stats tracking.

Responsibilities:
- Clean old files from reports, cache, and temp directories
- Delete files based on age and count limits
- Track and update cleanup statistics
"""
import json
import logging
import shutil
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)
class CleanupError(Exception):
    """Raised when a cleanup operation cannot be completed."""
def cleanup_old_files(config: Dict[str, Any]) -> Dict[str, int]:
    """Clean old files from the .moai reports, cache, and temp directories.

    Reads the ``auto_cleanup`` section of *config* (keys: ``enabled``,
    ``cleanup_days``, ``max_reports``) and delegates per-directory work
    to :func:`cleanup_directory`.

    Args:
        config: Configuration dictionary

    Returns:
        Dictionary with cleanup statistics:
        {
            'reports_cleaned': int,
            'cache_cleaned': int,
            'temp_cleaned': int,
            'total_cleaned': int
        }

    Raises:
        CleanupError: If cleanup operations fail
    """
    stats = dict.fromkeys(
        ("reports_cleaned", "cache_cleaned", "temp_cleaned", "total_cleaned"), 0
    )

    try:
        options = config.get("auto_cleanup", {})
        if not options.get("enabled", True):
            # Cleanup disabled: report zero work done.
            return stats

        cutoff_date = datetime.now() - timedelta(days=options.get("cleanup_days", 7))

        # (stats key, directory, count limit, glob patterns) per target.
        # Cache and temp have no count limit; reports keep at most max_reports.
        targets = (
            ("reports_cleaned", Path(".moai/reports"),
             options.get("max_reports", 10), ["*.json", "*.md"]),
            ("cache_cleaned", Path(".moai/cache"), None, ["*"]),
            ("temp_cleaned", Path(".moai/temp"), None, ["*"]),
        )
        for key, directory, limit, patterns in targets:
            if directory.exists():
                stats[key] = cleanup_directory(directory, cutoff_date, limit, patterns)

        stats["total_cleaned"] = (
            stats["reports_cleaned"]
            + stats["cache_cleaned"]
            + stats["temp_cleaned"]
        )

        logger.info(f"Cleanup completed: {stats['total_cleaned']} files removed")

    except Exception as e:
        logger.error(f"File cleanup failed: {e}")
        raise CleanupError(f"Failed to cleanup old files: {e}") from e

    return stats
def cleanup_directory(
    directory: Path,
    cutoff_date: datetime,
    max_files: Optional[int],
    patterns: List[str],
) -> int:
    """Clean files in a directory based on age and count.

    Entries older than ``cutoff_date`` are always deleted.  When
    ``max_files`` is set, the oldest of the remaining fresh entries are
    also deleted until at most ``max_files`` survive.

    Args:
        directory: Target directory path
        cutoff_date: Files older than this date will be deleted
        max_files: Maximum number of files to keep (None for unlimited)
        patterns: File patterns to match (e.g., ["*.json", "*.md"])

    Returns:
        Number of files deleted

    Raises:
        CleanupError: If cleanup operations fail
    """
    if not directory.exists():
        return 0

    cleaned_count = 0

    try:
        # Collect entries matching any pattern; a set removes duplicates
        # when patterns overlap.
        candidates = set()
        for pattern in patterns:
            candidates.update(directory.glob(pattern))

        # Oldest first, so the count-limit pass removes the oldest extras.
        files_to_check = sorted(candidates, key=lambda f: f.stat().st_mtime)

        # Count fresh (newer-than-cutoff) entries ONCE up front and keep the
        # tally updated as deletions happen.  The previous implementation
        # rebuilt this list inside the loop for every file, costing O(n^2)
        # stat() calls on large directories; the result is identical.
        fresh_remaining = sum(
            1
            for f in files_to_check
            if datetime.fromtimestamp(f.stat().st_mtime) >= cutoff_date
        )

        for file_path in files_to_check:
            try:
                if not file_path.exists():
                    # May have vanished (or been removed as part of a
                    # deleted parent directory) since collection.
                    continue

                file_mtime = datetime.fromtimestamp(file_path.stat().st_mtime)
                is_fresh = file_mtime >= cutoff_date

                # Delete stale entries unconditionally; delete fresh ones
                # only while the fresh population still exceeds max_files.
                should_delete = (not is_fresh) or (
                    max_files is not None and fresh_remaining > max_files
                )

                if should_delete:
                    deleted = False
                    if file_path.is_file():
                        file_path.unlink()
                        deleted = True
                    elif file_path.is_dir():
                        shutil.rmtree(file_path)
                        deleted = True
                    if deleted:
                        cleaned_count += 1
                        if is_fresh:
                            fresh_remaining -= 1

            except OSError as e:
                # Best-effort: skip entries we cannot delete (a failed
                # deletion leaves fresh_remaining unchanged, matching the
                # old rescan behavior).
                logger.warning(f"Failed to delete {file_path}: {e}")
                continue
            except Exception as e:
                logger.warning(f"Unexpected error deleting {file_path}: {e}")
                continue

    except Exception as e:
        logger.error(f"Directory cleanup failed for {directory}: {e}")
        raise CleanupError(
            f"Failed to cleanup directory {directory}: {e}"
        ) from e

    logger.debug(f"Cleaned {cleaned_count} files from {directory}")
    return cleaned_count
def update_cleanup_stats(cleanup_stats: Dict[str, int]) -> None:
    """Persist today's cleanup statistics to a rolling JSON history file.

    Maintains ``.moai/cache/cleanup_stats.json`` with one entry per day,
    pruned to the last 30 days on every update.

    Args:
        cleanup_stats: Statistics from cleanup operation

    Raises:
        CleanupError: If unable to write statistics
    """
    try:
        stats_path = Path(".moai/cache/cleanup_stats.json")
        stats_path.parent.mkdir(exist_ok=True, parents=True)

        # Start from whatever history already exists on disk.
        history: Dict[str, Any] = {}
        if stats_path.exists():
            with open(stats_path, "r", encoding="utf-8") as f:
                history = json.load(f)

        # Record today's entry (a second run on the same day overwrites it).
        now = datetime.now()
        today = now.strftime("%Y-%m-%d")
        history[today] = {
            "cleaned_files": cleanup_stats["total_cleaned"],
            "reports_cleaned": cleanup_stats["reports_cleaned"],
            "cache_cleaned": cleanup_stats["cache_cleaned"],
            "temp_cleaned": cleanup_stats["temp_cleaned"],
            "timestamp": now.isoformat(),
        }

        # Prune anything older than 30 days; keys that do not parse as
        # dates are silently dropped.
        cutoff_date = now - timedelta(days=30)
        pruned: Dict[str, Any] = {}
        for day_key, day_stats in history.items():
            try:
                if datetime.strptime(day_key, "%Y-%m-%d") >= cutoff_date:
                    pruned[day_key] = day_stats
            except ValueError:
                continue

        with open(stats_path, "w", encoding="utf-8") as f:
            json.dump(pruned, f, indent=2, ensure_ascii=False)

        logger.info(f"Cleanup stats updated: {today}")

    except Exception as e:
        logger.error(f"Failed to update cleanup stats: {e}")
        raise CleanupError(f"Failed to update cleanup stats: {e}") from e