Coverage for .claude/hooks/moai/lib/json_utils.py: 100.00%

83 statements  

« prev     ^ index     » next       coverage.py v7.11.3, created at 2025-11-19 08:00 +0900

1#!/usr/bin/env python3 

2"""JSON Utilities for Alfred Hooks 

3 

4Provides consistent JSON handling, validation, and serialization across all hooks. 

5""" 

6 

7import json 

8import sys 

9from pathlib import Path 

10from typing import Any, Dict, List, Optional, Union 

11 

12 

class JSONUtils:
    """Helpers for uniform JSON parsing, serialization, and validation in hooks."""

    @staticmethod
    def read_json_from_stdin() -> Dict[str, Any]:
        """Parse the JSON payload delivered on stdin.

        Returns:
            The decoded object, or an empty dict when stdin is blank.

        Raises:
            json.JSONDecodeError: If stdin holds malformed JSON.
        """
        raw = sys.stdin.read()
        return json.loads(raw) if raw.strip() else {}

    @staticmethod
    def safe_json_loads(json_str: str, default: Optional[Any] = None) -> Union[Dict[str, Any], Any]:
        """Parse a JSON string without raising on bad input.

        Args:
            json_str: Candidate JSON text.
            default: Value to return when input is blank or unparsable;
                ``None`` maps to an empty dict.

        Returns:
            The decoded value, or the fallback.
        """
        fallback = {} if default is None else default
        if not json_str.strip():
            return fallback
        try:
            return json.loads(json_str)
        except json.JSONDecodeError:
            return fallback

    @staticmethod
    def safe_json_load_file(file_path: Path, default: Optional[Any] = None) -> Union[Dict[str, Any], Any]:
        """Load JSON from a file without raising on I/O or parse errors.

        Args:
            file_path: Location of the JSON document.
            default: Value to return when the file is missing or unreadable;
                ``None`` maps to an empty dict.

        Returns:
            The decoded value, or the fallback.
        """
        try:
            if file_path.exists():
                with file_path.open('r', encoding='utf-8') as handle:
                    return json.load(handle)
        except (json.JSONDecodeError, IOError, OSError):
            # Best-effort read: any failure falls through to the fallback.
            pass
        return {} if default is None else default

    @staticmethod
    def write_json_to_file(data: Dict[str, Any], file_path: Path, indent: int = 2) -> bool:
        """Serialize ``data`` to ``file_path``, creating parent dirs as needed.

        Args:
            data: Mapping to serialize.
            file_path: Destination path.
            indent: Indentation width for the output.

        Returns:
            True on success, False on serialization or filesystem errors.
        """
        try:
            file_path.parent.mkdir(parents=True, exist_ok=True)
            with file_path.open('w', encoding='utf-8') as handle:
                json.dump(data, handle, indent=indent, ensure_ascii=False)
            return True
        except (IOError, OSError, TypeError):
            return False

    @staticmethod
    def validate_json_schema(data: Dict[str, Any], required_fields: List[str]) -> bool:
        """Check that ``data`` is a dict containing every required field name.

        Args:
            data: Candidate payload.
            required_fields: Key names that must all be present.

        Returns:
            True when every required key exists, False otherwise.
        """
        if not isinstance(data, dict):
            return False
        for field_name in required_fields:
            if field_name not in data:
                return False
        return True

    @staticmethod
    def get_nested_value(data: Dict[str, Any], keys: List[str], default: Optional[Any] = None) -> Any:
        """Walk a nested dict along ``keys`` and return the value found.

        Args:
            data: Dictionary to traverse.
            keys: Ordered path of keys to follow.
            default: Returned when any step is missing or non-dict.

        Returns:
            The nested value, or ``default``.
        """
        node: Any = data
        for segment in keys:
            if not isinstance(node, dict) or segment not in node:
                return default
            node = node[segment]
        return node

    @staticmethod
    def merge_json(base: Dict[str, Any], updates: Dict[str, Any]) -> Dict[str, Any]:
        """Recursively merge ``updates`` into a copy of ``base``.

        Nested dicts are merged key-by-key; any other value in ``updates``
        replaces the one in ``base``.

        Args:
            base: Starting dictionary (not mutated).
            updates: Overriding values.

        Returns:
            A new merged dictionary.
        """
        merged = base.copy()
        for key, incoming in updates.items():
            existing = merged.get(key)
            if isinstance(existing, dict) and isinstance(incoming, dict):
                merged[key] = JSONUtils.merge_json(existing, incoming)
            else:
                merged[key] = incoming
        return merged

    @staticmethod
    def create_standard_response(
        success: bool = True,
        message: Optional[str] = None,
        error: Optional[str] = None,
        data: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Build the standardized hook response dictionary.

        Args:
            success: Whether the operation succeeded.
            message: Human-readable status text.
            error: Error description for failures.
            data: Extra payload to attach.

        Returns:
            A dict always containing ``success``; the optional fields are
            included only when truthy (empty strings/dicts are omitted).
        """
        response: Dict[str, Any] = {"success": success}
        for field, value in (("message", message), ("error", error), ("data", data)):
            if value:
                response[field] = value
        return response

    @staticmethod
    def compact_json(data: Dict[str, Any]) -> str:
        """Serialize ``data`` as minimal single-line JSON.

        Args:
            data: Mapping to serialize.

        Returns:
            JSON text with no extra whitespace.
        """
        return json.dumps(data, separators=(',', ':'), ensure_ascii=False)

    @staticmethod
    def pretty_json(data: Dict[str, Any], indent: int = 2) -> str:
        """Serialize ``data`` as human-readable indented JSON.

        Args:
            data: Mapping to serialize.
            indent: Indentation width.

        Returns:
            Indented JSON text.
        """
        return json.dumps(data, indent=indent, ensure_ascii=False)

202 

203 

204# Common JSON validation schemas 

class JSONSchemas:
    """Common JSON schemas and shortcut validators for hook payloads."""

    # Shape of the JSON a hook receives on stdin.
    HOOK_INPUT_SCHEMA = {
        "type": "object",
        "properties": {
            "tool_name": {"type": "string"},
            "tool_args": {"type": "object"},
            "tool_result": {"type": "object"}
        },
        "required": ["tool_name"]
    }

    # Shape of the hook configuration file.
    CONFIG_SCHEMA = {
        "type": "object",
        "properties": {
            "hooks": {
                "type": "object",
                "properties": {
                    "timeout": {"type": "number", "minimum": 1},
                    "enabled": {"type": "boolean"},
                    "graceful_degradation": {"type": "boolean"}
                }
            },
            "tags": {
                "type": "object",
                "properties": {
                    "policy": {
                        "type": "object",
                        "properties": {
                            "enforcement_mode": {"type": "string"},
                            "require_spec_before_code": {"type": "boolean"},
                            "require_test_for_code": {"type": "boolean"}
                        }
                    }
                }
            }
        }
    }

    @staticmethod
    def validate_input_schema(data: Dict[str, Any]) -> bool:
        """Return True when ``data`` carries the mandatory ``tool_name`` key."""
        return JSONUtils.validate_json_schema(data, ["tool_name"])

    @staticmethod
    def validate_config_schema(data: Dict[str, Any]) -> bool:
        """Return True when ``data`` looks like a hook configuration.

        Basic presence check only (a ``hooks`` or ``tags`` section); can be
        extended with full schema validation later.
        """
        if not isinstance(data, dict):
            return False
        return "hooks" in data or "tags" in data