Coverage for src/lite_agent/stream_handlers/litellm.py: 98%

53 statements  

« prev     ^ index     » next       coverage.py v7.10.5, created at 2025-08-25 22:58 +0900

from collections.abc import AsyncGenerator
from pathlib import Path
from typing import TYPE_CHECKING

import aiofiles
import litellm
from litellm.types.llms.openai import ResponsesAPIStreamingResponse
from litellm.types.utils import ModelResponseStream
from pydantic import BaseModel

from lite_agent.loggers import logger
from lite_agent.processors import CompletionEventProcessor, ResponseEventProcessor
from lite_agent.types import AgentChunk

if TYPE_CHECKING:
    # Imported only for type checking: AsyncTextIOWrapper is used purely in
    # annotations, so keeping it out of the runtime import graph avoids cost.
    from aiofiles.threadpool.text import AsyncTextIOWrapper

19def ensure_record_file(record_to: Path | str | None) -> Path | None: 

20 if not record_to: 

21 return None 

22 

23 path = Path(record_to) if isinstance(record_to, str) else record_to 

24 

25 # If the path is a directory, generate a filename 

26 if path.is_dir(): 

27 path = path / "conversation.jsonl" 

28 

29 # Ensure parent directory exists 

30 if not path.parent.exists(): 

31 logger.warning('Record directory "%s" does not exist, creating it.', path.parent) 

32 path.parent.mkdir(parents=True, exist_ok=True) 

33 

34 return path 

35 

36 

async def litellm_completion_stream_handler(
    resp: litellm.CustomStreamWrapper,
    record_to: Path | str | None = None,
) -> AsyncGenerator[AgentChunk, None]:
    """
    Optimized chunk handler
    """
    event_processor = CompletionEventProcessor()
    record_path = ensure_record_file(record_to)
    record_file: AsyncTextIOWrapper | None = None
    if record_path:
        record_file = await aiofiles.open(record_path, "w", encoding="utf-8")
    try:
        async for raw_chunk in resp:  # type: ignore
            if isinstance(raw_chunk, ModelResponseStream):
                # Delegate to the processor; it may emit zero or more events
                # per incoming chunk (and appends to record_file when set).
                async for event in event_processor.process_chunk(raw_chunk, record_file):
                    yield event
            else:
                # Non-stream chunks are logged and skipped rather than raised.
                logger.warning("unexpected chunk type: %s", type(raw_chunk))
                logger.warning("chunk content: %s", raw_chunk)
    finally:
        # Close the recording file even if the consumer abandons the generator.
        if record_file:
            await record_file.close()

60 

61 

async def litellm_response_stream_handler(
    resp: AsyncGenerator[ResponsesAPIStreamingResponse, None],
    record_to: Path | str | None = None,
) -> AsyncGenerator[AgentChunk, None]:
    """
    Response API stream handler for processing ResponsesAPIStreamingResponse chunks
    """
    event_processor = ResponseEventProcessor()
    record_path = ensure_record_file(record_to)
    record_file: AsyncTextIOWrapper | None = None
    if record_path:
        record_file = await aiofiles.open(record_path, "w", encoding="utf-8")
    try:
        async for raw_chunk in resp:
            if isinstance(raw_chunk, BaseModel):
                # Delegate to the processor; it may emit zero or more events
                # per incoming chunk (and appends to record_file when set).
                async for event in event_processor.process_chunk(raw_chunk, record_file):
                    yield event
            else:
                # Anything that is not a pydantic model is logged and skipped.
                logger.warning("unexpected chunk type: %s", type(raw_chunk))
                logger.warning("chunk content: %s", raw_chunk)
    finally:
        # Close the recording file even if the consumer abandons the generator.
        if record_file:
            await record_file.close()