feat: Implement LLM streaming support and enhance event handling in review process

This commit is contained in:
Primakov Alexandr Alexandrovich
2025-10-13 17:48:03 +03:00
parent 2f29ccff74
commit 1d953f554b
6 changed files with 107 additions and 45 deletions
+13 -1
View File
@@ -315,6 +315,17 @@ class ReviewerAgent:
print(f" ⚠️ ПРОПУСК: patch пустой или слишком маленький")
continue
# Streaming callback handed to the analyzer so raw LLM output can be
# forwarded to the client incrementally as it is generated.
async def on_llm_chunk(chunk: str, file: str):
    """Forward one LLM streaming chunk to the registered stream callback.

    Emits an event dict of type "llm_chunk" carrying the chunk text (both
    as "chunk" and "message") and the file path being analyzed.
    No-op when no stream callback is registered.
    """
    if self._stream_callback:
        await self._stream_callback({
            "type": "llm_chunk",
            "chunk": chunk,
            "file_path": file,
            "message": chunk
        })
# Analyze diff with PR context
pr_info = state.get("pr_info", {})
comments = await self.analyzer.analyze_diff(
@@ -322,7 +333,8 @@ class ReviewerAgent:
diff=patch,
language=language,
pr_title=pr_info.get("title", ""),
pr_description=pr_info.get("description", "")
pr_description=pr_info.get("description", ""),
on_llm_chunk=on_llm_chunk
)
print(f" 💬 Получено комментариев: {len(comments)}")