Handle `None` chunk content in llm.py

This commit is contained in:
Naman 2024-05-25 00:32:13 +05:30
parent 23e631765e
commit c5695975c7

View File

@@ -59,6 +59,7 @@ async def stream_openai_response(
full_response = ""
async for chunk in stream: # type: ignore
assert isinstance(chunk, ChatCompletionChunk)
if chunk.choices and chunk.choices[0].delta.content:
content = chunk.choices[0].delta.content or ""
full_response += content
await callback(content)