handle none type in llm.py

This commit is contained in:
Naman 2024-05-25 00:32:13 +05:30
parent 23e631765e
commit c5695975c7

View File

@@ -59,9 +59,10 @@ async def stream_openai_response(
     full_response = ""
     async for chunk in stream:  # type: ignore
         assert isinstance(chunk, ChatCompletionChunk)
-        content = chunk.choices[0].delta.content or ""
-        full_response += content
-        await callback(content)
+        if chunk.choices and chunk.choices[0].delta.content:
+            content = chunk.choices[0].delta.content or ""
+            full_response += content
+            await callback(content)
     await client.close()