From c5695975c710a6dd5d3f8c311c53857ca7f73d4a Mon Sep 17 00:00:00 2001
From: Naman <1608naman@gmail.com>
Date: Sat, 25 May 2024 00:32:13 +0530
Subject: [PATCH] Handle None type in llm.py

---
 backend/llm.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/backend/llm.py b/backend/llm.py
index e541046..44f97e1 100644
--- a/backend/llm.py
+++ b/backend/llm.py
@@ -59,9 +59,10 @@ async def stream_openai_response(
     full_response = ""
     async for chunk in stream:  # type: ignore
         assert isinstance(chunk, ChatCompletionChunk)
-        content = chunk.choices[0].delta.content or ""
-        full_response += content
-        await callback(content)
+        if chunk.choices and chunk.choices[0].delta.content:
+            content = chunk.choices[0].delta.content or ""
+            full_response += content
+            await callback(content)

     await client.close()
