From f0ee68666092b2579147aabfc5846f2568c983a3 Mon Sep 17 00:00:00 2001
From: Naman <1608naman@gmail.com>
Date: Sat, 25 May 2024 07:02:02 +0530
Subject: [PATCH] null check for chunk.choices

---
 backend/llm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/llm.py b/backend/llm.py
index 44f97e1..e32051c 100644
--- a/backend/llm.py
+++ b/backend/llm.py
@@ -59,7 +59,7 @@ async def stream_openai_response(
     full_response = ""
     async for chunk in stream:  # type: ignore
         assert isinstance(chunk, ChatCompletionChunk)
-        if chunk.choices and chunk.choices[0].delta.content:
+        if chunk.choices and len(chunk.choices) > 0 and chunk.choices[0].delta and chunk.choices[0].delta.content:
             content = chunk.choices[0].delta.content or ""
             full_response += content
             await callback(content)