From c5695975c710a6dd5d3f8c311c53857ca7f73d4a Mon Sep 17 00:00:00 2001
From: Naman <1608naman@gmail.com>
Date: Sat, 25 May 2024 00:32:13 +0530
Subject: [PATCH 1/2] handle none type in llm.py

---
 backend/llm.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/backend/llm.py b/backend/llm.py
index e541046..44f97e1 100644
--- a/backend/llm.py
+++ b/backend/llm.py
@@ -59,9 +59,10 @@ async def stream_openai_response(
     full_response = ""
     async for chunk in stream:  # type: ignore
         assert isinstance(chunk, ChatCompletionChunk)
-        content = chunk.choices[0].delta.content or ""
-        full_response += content
-        await callback(content)
+        if chunk.choices and chunk.choices[0].delta.content:
+            content = chunk.choices[0].delta.content or ""
+            full_response += content
+            await callback(content)
 
     await client.close()

From f0ee68666092b2579147aabfc5846f2568c983a3 Mon Sep 17 00:00:00 2001
From: Naman <1608naman@gmail.com>
Date: Sat, 25 May 2024 07:02:02 +0530
Subject: [PATCH 2/2] null check for chunk.choices

---
 backend/llm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/llm.py b/backend/llm.py
index 44f97e1..e32051c 100644
--- a/backend/llm.py
+++ b/backend/llm.py
@@ -59,7 +59,7 @@ async def stream_openai_response(
     full_response = ""
     async for chunk in stream:  # type: ignore
         assert isinstance(chunk, ChatCompletionChunk)
-        if chunk.choices and chunk.choices[0].delta.content:
+        if chunk.choices and len(chunk.choices) > 0 and chunk.choices[0].delta and chunk.choices[0].delta.content:
             content = chunk.choices[0].delta.content or ""
             full_response += content
             await callback(content)
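
Note: for reference, below is a minimal, self-contained sketch of how the guarded streaming loop behaves once both patches are applied. The Delta/Choice/Chunk dataclasses and fake_stream() are stand-ins for the OpenAI SDK's ChatCompletionChunk types and the real completion stream, introduced only to make the example runnable; the guard condition itself is taken from the patch.

    import asyncio
    from dataclasses import dataclass, field
    from typing import AsyncIterator, List, Optional


    @dataclass
    class Delta:
        content: Optional[str] = None


    @dataclass
    class Choice:
        delta: Delta = field(default_factory=Delta)


    @dataclass
    class Chunk:
        choices: List[Choice] = field(default_factory=list)


    async def fake_stream() -> AsyncIterator[Chunk]:
        # Simulated stream: normal chunks, an empty-choices chunk, and a None-content delta.
        yield Chunk(choices=[Choice(Delta("Hello"))])
        yield Chunk(choices=[])                     # pre-patch code would raise IndexError here
        yield Chunk(choices=[Choice(Delta(None))])  # pre-patch code would invoke the callback with ""
        yield Chunk(choices=[Choice(Delta(", world"))])


    async def main() -> None:
        full_response = ""
        async for chunk in fake_stream():
            # Guard introduced by the two patches: skip chunks with no choices,
            # a missing delta, or empty/None content.
            if chunk.choices and len(chunk.choices) > 0 and chunk.choices[0].delta and chunk.choices[0].delta.content:
                content = chunk.choices[0].delta.content or ""
                full_response += content
                print(f"callback({content!r})")  # stands in for `await callback(content)`
        print(f"full_response = {full_response!r}")


    asyncio.run(main())

Since an empty list is already falsy, `chunk.choices and len(chunk.choices) > 0` is redundant but harmless, and the `or ""` becomes unreachable once the guard ensures content is non-empty; the behavioral change is that empty-choices chunks no longer raise IndexError and empty deltas no longer trigger the callback.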