Handle `None` chunk content (and empty `choices`) in the streaming loop in llm.py
This commit is contained in:
parent
23e631765e
commit
c5695975c7
@ -59,9 +59,10 @@ async def stream_openai_response(
|
||||
full_response = ""
|
||||
async for chunk in stream: # type: ignore
|
||||
assert isinstance(chunk, ChatCompletionChunk)
|
||||
content = chunk.choices[0].delta.content or ""
|
||||
full_response += content
|
||||
await callback(content)
|
||||
if chunk.choices and chunk.choices[0].delta.content:
|
||||
content = chunk.choices[0].delta.content or ""
|
||||
full_response += content
|
||||
await callback(content)
|
||||
|
||||
await client.close()
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user