Fix bug where the Llm enum member was passed as the model name instead of its string value (use model.value)

This commit is contained in:
Abi Raja 2024-03-19 10:30:58 -04:00
parent 81c4fbe28d
commit 212aa228ab

View File

@ -35,7 +35,12 @@ async def stream_openai_response(
model = Llm.GPT_4_VISION
# Base parameters
params = {"model": model, "messages": messages, "stream": True, "timeout": 600}
params = {
"model": model.value,
"messages": messages,
"stream": True,
"timeout": 600,
}
# Add 'max_tokens' only if the model is a GPT4 vision model
if model == Llm.GPT_4_VISION: