Support Claude 3.5 Sonnet

This commit is contained in:
Abi Raja 2024-06-22 19:39:49 +08:00
parent 392b9849a2
commit 49a24d8b0c
4 changed files with 17 additions and 4 deletions

View File

@@ -15,7 +15,7 @@ async def generate_code_core(image_url: str, stack: Stack, model: Llm) -> str:
async def process_chunk(content: str):
pass
if model == Llm.CLAUDE_3_SONNET:
if model == Llm.CLAUDE_3_SONNET or model == Llm.CLAUDE_3_5_SONNET_2024_06_20:
if not anthropic_api_key:
raise Exception("Anthropic API key not found")
@@ -23,6 +23,7 @@ async def generate_code_core(image_url: str, stack: Stack, model: Llm) -> str:
prompt_messages,
api_key=anthropic_api_key,
callback=lambda x: process_chunk(x),
model=model,
)
else:
if not openai_api_key:

View File

@@ -17,6 +17,7 @@ class Llm(Enum):
CLAUDE_3_SONNET = "claude-3-sonnet-20240229"
CLAUDE_3_OPUS = "claude-3-opus-20240229"
CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
CLAUDE_3_5_SONNET_2024_06_20 = "claude-3-5-sonnet-20240620"
# Will throw errors if you send a garbage string
@@ -59,7 +60,12 @@ async def stream_openai_response(
full_response = ""
async for chunk in stream: # type: ignore
assert isinstance(chunk, ChatCompletionChunk)
if chunk.choices and len(chunk.choices) > 0 and chunk.choices[0].delta and chunk.choices[0].delta.content:
if (
chunk.choices
and len(chunk.choices) > 0
and chunk.choices[0].delta
and chunk.choices[0].delta.content
):
content = chunk.choices[0].delta.content or ""
full_response += content
await callback(content)
@@ -74,12 +80,12 @@ async def stream_claude_response(
messages: List[ChatCompletionMessageParam],
api_key: str,
callback: Callable[[str], Awaitable[None]],
model: Llm,
) -> str:
client = AsyncAnthropic(api_key=api_key)
# Base parameters
model = Llm.CLAUDE_3_SONNET
max_tokens = 4096
temperature = 0.0

View File

@@ -245,7 +245,10 @@ async def stream_code(websocket: WebSocket):
include_thinking=True,
)
exact_llm_version = Llm.CLAUDE_3_OPUS
elif code_generation_model == Llm.CLAUDE_3_SONNET:
elif (
code_generation_model == Llm.CLAUDE_3_SONNET
or code_generation_model == Llm.CLAUDE_3_5_SONNET_2024_06_20
):
if not anthropic_api_key:
await throw_error(
"No Anthropic API key found. Please add the environment variable ANTHROPIC_API_KEY to backend/.env or in the settings dialog"
@@ -256,6 +259,7 @@ async def stream_code(websocket: WebSocket):
prompt_messages, # type: ignore
api_key=anthropic_api_key,
callback=lambda x: process_chunk(x),
model=code_generation_model,
)
exact_llm_version = code_generation_model
else:

View File

@@ -2,6 +2,7 @@
// Order here matches dropdown order
export enum CodeGenerationModel {
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13",
CLAUDE_3_5_SONNET_2024_06_20 = "claude-3-5-sonnet-20240620",
GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-2024-04-09",
GPT_4_VISION = "gpt_4_vision",
CLAUDE_3_SONNET = "claude_3_sonnet",
@@ -12,6 +13,7 @@ export const CODE_GENERATION_MODEL_DESCRIPTIONS: {
[key in CodeGenerationModel]: { name: string; inBeta: boolean };
} = {
"gpt-4o-2024-05-13": { name: "GPT-4o 🌟", inBeta: false },
"claude-3-5-sonnet-20240620": { name: "Claude 3.5 Sonnet 🌟", inBeta: false },
"gpt-4-turbo-2024-04-09": { name: "GPT-4 Turbo (Apr 2024)", inBeta: false },
gpt_4_vision: { name: "GPT-4 Vision (Nov 2023)", inBeta: false },
claude_3_sonnet: { name: "Claude 3 Sonnet", inBeta: false },