Perfect the code and make it available

This commit is contained in:
hyf-github-user 2024-09-07 22:21:41 +08:00
parent 21f553a0d8
commit e1d2fe9c98
15 changed files with 389 additions and 82 deletions

8
backend/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.10 (screen_code)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,125 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="DuplicatedCode" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<Languages>
<language minSize="442" name="Python" />
</Languages>
</inspection_tool>
<inspection_tool class="HttpUrlsUsage" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoredUrls">
<list>
<option value="http://localhost" />
<option value="http://127.0.0.1" />
<option value="http://0.0.0.0" />
<option value="http://www.w3.org/" />
<option value="http://json-schema.org/draft" />
<option value="http://java.sun.com/" />
<option value="http://xmlns.jcp.org/" />
<option value="http://javafx.com/javafx/" />
<option value="http://javafx.com/fxml" />
<option value="http://maven.apache.org/xsd/" />
<option value="http://maven.apache.org/POM/" />
<option value="http://www.springframework.org/schema/" />
<option value="http://www.springframework.org/tags" />
<option value="http://www.springframework.org/security/tags" />
<option value="http://www.thymeleaf.org" />
<option value="http://www.jboss.org/j2ee/schema/" />
<option value="http://www.jboss.com/xml/ns/" />
<option value="http://www.ibm.com/webservices/xsd" />
<option value="http://activemq.apache.org/schema/" />
<option value="http://schema.cloudfoundry.org/spring/" />
<option value="http://schemas.xmlsoap.org/" />
<option value="http://cxf.apache.org/schemas/" />
<option value="http://primefaces.org/ui" />
<option value="http://tiles.apache.org/" />
<option value="http://" />
</list>
</option>
</inspection_tool>
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="41">
<item index="0" class="java.lang.String" itemvalue="tensorflow" />
<item index="1" class="java.lang.String" itemvalue="numpy" />
<item index="2" class="java.lang.String" itemvalue="keras" />
<item index="3" class="java.lang.String" itemvalue="tensorflow-addons" />
<item index="4" class="java.lang.String" itemvalue="joblib" />
<item index="5" class="java.lang.String" itemvalue="scikit-learn" />
<item index="6" class="java.lang.String" itemvalue="nltk" />
<item index="7" class="java.lang.String" itemvalue="tfx" />
<item index="8" class="java.lang.String" itemvalue="urlextract" />
<item index="9" class="java.lang.String" itemvalue="requests" />
<item index="10" class="java.lang.String" itemvalue="ftfy" />
<item index="11" class="java.lang.String" itemvalue="tensorflow-probability" />
<item index="12" class="java.lang.String" itemvalue="tensorflow-serving-api" />
<item index="13" class="java.lang.String" itemvalue="tensorflow-hub" />
<item index="14" class="java.lang.String" itemvalue="tensorflow-datasets" />
<item index="15" class="java.lang.String" itemvalue="nbdime" />
<item index="16" class="java.lang.String" itemvalue="graphviz" />
<item index="17" class="java.lang.String" itemvalue="tf-agents" />
<item index="18" class="java.lang.String" itemvalue="xgboost" />
<item index="19" class="java.lang.String" itemvalue="ipywidgets" />
<item index="20" class="java.lang.String" itemvalue="scipy" />
<item index="21" class="java.lang.String" itemvalue="tensorboard-plugin-profile" />
<item index="22" class="java.lang.String" itemvalue="transformers" />
<item index="23" class="java.lang.String" itemvalue="opencv-python" />
<item index="24" class="java.lang.String" itemvalue="pyglet" />
<item index="25" class="java.lang.String" itemvalue="pandas" />
<item index="26" class="java.lang.String" itemvalue="tqdm" />
<item index="27" class="java.lang.String" itemvalue="tensorboard" />
<item index="28" class="java.lang.String" itemvalue="matplotlib" />
<item index="29" class="java.lang.String" itemvalue="gym" />
<item index="30" class="java.lang.String" itemvalue="Pillow" />
<item index="31" class="java.lang.String" itemvalue="numexpr" />
<item index="32" class="java.lang.String" itemvalue="absl" />
<item index="33" class="java.lang.String" itemvalue="selenium" />
<item index="34" class="java.lang.String" itemvalue="SQLAlchemy" />
<item index="35" class="java.lang.String" itemvalue="protobuf" />
<item index="36" class="java.lang.String" itemvalue="notebook" />
<item index="37" class="java.lang.String" itemvalue="tensorflow-gpu" />
<item index="38" class="java.lang.String" itemvalue="upyun" />
<item index="39" class="java.lang.String" itemvalue="torch" />
<item index="40" class="java.lang.String" itemvalue="torchvision" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="PyPep8Inspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoredErrors">
<list>
<option value="W292" />
<option value="E501" />
<option value="E402" />
<option value="E127" />
</list>
</option>
</inspection_tool>
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoredErrors">
<list>
<option value="N814" />
<option value="N802" />
<option value="N806" />
<option value="N812" />
</list>
</option>
</inspection_tool>
<inspection_tool class="PyShadowingBuiltinsInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="ignoredNames">
<list>
<option value="hash" />
</list>
</option>
</inspection_tool>
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="apps.poll.urls" />
<option value="tkinter.messagebox.Message.showinfo" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

4
backend/.idea/misc.xml Normal file
View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (screen_code)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/backend.iml" filepath="$PROJECT_DIR$/.idea/backend.iml" />
</modules>
</component>
</project>

6
backend/.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@ -1,9 +1,9 @@
FROM python:3.12.3-slim-bullseye
FROM python:3.10-slim
ENV POETRY_VERSION 1.4.1
# Install system dependencies
RUN pip install "poetry==$POETRY_VERSION"
RUN pip install "poetry==$POETRY_VERSION" --index-url https://pypi.tuna.tsinghua.edu.cn/simple
# Set work directory
WORKDIR /app
@ -15,7 +15,9 @@ COPY poetry.lock pyproject.toml /app/
RUN poetry config virtualenvs.create false
# Install dependencies
RUN poetry install
RUN poetry export -f requirements.txt --output requirements.txt --without-hashes
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt --index-url https://pypi.tuna.tsinghua.edu.cn/simple
# Copy the current directory contents into the container at /app
COPY ./ /app/

View File

@ -14,8 +14,8 @@ from utils import pprint_prompt
# Actual model versions that are passed to the LLMs and stored in our logs
class Llm(Enum):
GPT_4_VISION = "gpt-4-vision-preview"
GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-2024-04-09"
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13"
GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-preview"
GPT_4O_2024_05_13 = "gpt-4o"
CLAUDE_3_SONNET = "claude-3-sonnet-20240229"
CLAUDE_3_OPUS = "claude-3-opus-20240229"
CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
@ -33,11 +33,11 @@ def convert_frontend_str_to_llm(frontend_str: str) -> Llm:
async def stream_openai_response(
messages: List[ChatCompletionMessageParam],
api_key: str,
base_url: str | None,
callback: Callable[[str], Awaitable[None]],
model: Llm,
messages: List[ChatCompletionMessageParam],
api_key: str,
base_url: str | None,
callback: Callable[[str], Awaitable[None]],
model: Llm,
) -> str:
client = AsyncOpenAI(api_key=api_key, base_url=base_url)
@ -52,21 +52,20 @@ async def stream_openai_response(
# Add 'max_tokens' only if the model is a GPT4 vision or Turbo model
if (
model == Llm.GPT_4_VISION
or model == Llm.GPT_4_TURBO_2024_04_09
or model == Llm.GPT_4O_2024_05_13
model == Llm.GPT_4_VISION
or model == Llm.GPT_4_TURBO_2024_04_09
or model == Llm.GPT_4O_2024_05_13
):
params["max_tokens"] = 4096
stream = await client.chat.completions.create(**params) # type: ignore
full_response = ""
async for chunk in stream: # type: ignore
assert isinstance(chunk, ChatCompletionChunk)
if (
chunk.choices
and len(chunk.choices) > 0
and chunk.choices[0].delta
and chunk.choices[0].delta.content
chunk.choices
and len(chunk.choices) > 0
and chunk.choices[0].delta
and chunk.choices[0].delta.content
):
content = chunk.choices[0].delta.content or ""
full_response += content
@ -79,12 +78,11 @@ async def stream_openai_response(
# TODO: Have a separate function that translates OpenAI messages to Claude messages
async def stream_claude_response(
messages: List[ChatCompletionMessageParam],
api_key: str,
callback: Callable[[str], Awaitable[None]],
model: Llm,
messages: List[ChatCompletionMessageParam],
api_key: str,
callback: Callable[[str], Awaitable[None]],
model: Llm,
) -> str:
client = AsyncAnthropic(api_key=api_key)
# Base parameters
@ -121,12 +119,12 @@ async def stream_claude_response(
# Stream Claude response
async with client.messages.stream(
model=model.value,
max_tokens=max_tokens,
temperature=temperature,
system=system_prompt,
messages=claude_messages, # type: ignore
extra_headers={"anthropic-beta": "max-tokens-3-5-sonnet-2024-07-15"},
model=model.value,
max_tokens=max_tokens,
temperature=temperature,
system=system_prompt,
messages=claude_messages, # type: ignore
extra_headers={"anthropic-beta": "max-tokens-3-5-sonnet-2024-07-15"},
) as stream:
async for text in stream.text_stream:
await callback(text)
@ -141,14 +139,13 @@ async def stream_claude_response(
async def stream_claude_response_native(
system_prompt: str,
messages: list[Any],
api_key: str,
callback: Callable[[str], Awaitable[None]],
include_thinking: bool = False,
model: Llm = Llm.CLAUDE_3_OPUS,
system_prompt: str,
messages: list[Any],
api_key: str,
callback: Callable[[str], Awaitable[None]],
include_thinking: bool = False,
model: Llm = Llm.CLAUDE_3_OPUS,
) -> str:
client = AsyncAnthropic(api_key=api_key)
# Base model parameters
@ -179,11 +176,11 @@ async def stream_claude_response_native(
pprint_prompt(messages_to_send)
async with client.messages.stream(
model=model.value,
max_tokens=max_tokens,
temperature=temperature,
system=system_prompt,
messages=messages_to_send, # type: ignore
model=model.value,
max_tokens=max_tokens,
temperature=temperature,
system=system_prompt,
messages=messages_to_send, # type: ignore
) as stream:
async for text in stream.text_stream:
print(text, end="", flush=True)

View File

@ -27,7 +27,6 @@ from utils import pprint_prompt
from video.utils import extract_tag_content, assemble_claude_prompt_video
from ws.constants import APP_ERROR_WEB_SOCKET_CODE # type: ignore
router = APIRouter()
@ -43,11 +42,13 @@ def write_logs(prompt_messages: List[ChatCompletionMessageParam], completion: st
print("Writing to logs directory:", logs_directory)
# Generate a unique filename using the current timestamp within the logs directory
filename = datetime.now().strftime(f"{logs_directory}/messages_%Y%m%d_%H%M%S.json")
filename = datetime.now().strftime(
f"{logs_directory}/messages_%Y%m%d_%H%M%S.json")
# Write the messages dict into a new file for each run
with open(filename, "w") as f:
f.write(json.dumps({"prompt": prompt_messages, "completion": completion}))
f.write(json.dumps(
{"prompt": prompt_messages, "completion": completion}))
@router.websocket("/generate-code")
@ -57,7 +58,7 @@ async def stream_code(websocket: WebSocket):
print("Incoming websocket connection...")
async def throw_error(
message: str,
message: str,
):
await websocket.send_json({"type": "error", "value": message})
await websocket.close(APP_ERROR_WEB_SOCKET_CODE)
@ -89,8 +90,10 @@ async def stream_code(websocket: WebSocket):
code_generation_model_str = params.get(
"codeGenerationModel", Llm.GPT_4O_2024_05_13.value
)
print("codeGenerationModel======", code_generation_model_str)
try:
code_generation_model = convert_frontend_str_to_llm(code_generation_model_str)
code_generation_model = convert_frontend_str_to_llm(
code_generation_model_str)
except:
await throw_error(f"Invalid model: {code_generation_model_str}")
raise Exception(f"Invalid model: {code_generation_model_str}")
@ -125,9 +128,9 @@ async def stream_code(websocket: WebSocket):
print("Using OpenAI API key from environment variable")
if not openai_api_key and (
code_generation_model == Llm.GPT_4_VISION
or code_generation_model == Llm.GPT_4_TURBO_2024_04_09
or code_generation_model == Llm.GPT_4O_2024_05_13
code_generation_model == Llm.GPT_4_VISION
or code_generation_model == Llm.GPT_4_TURBO_2024_04_09
or code_generation_model == Llm.GPT_4O_2024_05_13
):
print("OpenAI API key not found")
await throw_error(
@ -261,8 +264,8 @@ async def stream_code(websocket: WebSocket):
)
exact_llm_version = Llm.CLAUDE_3_OPUS
elif (
code_generation_model == Llm.CLAUDE_3_SONNET
or code_generation_model == Llm.CLAUDE_3_5_SONNET_2024_06_20
code_generation_model == Llm.CLAUDE_3_SONNET
or code_generation_model == Llm.CLAUDE_3_5_SONNET_2024_06_20
):
if not anthropic_api_key:
await throw_error(
@ -285,39 +288,40 @@ async def stream_code(websocket: WebSocket):
callback=lambda x: process_chunk(x),
model=code_generation_model,
)
exact_llm_version = code_generation_model
except openai.AuthenticationError as e:
print("[GENERATE_CODE] Authentication failed", e)
error_message = (
"Incorrect OpenAI key. Please make sure your OpenAI API key is correct, or create a new OpenAI API key on your OpenAI dashboard."
+ (
" Alternatively, you can purchase code generation credits directly on this website."
if IS_PROD
else ""
)
"Incorrect OpenAI key. Please make sure your OpenAI API key is correct, or create a new OpenAI API key on your OpenAI dashboard."
+ (
" Alternatively, you can purchase code generation credits directly on this website."
if IS_PROD
else ""
)
)
return await throw_error(error_message)
except openai.NotFoundError as e:
print("[GENERATE_CODE] Model not found", e)
error_message = (
e.message
+ ". Please make sure you have followed the instructions correctly to obtain an OpenAI key with GPT vision access: https://github.com/abi/screenshot-to-code/blob/main/Troubleshooting.md"
+ (
" Alternatively, you can purchase code generation credits directly on this website."
if IS_PROD
else ""
)
e.message
+ ". Please make sure you have followed the instructions correctly to obtain an OpenAI key with GPT vision access: https://github.com/abi/screenshot-to-code/blob/main/Troubleshooting.md"
+ (
" Alternatively, you can purchase code generation credits directly on this website."
if IS_PROD
else ""
)
)
return await throw_error(error_message)
except openai.RateLimitError as e:
print("[GENERATE_CODE] Rate limit exceeded", e)
error_message = (
"OpenAI error - 'You exceeded your current quota, please check your plan and billing details.'"
+ (
" Alternatively, you can purchase code generation credits directly on this website."
if IS_PROD
else ""
)
"OpenAI error - 'You exceeded your current quota, please check your plan and billing details.'"
+ (
" Alternatively, you can purchase code generation credits directly on this website."
if IS_PROD
else ""
)
)
return await throw_error(error_message)

View File

@ -1,7 +1,6 @@
from fastapi import APIRouter
from fastapi.responses import HTMLResponse
router = APIRouter()

View File

@ -0,0 +1,53 @@
# Web frontend deployment
server {
listen 443 ssl;
server_name huyinfu.space;
location / {
root /usr/share/nginx/frontend/; # dist build output is mounted into the container at this path
try_files $uri $uri/ /index.html;
index index.html;
}
# SSL configuration below.
# HTTPS is served on the default port 443.
# If the HTTPS port is not configured here, Nginx may fail to start.
# On Nginx 1.15.0 and later, use "listen 443 ssl" instead of "listen 443" plus "ssl on".
ssl_certificate /etc/nginx/conf.d/web_certs/huyinfu.space.pem; # replace cert-file-name.pem with the name of the uploaded certificate file
ssl_certificate_key /etc/nginx/conf.d/web_certs/huyinfu.space.key; # replace cert-file-name.key with the name of the uploaded certificate key file
ssl_session_timeout 5m;
ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE:ECDH:AES:HIGH:!NULL:!aNULL:!MD5:!ADH:!RC4;
# Cipher suites accepted by the server.
ssl_protocols TLSv1.1 TLSv1.2; # Accepted TLS protocol versions. NOTE(review): TLSv1.1 is deprecated — consider TLSv1.2/TLSv1.3 only; confirm client requirements first.
ssl_prefer_server_ciphers on;
}
server {
listen 80;
server_name huyinfu.space; # replace with the domain the certificate is bound to
rewrite ^(.*)$ https://$host$1; # redirect all HTTP requests to HTTPS via the rewrite directive
location / {
index index.html index.htm;
}
}
# Backend deployment
server {
listen 443 ssl;
server_name api.huyinfu.space;
# HTTPS is served on the default port 443.
# If the HTTPS port is not configured here, Nginx may fail to start.
# On Nginx 1.15.0 and later, use "listen 443 ssl" instead of "listen 443" plus "ssl on".
ssl_certificate /etc/nginx/conf.d/api_certs/api.huyinfu.space.pem; # replace cert-file-name.pem with the name of the uploaded certificate file
ssl_certificate_key /etc/nginx/conf.d/api_certs/api.huyinfu.space.key; # replace cert-file-name.key with the name of the uploaded certificate key file
ssl_session_timeout 5m;
ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE:ECDH:AES:HIGH:!NULL:!aNULL:!MD5:!ADH:!RC4;
# Cipher suites accepted by the server.
ssl_protocols TLSv1.1 TLSv1.2; # Accepted TLS protocol versions. NOTE(review): TLSv1.1 is deprecated — consider TLSv1.2/TLSv1.3 only; confirm client requirements first.
ssl_prefer_server_ciphers on;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $http_host;
proxy_pass http://huyinfu.space:7001/;
}
}

View File

@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAywS+PTGLpIzq6eWOblqsF1cBUDxSaWjSR/y6oTYO6ST1oE7s
P8MKCrwgq9/zUxem6LyNOIShXW9aNoCKeZmQQWo/LTz/rLbl6ZtvZV/mJa8fKCxd
ZHnpp+PpCu5gtdIxPFQpxrK9l3SRyzk61fyXRLhTXWfQKAAFFay9ReXvwH6PlIhT
3ym4I5to9wCulchelgsLwijJZkLAkODKYWr6ZtLLCi5kb50NZnS4mSjO7acGae7Y
uxyrhc+ZPfkK+hngpiuAWLgvSn8jJywuvl2cYAv+jMNkmmScwHublFfCYknpzvxe
DWCcrsZq+GtDRJNMgH/8Bc1TxlR5qq8hjzvirwIDAQABAoIBABx9wJT7QbWzcQ78
1aL3EUpVPiycdT8FzuemDYBXrdAFDSjqP+LS5F3io6h7TWf5McYHxWnhHc7sXvoM
dWKLV+CroHd2JWDVFNRWqVN4xJEVT9y7cn/fUW3rtjt/02Kw6j1P3Rey40NAwL/c
euf9At2fYKkDILCnEA+dURK2KjzBRjH1D9cbk++oAUMuf4/uCFZ3eqWmMRk2SVx0
2DJZ/PWO4Xl2JU4As6xOazrPzt/+kQ5ZMglpwpXzvIODrBvZvLfLiuKZvd8TzWqO
tnYUnP8j8joxK5pxRwhIv7gQi9byr0dPRAhrfJLviB3Zswh0XQt6J/1XIFpLURjN
s6T8X0ECgYEA6lcGEGDZt8GfHe58TTPnteZI/jBhFQsO37yrKnf2GZm0Q3vQ11f3
9L1+Lys8fDM2us/AszViqAc3Q/CeQwrdKuuNrdfPrFCWBsvdIQ7tfvZcPRQ4BKnc
iGARLptmwRIY5kOxTqy54BAS+V1pI8n8RDgQUO5kPWNKffy66+uccU0CgYEA3ciW
xhYnnOf/1I5/s72vltJaCJok5Blh+WUlPXBC8byu2USF0KtoL4e5+41iSwOx9GZ1
GCnhKsRpptJJDUX9wHLokN3bXFtSFmPgzUYHJnAeKeHZOSsIcmAf+n7ou1VtoV0q
h+0+6ALKJU0rqf7y6uT/azK/64UUgLkx/Sde5esCgYAzc1nkfK8wljVOSA9IkA3e
tSnvjUWzbhMc8nqOP3psEoAulN+8eBb7yCMNTivghQoOKBECB8WaaMMsiobk51y5
0K/xrEFvbt4Rre7mFDX2uzZzPp4x/+1oj9cV6Y688zE+VTx3rpq85CqWT3kHH5Dg
lYKXkk+13rf7Q2Ly628UvQKBgB3JbQaZvyRBvIhtwxT+6vBUvCbfnQjNp+z4+rFv
vG3/K3Se0WIfQ1Tu47m3WXY9vs7WW++s4EMAMjhnkcwolFYemMGsKmQTF8lODMlF
GHy/Hi392VDmSLIj2uen19YhOSItAyNSZ9p9VW0BGlYwmauWqf9Zx5D4RLKtLNuV
SBeVAoGBALkZ7OHfkFeF2In1OGVMEMn1+IPVig9zNEoLT3j5VUjcGXF6wAiiTx2z
uhSGsAsjLxsTZPBv5Gg12qY3iOoTMkqzzM5oXT0fdEl5cEEoouGNfpMzymREU+fp
BIb5jpNRHEA88HFPV9XIye4y4UUwFhMjiu9LCK6sZWJTcDH6E8tc
-----END RSA PRIVATE KEY-----

View File

@ -0,0 +1,62 @@
-----BEGIN CERTIFICATE-----
MIIGBjCCBO6gAwIBAgIQAXrysuGheu5fNbEpJFnsKTANBgkqhkiG9w0BAQsFADBu
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMS0wKwYDVQQDEyRFbmNyeXB0aW9uIEV2ZXJ5d2hlcmUg
RFYgVExTIENBIC0gRzIwHhcNMjQwNjAzMDAwMDAwWhcNMjQwODMxMjM1OTU5WjAY
MRYwFAYDVQQDEw1odXlpbmZ1LnNwYWNlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
MIIBCgKCAQEAywS+PTGLpIzq6eWOblqsF1cBUDxSaWjSR/y6oTYO6ST1oE7sP8MK
Crwgq9/zUxem6LyNOIShXW9aNoCKeZmQQWo/LTz/rLbl6ZtvZV/mJa8fKCxdZHnp
p+PpCu5gtdIxPFQpxrK9l3SRyzk61fyXRLhTXWfQKAAFFay9ReXvwH6PlIhT3ym4
I5to9wCulchelgsLwijJZkLAkODKYWr6ZtLLCi5kb50NZnS4mSjO7acGae7Yuxyr
hc+ZPfkK+hngpiuAWLgvSn8jJywuvl2cYAv+jMNkmmScwHublFfCYknpzvxeDWCc
rsZq+GtDRJNMgH/8Bc1TxlR5qq8hjzvirwIDAQABo4IC9DCCAvAwHwYDVR0jBBgw
FoAUeN+RkF/u3qz2xXXr1UxVU+8kSrYwHQYDVR0OBBYEFBnzi9AQeDyPTMe+iCua
+UmJ9DEyMCsGA1UdEQQkMCKCDWh1eWluZnUuc3BhY2WCEXd3dy5odXlpbmZ1LnNw
YWNlMD4GA1UdIAQ3MDUwMwYGZ4EMAQIBMCkwJwYIKwYBBQUHAgEWG2h0dHA6Ly93
d3cuZGlnaWNlcnQuY29tL0NQUzAOBgNVHQ8BAf8EBAMCBaAwHQYDVR0lBBYwFAYI
KwYBBQUHAwEGCCsGAQUFBwMCMIGABggrBgEFBQcBAQR0MHIwJAYIKwYBBQUHMAGG
GGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBKBggrBgEFBQcwAoY+aHR0cDovL2Nh
Y2VydHMuZGlnaWNlcnQuY29tL0VuY3J5cHRpb25FdmVyeXdoZXJlRFZUTFNDQS1H
Mi5jcnQwDAYDVR0TAQH/BAIwADCCAX8GCisGAQQB1nkCBAIEggFvBIIBawFpAHcA
dv+IPwq2+5VRwmHM9Ye6NLSkzbsp3GhCCp/mZ0xaOnQAAAGP3fbFrQAABAMASDBG
AiEAsA/gHlhFDFlwWkRU5ElFD/l0cwGTKk8mGYunNDg+UIMCIQDb6jp5OIlIoHFS
D2bSrd4NJNzp0pUrt1zGO+01zCPY+QB1AEiw42vapkc0D+VqAvqdMOscUgHLVt0s
gdm7v6s52IRzAAABj932xRQAAAQDAEYwRAIgFFyX/KOgpovdY3vKHZqs2KpDQpi1
iYGdy6/iG62iLuICIBiqOIThvyIk2fSaj5Q0xPuJqQZRuBFKSG1yzfdETgbxAHcA
2ra/az+1tiKfm8K7XGvocJFxbLtRhIU0vaQ9MEjX+6sAAAGP3fbFywAABAMASDBG
AiEA0D83wkK0UuXkwA1zUOivL/ngPDWz0RjgxjISezYsCJECIQC8DiPeD9lFMSh1
WI3Kpgt+ygrha/B1ce+km01fCcUEtjANBgkqhkiG9w0BAQsFAAOCAQEAbS7bvmh2
nA5DTEFTN8bjiqJU6SKJhritM0JooeUDW1kf/9TUD7xEFxSvlU29gCaYqzBR8QFp
OoFdq77l6onQBN5R4hMMSV6FSCy4koIDHMQ+0umQRawXH6EuALd9QuTtOPdtFFmX
OQ4hFNLJNmhLz4YXiuK6Aoxm83u1+34iv/KHx2b9zkb0TkCqyJAmz9vukCW1ViVe
DwWPE5UtvIR54rSNAInH5fzmbX1mW7gYw0EbcEiS8KT/9s6N69MAbkr6FJ1ECul8
c7990JMznjW2D8BImR/QtSmf9WCOdxJpyEXGPXQFnq1n8G5fi9mtxrh+Bpm9NxWF
JNj/6iyw5GzG/A==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEqjCCA5KgAwIBAgIQDeD/te5iy2EQn2CMnO1e0zANBgkqhkiG9w0BAQsFADBh
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
MjAeFw0xNzExMjcxMjQ2NDBaFw0yNzExMjcxMjQ2NDBaMG4xCzAJBgNVBAYTAlVT
MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
b20xLTArBgNVBAMTJEVuY3J5cHRpb24gRXZlcnl3aGVyZSBEViBUTFMgQ0EgLSBH
MjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAO8Uf46i/nr7pkgTDqnE
eSIfCFqvPnUq3aF1tMJ5hh9MnO6Lmt5UdHfBGwC9Si+XjK12cjZgxObsL6Rg1njv
NhAMJ4JunN0JGGRJGSevbJsA3sc68nbPQzuKp5Jc8vpryp2mts38pSCXorPR+sch
QisKA7OSQ1MjcFN0d7tbrceWFNbzgL2csJVQeogOBGSe/KZEIZw6gXLKeFe7mupn
NYJROi2iC11+HuF79iAttMc32Cv6UOxixY/3ZV+LzpLnklFq98XORgwkIJL1HuvP
ha8yvb+W6JislZJL+HLFtidoxmI7Qm3ZyIV66W533DsGFimFJkz3y0GeHWuSVMbI
lfsCAwEAAaOCAU8wggFLMB0GA1UdDgQWBBR435GQX+7erPbFdevVTFVT7yRKtjAf
BgNVHSMEGDAWgBROIlQgGJXm427mD/r6uRLtBhePOTAOBgNVHQ8BAf8EBAMCAYYw
HQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMCMBIGA1UdEwEB/wQIMAYBAf8C
AQAwNAYIKwYBBQUHAQEEKDAmMCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdp
Y2VydC5jb20wQgYDVR0fBDswOTA3oDWgM4YxaHR0cDovL2NybDMuZGlnaWNlcnQu
Y29tL0RpZ2lDZXJ0R2xvYmFsUm9vdEcyLmNybDBMBgNVHSAERTBDMDcGCWCGSAGG
/WwBAjAqMCgGCCsGAQUFBwIBFhxodHRwczovL3d3dy5kaWdpY2VydC5jb20vQ1BT
MAgGBmeBDAECATANBgkqhkiG9w0BAQsFAAOCAQEAoBs1eCLKakLtVRPFRjBIJ9LJ
L0s8ZWum8U8/1TMVkQMBn+CPb5xnCD0GSA6L/V0ZFrMNqBirrr5B241OesECvxIi
98bZ90h9+q/X5eMyOD35f8YTaEMpdnQCnawIwiHx06/0BfiTj+b/XQih+mqt3ZXe
xNCJqKexdiB2IWGSKcgahPacWkk/BAQFisKIFYEqHzV974S3FAz/8LIfD58xnsEN
GfzyIDkH3JrwYZ8caPTf6ZX9M1GrISN8HnWTtdNCH2xEajRa/h9ZBXjUyFKQrGk2
n2hcLrfZSbynEC/pSw/ET7H5nWwckjmAJ1l9fcnbqkU/pf6uMQmnfl0JQjJNSg==
-----END CERTIFICATE-----

View File

@ -1,20 +1,18 @@
// Keep in sync with backend (llm.py)
// Order here matches dropdown order
/**
 * Models selectable for code generation.
 * Values must stay in sync with the backend `Llm` enum (llm.py); member order
 * matches the frontend dropdown order.
 * NOTE(review): the scraped diff contained both pre- and post-change members
 * (duplicate keys); this is the reconstructed post-change enum.
 */
export enum CodeGenerationModel {
  CLAUDE_3_5_SONNET_2024_06_20 = "claude-3-5-sonnet-20240620",
  GPT_4O_2024_05_13 = "gpt-4o",
  GPT_4_TURBO_2024_04_09 = "gpt-4-turbo-preview",
  GPT_4_VISION = "gpt-4-vision-preview",
  CLAUDE_3_SONNET = "claude-3-sonnet",
}
// Will generate a static error if a model in the enum above is not in the descriptions
/**
 * Display metadata (dropdown label and beta flag) for each code-generation model.
 * The mapped-type annotation raises a static error if any CodeGenerationModel
 * member is missing an entry here.
 * NOTE(review): the scraped diff contained both pre- and post-change keys
 * (duplicate object keys); this is the reconstructed post-change map.
 */
export const CODE_GENERATION_MODEL_DESCRIPTIONS: {
  [key in CodeGenerationModel]: { name: string; inBeta: boolean };
} = {
  "claude-3-5-sonnet-20240620": { name: "Claude 3.5 Sonnet", inBeta: false },
  "gpt-4o": { name: "GPT-4o", inBeta: false },
  "gpt-4-turbo-preview": { name: "GPT-4 Turbo (deprecated)", inBeta: false },
  "gpt-4-vision-preview": { name: "GPT-4 Vision (deprecated)", inBeta: false },
  "claude-3-sonnet": { name: "Claude 3 (deprecated)", inBeta: false },
};