Compare commits

..

2 Commits

Author SHA1 Message Date
Abi Raja
2ec4bf59d3 add a beta label 2024-07-09 13:03:27 -04:00
Abi Raja
d13ae72c06 add basic text to ui generation 2024-07-09 12:53:07 -04:00
61 changed files with 1414 additions and 2288 deletions

View File

@ -33,6 +33,10 @@ We also just added experimental support for taking a video/screen recording of a
[Follow me on Twitter for updates](https://twitter.com/_abi_). [Follow me on Twitter for updates](https://twitter.com/_abi_).
## Sponsors
<a href="https://konghq.com/products/kong-konnect?utm_medium=referral&utm_source=github&utm_campaign=platform&utm_content=screenshot-to-code" target="_blank" title="Kong - powering the API world"><img src="https://picoapps.xyz/s2c-sponsors/Kong-GitHub-240x100.png"></a>
## 🚀 Hosted Version ## 🚀 Hosted Version
[Try it live on the hosted version (paid)](https://screenshottocode.com). [Try it live on the hosted version (paid)](https://screenshottocode.com).

View File

@ -7,19 +7,19 @@ repos:
- id: end-of-file-fixer - id: end-of-file-fixer
- id: check-yaml - id: check-yaml
- id: check-added-large-files - id: check-added-large-files
# - repo: local - repo: local
# hooks: hooks:
# - id: poetry-pytest - id: poetry-pytest
# name: Run pytest with Poetry name: Run pytest with Poetry
# entry: poetry run --directory backend pytest entry: poetry run --directory backend pytest
language: system
pass_filenames: false
always_run: true
files: ^backend/
# - id: poetry-pyright
# name: Run pyright with Poetry
# entry: poetry run --directory backend pyright
# language: system # language: system
# pass_filenames: false # pass_filenames: false
# always_run: true # always_run: true
# files: ^backend/ # files: ^backend/
# # - id: poetry-pyright
# # name: Run pyright with Poetry
# # entry: poetry run --directory backend pyright
# # language: system
# # pass_filenames: false
# # always_run: true
# # files: ^backend/

View File

@ -3,15 +3,7 @@
# TODO: Should only be set to true when value is 'True', not any abitrary truthy value # TODO: Should only be set to true when value is 'True', not any abitrary truthy value
import os import os
NUM_VARIANTS = 2
# LLM-related
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", None)
ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", None) ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", None)
OPENAI_BASE_URL = os.environ.get("OPENAI_BASE_URL", None)
# Image generation (optional)
REPLICATE_API_KEY = os.environ.get("REPLICATE_API_KEY", None)
# Debugging-related # Debugging-related

View File

@ -4,4 +4,5 @@ from typing import Literal
InputMode = Literal[ InputMode = Literal[
"image", "image",
"video", "video",
"text",
] ]

View File

@ -1,40 +1,38 @@
from config import ANTHROPIC_API_KEY, OPENAI_API_KEY import os
from config import ANTHROPIC_API_KEY
from llm import Llm, stream_claude_response, stream_openai_response from llm import Llm, stream_claude_response, stream_openai_response
from prompts import assemble_prompt from prompts import assemble_prompt
from prompts.types import Stack from prompts.types import Stack
from openai.types.chat import ChatCompletionMessageParam
async def generate_code_for_image(image_url: str, stack: Stack, model: Llm) -> str: async def generate_code_core(image_url: str, stack: Stack, model: Llm) -> str:
prompt_messages = assemble_prompt(image_url, stack) prompt_messages = assemble_prompt(image_url, stack)
return await generate_code_core(prompt_messages, model) openai_api_key = os.environ.get("OPENAI_API_KEY")
anthropic_api_key = ANTHROPIC_API_KEY
openai_base_url = None
async def process_chunk(content: str):
async def generate_code_core(
prompt_messages: list[ChatCompletionMessageParam], model: Llm
) -> str:
async def process_chunk(_: str):
pass pass
if model == Llm.CLAUDE_3_SONNET or model == Llm.CLAUDE_3_5_SONNET_2024_06_20: if model == Llm.CLAUDE_3_SONNET or model == Llm.CLAUDE_3_5_SONNET_2024_06_20:
if not ANTHROPIC_API_KEY: if not anthropic_api_key:
raise Exception("Anthropic API key not found") raise Exception("Anthropic API key not found")
completion = await stream_claude_response( completion = await stream_claude_response(
prompt_messages, prompt_messages,
api_key=ANTHROPIC_API_KEY, api_key=anthropic_api_key,
callback=lambda x: process_chunk(x), callback=lambda x: process_chunk(x),
model=model, model=model,
) )
else: else:
if not OPENAI_API_KEY: if not openai_api_key:
raise Exception("OpenAI API key not found") raise Exception("OpenAI API key not found")
completion = await stream_openai_response( completion = await stream_openai_response(
prompt_messages, prompt_messages,
api_key=OPENAI_API_KEY, api_key=openai_api_key,
base_url=None, base_url=openai_base_url,
callback=lambda x: process_chunk(x), callback=lambda x: process_chunk(x),
model=model, model=model,
) )

View File

@ -1,23 +0,0 @@
from datetime import datetime
import json
import os
from openai.types.chat import ChatCompletionMessageParam
def write_logs(prompt_messages: list[ChatCompletionMessageParam], completion: str):
# Get the logs path from environment, default to the current working directory
logs_path = os.environ.get("LOGS_PATH", os.getcwd())
# Create run_logs directory if it doesn't exist within the specified logs path
logs_directory = os.path.join(logs_path, "run_logs")
if not os.path.exists(logs_directory):
os.makedirs(logs_directory)
print("Writing to logs directory:", logs_directory)
# Generate a unique filename using the current timestamp within the logs directory
filename = datetime.now().strftime(f"{logs_directory}/messages_%Y%m%d_%H%M%S.json")
# Write the messages dict into a new file for each run
with open(filename, "w") as f:
f.write(json.dumps({"prompt": prompt_messages, "completion": completion}))

View File

@ -1,33 +1,17 @@
import asyncio import asyncio
import re import re
from typing import Dict, List, Literal, Union from typing import Dict, List, Union
from openai import AsyncOpenAI from openai import AsyncOpenAI
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from image_generation.replicate import call_replicate
async def process_tasks(prompts: List[str], api_key: str, base_url: str | None):
async def process_tasks( tasks = [generate_image(prompt, api_key, base_url) for prompt in prompts]
prompts: List[str],
api_key: str,
base_url: str | None,
model: Literal["dalle3", "flux"],
):
import time
start_time = time.time()
if model == "dalle3":
tasks = [generate_image_dalle(prompt, api_key, base_url) for prompt in prompts]
else:
tasks = [generate_image_replicate(prompt, api_key) for prompt in prompts]
results = await asyncio.gather(*tasks, return_exceptions=True) results = await asyncio.gather(*tasks, return_exceptions=True)
end_time = time.time()
generation_time = end_time - start_time
print(f"Image generation time: {generation_time:.2f} seconds")
processed_results: List[Union[str, None]] = [] processed_results: List[Union[str, None]] = []
for result in results: for result in results:
if isinstance(result, BaseException): if isinstance(result, Exception):
print(f"An exception occurred: {result}") print(f"An exception occurred: {result}")
processed_results.append(None) processed_results.append(None)
else: else:
@ -36,7 +20,7 @@ async def process_tasks(
return processed_results return processed_results
async def generate_image_dalle( async def generate_image(
prompt: str, api_key: str, base_url: str | None prompt: str, api_key: str, base_url: str | None
) -> Union[str, None]: ) -> Union[str, None]:
client = AsyncOpenAI(api_key=api_key, base_url=base_url) client = AsyncOpenAI(api_key=api_key, base_url=base_url)
@ -52,21 +36,6 @@ async def generate_image_dalle(
return res.data[0].url return res.data[0].url
async def generate_image_replicate(prompt: str, api_key: str) -> str:
# We use Flux Schnell
return await call_replicate(
{
"prompt": prompt,
"num_outputs": 1,
"aspect_ratio": "1:1",
"output_format": "png",
"output_quality": 100,
},
api_key,
)
def extract_dimensions(url: str): def extract_dimensions(url: str):
# Regular expression to match numbers in the format '300x200' # Regular expression to match numbers in the format '300x200'
matches = re.findall(r"(\d+)x(\d+)", url) matches = re.findall(r"(\d+)x(\d+)", url)
@ -94,11 +63,7 @@ def create_alt_url_mapping(code: str) -> Dict[str, str]:
async def generate_images( async def generate_images(
code: str, code: str, api_key: str, base_url: Union[str, None], image_cache: Dict[str, str]
api_key: str,
base_url: Union[str, None],
image_cache: Dict[str, str],
model: Literal["dalle3", "flux"] = "dalle3",
) -> str: ) -> str:
# Find all images # Find all images
soup = BeautifulSoup(code, "html.parser") soup = BeautifulSoup(code, "html.parser")
@ -126,7 +91,7 @@ async def generate_images(
return code return code
# Generate images # Generate images
results = await process_tasks(prompts, api_key, base_url, model) results = await process_tasks(prompts, api_key, base_url)
# Create a dict mapping alt text to image URL # Create a dict mapping alt text to image URL
mapped_image_urls = dict(zip(prompts, results)) mapped_image_urls = dict(zip(prompts, results))

View File

@ -1,65 +0,0 @@
import asyncio
import httpx
async def call_replicate(input: dict[str, str | int], api_token: str) -> str:
headers = {
"Authorization": f"Bearer {api_token}",
"Content-Type": "application/json",
}
data = {"input": input}
async with httpx.AsyncClient() as client:
try:
response = await client.post(
"https://api.replicate.com/v1/models/black-forest-labs/flux-schnell/predictions",
headers=headers,
json=data,
)
response.raise_for_status()
response_json = response.json()
# Extract the id from the response
prediction_id = response_json.get("id")
if not prediction_id:
raise ValueError("Prediction ID not found in initial response.")
# Polling every 0.1 seconds until the status is succeeded or error (upto 10s)
num_polls = 0
max_polls = 100
while num_polls < max_polls:
num_polls += 1
await asyncio.sleep(0.1)
# Check the status
status_check_url = (
f"https://api.replicate.com/v1/predictions/{prediction_id}"
)
status_response = await client.get(status_check_url, headers=headers)
status_response.raise_for_status()
status_response_json = status_response.json()
status = status_response_json.get("status")
# If status is succeeded or if there's an error, break out of the loop
if status == "succeeded":
return status_response_json["output"][0]
elif status == "error":
raise ValueError(
f"Inference errored out: {status_response_json.get('error', 'Unknown error')}"
)
elif status == "failed":
raise ValueError("Inference failed")
# If we've reached here, it means we've exceeded the max number of polls
raise TimeoutError("Inference timed out")
except httpx.HTTPStatusError as e:
raise ValueError(f"HTTP error occurred: {e}")
except httpx.RequestError as e:
raise ValueError(f"An error occurred while requesting: {e}")
except asyncio.TimeoutError:
raise TimeoutError("Request timed out")
except Exception as e:
raise ValueError(f"An unexpected error occurred: {e}")

View File

@ -1,4 +1,4 @@
import copy import base64
from enum import Enum from enum import Enum
from typing import Any, Awaitable, Callable, List, cast from typing import Any, Awaitable, Callable, List, cast
from anthropic import AsyncAnthropic from anthropic import AsyncAnthropic
@ -88,16 +88,12 @@ async def stream_claude_response(
client = AsyncAnthropic(api_key=api_key) client = AsyncAnthropic(api_key=api_key)
# Base parameters # Base parameters
max_tokens = 8192 max_tokens = 4096
temperature = 0.0 temperature = 0.0
# Translate OpenAI messages to Claude messages # Translate OpenAI messages to Claude messages
system_prompt = cast(str, messages[0].get("content"))
# Deep copy messages to avoid modifying the original list claude_messages = [dict(message) for message in messages[1:]]
cloned_messages = copy.deepcopy(messages)
system_prompt = cast(str, cloned_messages[0].get("content"))
claude_messages = [dict(message) for message in cloned_messages[1:]]
for message in claude_messages: for message in claude_messages:
if not isinstance(message["content"], list): if not isinstance(message["content"], list):
continue continue
@ -130,7 +126,6 @@ async def stream_claude_response(
temperature=temperature, temperature=temperature,
system=system_prompt, system=system_prompt,
messages=claude_messages, # type: ignore messages=claude_messages, # type: ignore
extra_headers={"anthropic-beta": "max-tokens-3-5-sonnet-2024-07-15"},
) as stream: ) as stream:
async for text in stream.text_stream: async for text in stream.text_stream:
await callback(text) await callback(text)

View File

@ -8,7 +8,7 @@ STREAM_CHUNK_SIZE = 20
async def mock_completion( async def mock_completion(
process_chunk: Callable[[str, int], Awaitable[None]], input_mode: InputMode process_chunk: Callable[[str], Awaitable[None]], input_mode: InputMode
) -> str: ) -> str:
code_to_return = ( code_to_return = (
TALLY_FORM_VIDEO_PROMPT_MOCK TALLY_FORM_VIDEO_PROMPT_MOCK
@ -17,7 +17,7 @@ async def mock_completion(
) )
for i in range(0, len(code_to_return), STREAM_CHUNK_SIZE): for i in range(0, len(code_to_return), STREAM_CHUNK_SIZE):
await process_chunk(code_to_return[i : i + STREAM_CHUNK_SIZE], 0) await process_chunk(code_to_return[i : i + STREAM_CHUNK_SIZE])
await asyncio.sleep(0.01) await asyncio.sleep(0.01)
if input_mode == "video": if input_mode == "video":

430
backend/poetry.lock generated
View File

@ -1,115 +1,5 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "aiohttp"
version = "3.9.5"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
files = [
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
{file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
{file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
{file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
{file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
{file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
{file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
{file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
{file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
{file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
{file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
{file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
{file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
{file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
{file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
{file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
{file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
]
[package.dependencies]
aiosignal = ">=1.1.2"
async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
attrs = ">=17.3.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
yarl = ">=1.0,<2.0"
[package.extras]
speedups = ["Brotli", "aiodns", "brotlicffi"]
[[package]]
name = "aiosignal"
version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
optional = false
python-versions = ">=3.7"
files = [
{file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
{file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
]
[package.dependencies]
frozenlist = ">=1.1.0"
[[package]] [[package]]
name = "anthropic" name = "anthropic"
version = "0.18.1" version = "0.18.1"
@ -156,36 +46,6 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"] trio = ["trio (>=0.23)"]
[[package]]
name = "async-timeout"
version = "4.0.3"
description = "Timeout context manager for asyncio programs"
optional = false
python-versions = ">=3.7"
files = [
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
[[package]]
name = "attrs"
version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]] [[package]]
name = "beautifulsoup4" name = "beautifulsoup4"
version = "4.12.3" version = "4.12.3"
@ -437,92 +297,6 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"] typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "frozenlist"
version = "1.4.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
optional = false
python-versions = ">=3.8"
files = [
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
{file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
{file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
{file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
{file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
{file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
{file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
{file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
{file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
{file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
{file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
{file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
{file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
{file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
{file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
{file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
{file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
{file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
{file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
{file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
{file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
{file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
{file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
{file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
{file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
{file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
{file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
{file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
{file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
{file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
{file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
{file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
{file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
{file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
{file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
{file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
{file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
{file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
{file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
{file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"},
{file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"},
{file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"},
{file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"},
{file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"},
{file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"},
{file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"},
{file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"},
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"},
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"},
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"},
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"},
{file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"},
{file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"},
{file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"},
{file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"},
{file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"},
{file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"},
{file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"},
{file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"},
{file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"},
{file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"},
{file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"},
{file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"},
{file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"},
{file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"},
{file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"},
{file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"},
{file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"},
{file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"},
{file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
{file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
]
[[package]] [[package]]
name = "fsspec" name = "fsspec"
version = "2024.2.0" version = "2024.2.0"
@ -757,105 +531,6 @@ doc = ["Sphinx (>=1.5.2,<2.0)", "numpydoc (>=0.6.0,<1.0)", "pygame (>=1.9.3,<2.0
optional = ["matplotlib (>=2.0.0,<3.0)", "opencv-python (>=3.0,<4.0)", "scikit-image (>=0.13.0,<1.0)", "scikit-learn", "scipy (>=0.19.0,<1.5)", "youtube_dl"] optional = ["matplotlib (>=2.0.0,<3.0)", "opencv-python (>=3.0,<4.0)", "scikit-image (>=0.13.0,<1.0)", "scikit-learn", "scipy (>=0.19.0,<1.5)", "youtube_dl"]
test = ["coverage (<5.0)", "coveralls (>=1.1,<2.0)", "pytest (>=3.0.0,<4.0)", "pytest-cov (>=2.5.1,<3.0)", "requests (>=2.8.1,<3.0)"] test = ["coverage (<5.0)", "coveralls (>=1.1,<2.0)", "pytest (>=3.0.0,<4.0)", "pytest-cov (>=2.5.1,<3.0)", "requests (>=2.8.1,<3.0)"]
[[package]]
name = "multidict"
version = "6.0.5"
description = "multidict implementation"
optional = false
python-versions = ">=3.7"
files = [
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
{file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
{file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
{file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
{file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
{file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
{file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
{file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
{file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
{file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
{file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
{file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
{file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
{file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
{file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
{file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
{file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
{file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
{file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
{file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
{file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
{file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
{file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
{file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
{file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
{file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
{file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
{file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
{file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
{file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
{file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
{file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
{file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
{file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
{file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
{file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
{file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
{file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
]
[[package]] [[package]]
name = "nodeenv" name = "nodeenv"
version = "1.8.0" version = "1.8.0"
@ -1656,110 +1331,7 @@ files = [
{file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
] ]
[[package]]
name = "yarl"
version = "1.9.4"
description = "Yet another URL library"
optional = false
python-versions = ">=3.7"
files = [
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
{file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
{file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
{file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
{file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
{file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
{file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
{file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
{file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
{file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
{file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
{file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
{file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
{file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
{file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
{file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
{file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
{file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
{file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
{file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
{file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
{file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
{file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
{file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
{file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
{file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
{file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
{file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
{file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
{file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
{file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
{file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
{file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
{file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
{file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
{file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
{file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
{file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
{file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
{file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
]
[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.10" python-versions = "^3.10"
content-hash = "9cec287b530dcda39515e7c76eed76d88eb0c725f37148af5ef93083cfb46ad8" content-hash = "5c21a836f7db5b25b6d0aa971bc04c9a935444be57b3fbf83b8316c46f261e24"

View File

@ -1,12 +1,12 @@
from typing import Union from typing import List, NoReturn, Union
from openai.types.chat import ChatCompletionMessageParam, ChatCompletionContentPartParam
from openai.types.chat import ChatCompletionMessageParam, ChatCompletionContentPartParam
from llm import Llm
from custom_types import InputMode
from image_generation.core import create_alt_url_mapping
from prompts.imported_code_prompts import IMPORTED_CODE_SYSTEM_PROMPTS from prompts.imported_code_prompts import IMPORTED_CODE_SYSTEM_PROMPTS
from prompts.screenshot_system_prompts import SYSTEM_PROMPTS from prompts.screenshot_system_prompts import SYSTEM_PROMPTS
from prompts.text_prompts import SYSTEM_PROMPTS as TEXT_SYSTEM_PROMPTS
from prompts.types import Stack from prompts.types import Stack
from video.utils import assemble_claude_prompt_video
USER_PROMPT = """ USER_PROMPT = """
@ -18,65 +18,9 @@ Generate code for a SVG that looks exactly like this.
""" """
async def create_prompt(
params: dict[str, str], stack: Stack, input_mode: InputMode
) -> tuple[list[ChatCompletionMessageParam], dict[str, str]]:
image_cache: dict[str, str] = {}
# If this generation started off with imported code, we need to assemble the prompt differently
if params.get("isImportedFromCode"):
original_imported_code = params["history"][0]
prompt_messages = assemble_imported_code_prompt(original_imported_code, stack)
for index, text in enumerate(params["history"][1:]):
if index % 2 == 0:
message: ChatCompletionMessageParam = {
"role": "user",
"content": text,
}
else:
message: ChatCompletionMessageParam = {
"role": "assistant",
"content": text,
}
prompt_messages.append(message)
else:
# Assemble the prompt for non-imported code
if params.get("resultImage"):
prompt_messages = assemble_prompt(
params["image"], stack, params["resultImage"]
)
else:
prompt_messages = assemble_prompt(params["image"], stack)
if params["generationType"] == "update":
# Transform the history tree into message format
# TODO: Move this to frontend
for index, text in enumerate(params["history"]):
if index % 2 == 0:
message: ChatCompletionMessageParam = {
"role": "assistant",
"content": text,
}
else:
message: ChatCompletionMessageParam = {
"role": "user",
"content": text,
}
prompt_messages.append(message)
image_cache = create_alt_url_mapping(params["history"][-2])
if input_mode == "video":
video_data_url = params["image"]
prompt_messages = await assemble_claude_prompt_video(video_data_url)
return prompt_messages, image_cache
def assemble_imported_code_prompt( def assemble_imported_code_prompt(
code: str, stack: Stack code: str, stack: Stack, model: Llm
) -> list[ChatCompletionMessageParam]: ) -> List[ChatCompletionMessageParam]:
system_content = IMPORTED_CODE_SYSTEM_PROMPTS[stack] system_content = IMPORTED_CODE_SYSTEM_PROMPTS[stack]
user_content = ( user_content = (
@ -85,12 +29,24 @@ def assemble_imported_code_prompt(
else "Here is the code of the SVG: " + code else "Here is the code of the SVG: " + code
) )
if model == Llm.CLAUDE_3_5_SONNET_2024_06_20:
return [ return [
{ {
"role": "system", "role": "system",
"content": system_content + "\n " + user_content, "content": system_content + "\n " + user_content,
} }
] ]
else:
return [
{
"role": "system",
"content": system_content,
},
{
"role": "user",
"content": user_content,
},
]
# TODO: Use result_image_data_url # TODO: Use result_image_data_url
@ -98,11 +54,11 @@ def assemble_prompt(
image_data_url: str, image_data_url: str,
stack: Stack, stack: Stack,
result_image_data_url: Union[str, None] = None, result_image_data_url: Union[str, None] = None,
) -> list[ChatCompletionMessageParam]: ) -> List[ChatCompletionMessageParam]:
system_content = SYSTEM_PROMPTS[stack] system_content = SYSTEM_PROMPTS[stack]
user_prompt = USER_PROMPT if stack != "svg" else SVG_USER_PROMPT user_prompt = USER_PROMPT if stack != "svg" else SVG_USER_PROMPT
user_content: list[ChatCompletionContentPartParam] = [ user_content: List[ChatCompletionContentPartParam] = [
{ {
"type": "image_url", "type": "image_url",
"image_url": {"url": image_data_url, "detail": "high"}, "image_url": {"url": image_data_url, "detail": "high"},
@ -132,3 +88,22 @@ def assemble_prompt(
"content": user_content, "content": user_content,
}, },
] ]
def assemble_text_prompt(
    text_prompt: str,
    stack: Stack,
) -> List[ChatCompletionMessageParam]:
    """Build the chat messages for generating UI from a plain-text description.

    The system message carries the stack-specific text-generation prompt and the
    user message wraps the raw description in a "Generate UI for ..." request.
    """
    messages: List[ChatCompletionMessageParam] = [
        {"role": "system", "content": TEXT_SYSTEM_PROMPTS[stack]},
        {"role": "user", "content": "Generate UI for " + text_prompt},
    ]
    return messages

View File

@ -391,81 +391,63 @@ def test_prompts():
def test_imported_code_prompts(): def test_imported_code_prompts():
code = "Sample code" tailwind_prompt = assemble_imported_code_prompt(
"code", "html_tailwind", Llm.GPT_4O_2024_05_13
tailwind_prompt = assemble_imported_code_prompt(code, "html_tailwind") )
expected_tailwind_prompt = [ expected_tailwind_prompt = [
{ {"role": "system", "content": IMPORTED_CODE_TAILWIND_SYSTEM_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the app: code"},
"content": IMPORTED_CODE_TAILWIND_SYSTEM_PROMPT
+ "\n Here is the code of the app: "
+ code,
}
] ]
assert tailwind_prompt == expected_tailwind_prompt assert tailwind_prompt == expected_tailwind_prompt
html_css_prompt = assemble_imported_code_prompt(code, "html_css") html_css_prompt = assemble_imported_code_prompt(
"code", "html_css", Llm.GPT_4O_2024_05_13
)
expected_html_css_prompt = [ expected_html_css_prompt = [
{ {"role": "system", "content": IMPORTED_CODE_HTML_CSS_SYSTEM_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the app: code"},
"content": IMPORTED_CODE_HTML_CSS_SYSTEM_PROMPT
+ "\n Here is the code of the app: "
+ code,
}
] ]
assert html_css_prompt == expected_html_css_prompt assert html_css_prompt == expected_html_css_prompt
react_tailwind_prompt = assemble_imported_code_prompt(code, "react_tailwind") react_tailwind_prompt = assemble_imported_code_prompt(
"code", "react_tailwind", Llm.GPT_4O_2024_05_13
)
expected_react_tailwind_prompt = [ expected_react_tailwind_prompt = [
{ {"role": "system", "content": IMPORTED_CODE_REACT_TAILWIND_SYSTEM_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the app: code"},
"content": IMPORTED_CODE_REACT_TAILWIND_SYSTEM_PROMPT
+ "\n Here is the code of the app: "
+ code,
}
] ]
assert react_tailwind_prompt == expected_react_tailwind_prompt assert react_tailwind_prompt == expected_react_tailwind_prompt
bootstrap_prompt = assemble_imported_code_prompt(code, "bootstrap") bootstrap_prompt = assemble_imported_code_prompt(
"code", "bootstrap", Llm.GPT_4O_2024_05_13
)
expected_bootstrap_prompt = [ expected_bootstrap_prompt = [
{ {"role": "system", "content": IMPORTED_CODE_BOOTSTRAP_SYSTEM_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the app: code"},
"content": IMPORTED_CODE_BOOTSTRAP_SYSTEM_PROMPT
+ "\n Here is the code of the app: "
+ code,
}
] ]
assert bootstrap_prompt == expected_bootstrap_prompt assert bootstrap_prompt == expected_bootstrap_prompt
ionic_tailwind = assemble_imported_code_prompt(code, "ionic_tailwind") ionic_tailwind = assemble_imported_code_prompt(
"code", "ionic_tailwind", Llm.GPT_4O_2024_05_13
)
expected_ionic_tailwind = [ expected_ionic_tailwind = [
{ {"role": "system", "content": IMPORTED_CODE_IONIC_TAILWIND_SYSTEM_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the app: code"},
"content": IMPORTED_CODE_IONIC_TAILWIND_SYSTEM_PROMPT
+ "\n Here is the code of the app: "
+ code,
}
] ]
assert ionic_tailwind == expected_ionic_tailwind assert ionic_tailwind == expected_ionic_tailwind
vue_tailwind = assemble_imported_code_prompt(code, "vue_tailwind") vue_tailwind = assemble_imported_code_prompt(
"code", "vue_tailwind", Llm.GPT_4O_2024_05_13
)
expected_vue_tailwind = [ expected_vue_tailwind = [
{ {"role": "system", "content": IMPORTED_CODE_VUE_TAILWIND_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the app: code"},
"content": IMPORTED_CODE_VUE_TAILWIND_PROMPT
+ "\n Here is the code of the app: "
+ code,
}
] ]
assert vue_tailwind == expected_vue_tailwind assert vue_tailwind == expected_vue_tailwind
svg = assemble_imported_code_prompt(code, "svg") svg = assemble_imported_code_prompt("code", "svg", Llm.GPT_4O_2024_05_13)
expected_svg = [ expected_svg = [
{ {"role": "system", "content": IMPORTED_CODE_SVG_SYSTEM_PROMPT},
"role": "system", {"role": "user", "content": "Here is the code of the SVG: code"},
"content": IMPORTED_CODE_SVG_SYSTEM_PROMPT
+ "\n Here is the code of the SVG: "
+ code,
}
] ]
assert svg == expected_svg assert svg == expected_svg

View File

@ -0,0 +1,37 @@
import unittest
from prompts.text_prompts import HTML_TAILWIND_SYSTEM_PROMPT
class TestTextPrompts(unittest.TestCase):
    """Regression tests pinning the exact wording of the text-to-UI system prompts."""

    def test_html_tailwind_system_prompt(self):
        """HTML_TAILWIND_SYSTEM_PROMPT must match the expected wording verbatim."""
        # The prompt is a long multi-line string; show the full diff on mismatch.
        self.maxDiff = None
        expected_prompt = """
You are an expert Tailwind developer.
- Make sure to make it look modern and sleek.
- Use modern, professional fonts and colors.
- Follow UX best practices.
- Do not add comments in the code such as "<!-- Add other navigation links as needed -->" and "<!-- ... other news items ... -->" in place of writing the full code. WRITE THE FULL CODE.
- For images, use placeholder images from https://placehold.co and include a detailed description of the image in the alt text so that an image generation AI can generate the image later.
In terms of libraries,
- Use this script to include Tailwind: <script src="https://cdn.tailwindcss.com"></script>
- You can use Google Fonts
- Font Awesome for icons: <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.3/css/all.min.css"></link>
Return only the full code in <html></html> tags.
Do not include markdown "```" or "```html" at the start or end.
Reply with only the code, and no text/explanation before and after the code.
"""
        # Compare with surrounding whitespace stripped so leading/trailing
        # newlines in either string do not cause spurious failures.
        # Fix: removed a leftover debug print(HTML_TAILWIND_SYSTEM_PROMPT)
        # that polluted test output.
        self.assertEqual(HTML_TAILWIND_SYSTEM_PROMPT.strip(), expected_prompt.strip())
# Allow running this test file directly (e.g. `python test_text_prompts.py`)
# in addition to pytest discovery.
if __name__ == "__main__":
    unittest.main()

View File

@ -0,0 +1,126 @@
from prompts.types import SystemPrompts
# Shared instruction fragments, composed into each stack-specific system
# prompt below via f-strings. NOTE: the exact wording is pinned verbatim by
# tests (see test_text_prompts.py), so edits here must update those tests too.

# Quality/UX guidance common to every stack.
GENERAL_INSTRUCTIONS = """
- Make sure to make it look modern and sleek.
- Use modern, professional fonts and colors.
- Follow UX best practices.
- Do not add comments in the code such as "<!-- Add other navigation links as needed -->" and "<!-- ... other news items ... -->" in place of writing the full code. WRITE THE FULL CODE.
- For images, use placeholder images from https://placehold.co and include a detailed description of the image in the alt text so that an image generation AI can generate the image later."""

# Library hints shared by most stacks: Google Fonts plus Font Awesome icons.
LIBRARY_INSTRUCTIONS = """
- You can use Google Fonts
- Font Awesome for icons: <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.3/css/all.min.css"></link>"""

# Output-format contract for HTML-producing stacks: bare <html> only, no
# markdown fences or surrounding prose (the SVG prompt has its own variant).
FORMAT_INSTRUCTIONS = """
Return only the full code in <html></html> tags.
Do not include markdown "```" or "```html" at the start or end.
Reply with only the code, and no text/explanation before and after the code.
"""

# System prompt for the html_tailwind stack (Tailwind via CDN script).
HTML_TAILWIND_SYSTEM_PROMPT = f"""
You are an expert Tailwind developer.
{GENERAL_INSTRUCTIONS}
In terms of libraries,
- Use this script to include Tailwind: <script src="https://cdn.tailwindcss.com"></script>
{LIBRARY_INSTRUCTIONS}
{FORMAT_INSTRUCTIONS}
"""

# System prompt for the html_css stack (plain HTML/CSS/JS, no framework CDN).
HTML_CSS_SYSTEM_PROMPT = f"""
You are an expert HTML, CSS and JS developer.
{GENERAL_INSTRUCTIONS}
In terms of libraries,
{LIBRARY_INSTRUCTIONS}
{FORMAT_INSTRUCTIONS}
"""

# System prompt for the react_tailwind stack; loads React + Babel standalone
# so the generated page runs without a build step.
REACT_TAILWIND_SYSTEM_PROMPT = f"""
You are an expert React/Tailwind developer.
{GENERAL_INSTRUCTIONS}
In terms of libraries,
- Use these script to include React so that it can run on a standalone page:
<script src="https://unpkg.com/react/umd/react.development.js"></script>
<script src="https://unpkg.com/react-dom/umd/react-dom.development.js"></script>
<script src="https://unpkg.com/@babel/standalone/babel.js"></script>
- Use this script to include Tailwind: <script src="https://cdn.tailwindcss.com"></script>
{LIBRARY_INSTRUCTIONS}
{FORMAT_INSTRUCTIONS}
"""

# System prompt for the bootstrap stack (Bootstrap 5 via jsDelivr CDN).
BOOTSTRAP_SYSTEM_PROMPT = f"""
You are an expert Bootstrap, HTML and JS developer.
{GENERAL_INSTRUCTIONS}
In terms of libraries,
- Use this script to include Bootstrap: <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-T3c6CoIi6uLrA9TneNEoa7RxnatzjcDSCmG1MXxSR1GAsXEV/Dwwykc2MPK8M2HN" crossorigin="anonymous">
{LIBRARY_INSTRUCTIONS}
{FORMAT_INSTRUCTIONS}
"""

# System prompt for the ionic_tailwind stack; spells out its own icon setup
# (ionicons) instead of reusing LIBRARY_INSTRUCTIONS' Font Awesome hint.
IONIC_TAILWIND_SYSTEM_PROMPT = f"""
You are an expert Ionic/Tailwind developer.
{GENERAL_INSTRUCTIONS}
In terms of libraries,
- Use these script to include Ionic so that it can run on a standalone page:
<script type="module" src="https://cdn.jsdelivr.net/npm/@ionic/core/dist/ionic/ionic.esm.js"></script>
<script nomodule src="https://cdn.jsdelivr.net/npm/@ionic/core/dist/ionic/ionic.js"></script>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@ionic/core/css/ionic.bundle.css" />
- Use this script to include Tailwind: <script src="https://cdn.tailwindcss.com"></script>
- You can use Google Fonts
- ionicons for icons, add the following <script > tags near the end of the page, right before the closing </body> tag:
<script type="module">
import ionicons from 'https://cdn.jsdelivr.net/npm/ionicons/+esm'
</script>
<script nomodule src="https://cdn.jsdelivr.net/npm/ionicons/dist/esm/ionicons.min.js"></script>
<link href="https://cdn.jsdelivr.net/npm/ionicons/dist/collection/components/icon/icon.min.css" rel="stylesheet">
{FORMAT_INSTRUCTIONS}
"""

# System prompt for the vue_tailwind stack (Vue 3 global build from CDN).
VUE_TAILWIND_SYSTEM_PROMPT = f"""
You are an expert Vue/Tailwind developer.
{GENERAL_INSTRUCTIONS}
In terms of libraries,
- Use these script to include Vue so that it can run on a standalone page:
<script src="https://registry.npmmirror.com/vue/3.3.11/files/dist/vue.global.js"></script>
- Use this script to include Tailwind: <script src="https://cdn.tailwindcss.com"></script>
{LIBRARY_INSTRUCTIONS}
{FORMAT_INSTRUCTIONS}
"""

# System prompt for the svg stack; uses its own <svg> output contract rather
# than the HTML FORMAT_INSTRUCTIONS.
SVG_SYSTEM_PROMPT = f"""
You are an expert at building SVGs.
{GENERAL_INSTRUCTIONS}
Return only the full code in <svg></svg> tags.
Do not include markdown "```" or "```svg" at the start or end.
"""

# Stack -> system prompt mapping for text-to-UI generation; imported elsewhere
# as TEXT_SYSTEM_PROMPTS and indexed by the requested Stack.
SYSTEM_PROMPTS = SystemPrompts(
    html_css=HTML_CSS_SYSTEM_PROMPT,
    html_tailwind=HTML_TAILWIND_SYSTEM_PROMPT,
    react_tailwind=REACT_TAILWIND_SYSTEM_PROMPT,
    bootstrap=BOOTSTRAP_SYSTEM_PROMPT,
    ionic_tailwind=IONIC_TAILWIND_SYSTEM_PROMPT,
    vue_tailwind=VUE_TAILWIND_SYSTEM_PROMPT,
    svg=SVG_SYSTEM_PROMPT,
)

View File

@ -19,7 +19,6 @@ anthropic = "^0.18.0"
moviepy = "^1.0.3" moviepy = "^1.0.3"
pillow = "^10.3.0" pillow = "^10.3.0"
types-pillow = "^10.2.0.20240520" types-pillow = "^10.2.0.20240520"
aiohttp = "^3.9.5"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
pytest = "^7.4.3" pytest = "^7.4.3"

View File

@ -1,18 +1,9 @@
import asyncio import os
from dataclasses import dataclass
import traceback import traceback
from fastapi import APIRouter, WebSocket from fastapi import APIRouter, WebSocket
import openai import openai
from codegen.utils import extract_html_content from codegen.utils import extract_html_content
from config import ( from config import ANTHROPIC_API_KEY, IS_PROD, SHOULD_MOCK_AI_RESPONSE
ANTHROPIC_API_KEY,
IS_PROD,
NUM_VARIANTS,
OPENAI_API_KEY,
OPENAI_BASE_URL,
REPLICATE_API_KEY,
SHOULD_MOCK_AI_RESPONSE,
)
from custom_types import InputMode from custom_types import InputMode
from llm import ( from llm import (
Llm, Llm,
@ -21,97 +12,77 @@ from llm import (
stream_claude_response_native, stream_claude_response_native,
stream_openai_response, stream_openai_response,
) )
from fs_logging.core import write_logs from openai.types.chat import ChatCompletionMessageParam
from mock_llm import mock_completion from mock_llm import mock_completion
from typing import Any, Callable, Coroutine, Dict, List, Literal, cast, get_args from typing import Dict, List, Union, cast, get_args
from image_generation.core import generate_images from image_generation import create_alt_url_mapping, generate_images
from prompts import create_prompt from prompts import assemble_imported_code_prompt, assemble_prompt, assemble_text_prompt
from datetime import datetime
import json
from prompts.claude_prompts import VIDEO_PROMPT from prompts.claude_prompts import VIDEO_PROMPT
from prompts.types import Stack from prompts.types import Stack
from utils import pprint_prompt
# from utils import pprint_prompt # from utils import pprint_prompt
from video.utils import extract_tag_content, assemble_claude_prompt_video
from ws.constants import APP_ERROR_WEB_SOCKET_CODE # type: ignore from ws.constants import APP_ERROR_WEB_SOCKET_CODE # type: ignore
router = APIRouter() router = APIRouter()
# Auto-upgrade usage of older models def write_logs(prompt_messages: List[ChatCompletionMessageParam], completion: str):
def auto_upgrade_model(code_generation_model: Llm) -> Llm: # Get the logs path from environment, default to the current working directory
if code_generation_model in {Llm.GPT_4_VISION, Llm.GPT_4_TURBO_2024_04_09}: logs_path = os.environ.get("LOGS_PATH", os.getcwd())
print(
f"Initial deprecated model: {code_generation_model}. Auto-updating code generation model to GPT-4O-2024-05-13" # Create run_logs directory if it doesn't exist within the specified logs path
) logs_directory = os.path.join(logs_path, "run_logs")
return Llm.GPT_4O_2024_05_13 if not os.path.exists(logs_directory):
elif code_generation_model == Llm.CLAUDE_3_SONNET: os.makedirs(logs_directory)
print(
f"Initial deprecated model: {code_generation_model}. Auto-updating code generation model to CLAUDE-3.5-SONNET-2024-06-20" print("Writing to logs directory:", logs_directory)
)
return Llm.CLAUDE_3_5_SONNET_2024_06_20 # Generate a unique filename using the current timestamp within the logs directory
return code_generation_model filename = datetime.now().strftime(f"{logs_directory}/messages_%Y%m%d_%H%M%S.json")
# Write the messages dict into a new file for each run
with open(filename, "w") as f:
f.write(json.dumps({"prompt": prompt_messages, "completion": completion}))
# Generate images, if needed @router.websocket("/generate-code")
async def perform_image_generation( async def stream_code(websocket: WebSocket):
completion: str, await websocket.accept()
should_generate_images: bool,
openai_api_key: str | None, print("Incoming websocket connection...")
openai_base_url: str | None,
image_cache: dict[str, str], async def throw_error(
message: str,
): ):
replicate_api_key = REPLICATE_API_KEY await websocket.send_json({"type": "error", "value": message})
if not should_generate_images: await websocket.close(APP_ERROR_WEB_SOCKET_CODE)
return completion
if replicate_api_key: # TODO: Are the values always strings?
image_generation_model = "flux" params: Dict[str, str] = await websocket.receive_json()
api_key = replicate_api_key
else:
if not openai_api_key:
print(
"No OpenAI API key and Replicate key found. Skipping image generation."
)
return completion
image_generation_model = "dalle3"
api_key = openai_api_key
print("Generating images with model: ", image_generation_model) print("Received params")
return await generate_images( # Read the code config settings from the request. Fall back to default if not provided.
completion, generated_code_config = ""
api_key=api_key, if "generatedCodeConfig" in params and params["generatedCodeConfig"]:
base_url=openai_base_url, generated_code_config = params["generatedCodeConfig"]
image_cache=image_cache, if not generated_code_config in get_args(Stack):
model=image_generation_model,
)
@dataclass
class ExtractedParams:
stack: Stack
input_mode: InputMode
code_generation_model: Llm
should_generate_images: bool
openai_api_key: str | None
anthropic_api_key: str | None
openai_base_url: str | None
async def extract_params(
params: Dict[str, str], throw_error: Callable[[str], Coroutine[Any, Any, None]]
) -> ExtractedParams:
# Read the code config settings (stack) from the request.
generated_code_config = params.get("generatedCodeConfig", "")
if generated_code_config not in get_args(Stack):
await throw_error(f"Invalid generated code config: {generated_code_config}") await throw_error(f"Invalid generated code config: {generated_code_config}")
raise ValueError(f"Invalid generated code config: {generated_code_config}") return
validated_stack = cast(Stack, generated_code_config) # Cast the variable to the Stack type
valid_stack = cast(Stack, generated_code_config)
# Validate the input mode # Validate the input mode
input_mode = params.get("inputMode") input_mode = params.get("inputMode")
if input_mode not in get_args(InputMode): if not input_mode in get_args(InputMode):
await throw_error(f"Invalid input mode: {input_mode}") await throw_error(f"Invalid input mode: {input_mode}")
raise ValueError(f"Invalid input mode: {input_mode}") raise Exception(f"Invalid input mode: {input_mode}")
# Cast the variable to the right type
validated_input_mode = cast(InputMode, input_mode) validated_input_mode = cast(InputMode, input_mode)
# Read the model from the request. Fall back to default if not provided. # Read the model from the request. Fall back to default if not provided.
@ -120,224 +91,194 @@ async def extract_params(
) )
try: try:
code_generation_model = convert_frontend_str_to_llm(code_generation_model_str) code_generation_model = convert_frontend_str_to_llm(code_generation_model_str)
except ValueError: except:
await throw_error(f"Invalid model: {code_generation_model_str}") await throw_error(f"Invalid model: {code_generation_model_str}")
raise ValueError(f"Invalid model: {code_generation_model_str}") raise Exception(f"Invalid model: {code_generation_model_str}")
exact_llm_version = None
openai_api_key = get_from_settings_dialog_or_env( print(
params, "openAiApiKey", OPENAI_API_KEY f"Generating {generated_code_config} code for uploaded {input_mode} using {code_generation_model} model..."
) )
# Get the OpenAI API key from the request. Fall back to environment variable if not provided.
# If neither is provided, we throw an error.
openai_api_key = None
if params["openAiApiKey"]:
openai_api_key = params["openAiApiKey"]
print("Using OpenAI API key from client-side settings dialog")
else:
openai_api_key = os.environ.get("OPENAI_API_KEY")
if openai_api_key:
print("Using OpenAI API key from environment variable")
if not openai_api_key and (
code_generation_model == Llm.GPT_4_VISION
or code_generation_model == Llm.GPT_4_TURBO_2024_04_09
or code_generation_model == Llm.GPT_4O_2024_05_13
):
print("OpenAI API key not found")
await throw_error(
"No OpenAI API key found. Please add your API key in the settings dialog or add it to backend/.env file. If you add it to .env, make sure to restart the backend server."
)
return
# Get the Anthropic API key from the request. Fall back to environment variable if not provided.
# If neither is provided, we throw an error later only if Claude is used. # If neither is provided, we throw an error later only if Claude is used.
anthropic_api_key = get_from_settings_dialog_or_env( anthropic_api_key = None
params, "anthropicApiKey", ANTHROPIC_API_KEY if "anthropicApiKey" in params and params["anthropicApiKey"]:
) anthropic_api_key = params["anthropicApiKey"]
print("Using Anthropic API key from client-side settings dialog")
else:
anthropic_api_key = ANTHROPIC_API_KEY
if anthropic_api_key:
print("Using Anthropic API key from environment variable")
# Base URL for OpenAI API # Get the OpenAI Base URL from the request. Fall back to environment variable if not provided.
openai_base_url: str | None = None openai_base_url: Union[str, None] = None
# Disable user-specified OpenAI Base URL in prod # Disable user-specified OpenAI Base URL in prod
if not IS_PROD: if not os.environ.get("IS_PROD"):
openai_base_url = get_from_settings_dialog_or_env( if "openAiBaseURL" in params and params["openAiBaseURL"]:
params, "openAiBaseURL", OPENAI_BASE_URL openai_base_url = params["openAiBaseURL"]
) print("Using OpenAI Base URL from client-side settings dialog")
else:
openai_base_url = os.environ.get("OPENAI_BASE_URL")
if openai_base_url:
print("Using OpenAI Base URL from environment variable")
if not openai_base_url: if not openai_base_url:
print("Using official OpenAI URL") print("Using official OpenAI URL")
# Get the image generation flag from the request. Fall back to True if not provided. # Get the image generation flag from the request. Fall back to True if not provided.
should_generate_images = bool(params.get("isImageGenerationEnabled", True)) should_generate_images = (
params["isImageGenerationEnabled"]
return ExtractedParams( if "isImageGenerationEnabled" in params
stack=validated_stack, else True
input_mode=validated_input_mode,
code_generation_model=code_generation_model,
should_generate_images=should_generate_images,
openai_api_key=openai_api_key,
anthropic_api_key=anthropic_api_key,
openai_base_url=openai_base_url,
) )
print("generating code...")
await websocket.send_json({"type": "status", "value": "Generating code..."})
def get_from_settings_dialog_or_env( async def process_chunk(content: str):
params: dict[str, str], key: str, env_var: str | None await websocket.send_json({"type": "chunk", "value": content})
) -> str | None:
value = params.get(key)
if value:
print(f"Using {key} from client-side settings dialog")
return value
if env_var:
print(f"Using {key} from environment variable")
return env_var
return None
@router.websocket("/generate-code")
async def stream_code(websocket: WebSocket):
await websocket.accept()
print("Incoming websocket connection...")
## Communication protocol setup
async def throw_error(
message: str,
):
print(message)
await websocket.send_json({"type": "error", "value": message})
await websocket.close(APP_ERROR_WEB_SOCKET_CODE)
async def send_message(
type: Literal["chunk", "status", "setCode", "error"],
value: str,
variantIndex: int,
):
# Print for debugging on the backend
if type == "error":
print(f"Error (variant {variantIndex}): {value}")
elif type == "status":
print(f"Status (variant {variantIndex}): {value}")
await websocket.send_json(
{"type": type, "value": value, "variantIndex": variantIndex}
)
## Parameter extract and validation
# TODO: Are the values always strings?
params: dict[str, str] = await websocket.receive_json()
print("Received params")
extracted_params = await extract_params(params, throw_error)
stack = extracted_params.stack
input_mode = extracted_params.input_mode
code_generation_model = extracted_params.code_generation_model
openai_api_key = extracted_params.openai_api_key
openai_base_url = extracted_params.openai_base_url
anthropic_api_key = extracted_params.anthropic_api_key
should_generate_images = extracted_params.should_generate_images
# Auto-upgrade usage of older models
code_generation_model = auto_upgrade_model(code_generation_model)
print(
f"Generating {stack} code in {input_mode} mode using {code_generation_model}..."
)
for i in range(NUM_VARIANTS):
await send_message("status", "Generating code...", i)
### Prompt creation
# Image cache for updates so that we don't have to regenerate images # Image cache for updates so that we don't have to regenerate images
image_cache: Dict[str, str] = {} image_cache: Dict[str, str] = {}
try: # If this generation started off with imported code, we need to assemble the prompt differently
prompt_messages, image_cache = await create_prompt(params, stack, input_mode) if params.get("isImportedFromCode") and params["isImportedFromCode"]:
except: original_imported_code = params["history"][0]
await throw_error( prompt_messages = assemble_imported_code_prompt(
"Error assembling prompt. Contact support at support@picoapps.xyz" original_imported_code, valid_stack, code_generation_model
) )
raise for index, text in enumerate(params["history"][1:]):
if index % 2 == 0:
message: ChatCompletionMessageParam = {
"role": "user",
"content": text,
}
else:
message: ChatCompletionMessageParam = {
"role": "assistant",
"content": text,
}
prompt_messages.append(message)
else:
# Assemble the prompt
try:
if validated_input_mode == "image":
if params.get("resultImage") and params["resultImage"]:
prompt_messages = assemble_prompt(
params["image"], valid_stack, params["resultImage"]
)
else:
prompt_messages = assemble_prompt(params["image"], valid_stack)
elif validated_input_mode == "text":
prompt_messages = assemble_text_prompt(params["image"], valid_stack)
else:
await throw_error("Invalid input mode")
return
except:
await websocket.send_json(
{
"type": "error",
"value": "Error assembling prompt. Contact support at support@picoapps.xyz",
}
)
await websocket.close()
return
# Transform the history tree into message format for updates
if params["generationType"] == "update":
# TODO: Move this to frontend
for index, text in enumerate(params["history"]):
if index % 2 == 0:
message: ChatCompletionMessageParam = {
"role": "assistant",
"content": text,
}
else:
message: ChatCompletionMessageParam = {
"role": "user",
"content": text,
}
prompt_messages.append(message)
image_cache = create_alt_url_mapping(params["history"][-2])
if validated_input_mode == "video":
video_data_url = params["image"]
prompt_messages = await assemble_claude_prompt_video(video_data_url)
# pprint_prompt(prompt_messages) # type: ignore # pprint_prompt(prompt_messages) # type: ignore
### Code generation
async def process_chunk(content: str, variantIndex: int):
await send_message("chunk", content, variantIndex)
if SHOULD_MOCK_AI_RESPONSE: if SHOULD_MOCK_AI_RESPONSE:
completions = [await mock_completion(process_chunk, input_mode=input_mode)] completion = await mock_completion(
process_chunk, input_mode=validated_input_mode
)
else: else:
try: try:
if input_mode == "video": if validated_input_mode == "video":
if not anthropic_api_key: if not anthropic_api_key:
await throw_error( await throw_error(
"Video only works with Anthropic models. No Anthropic API key found. Please add the environment variable ANTHROPIC_API_KEY to backend/.env or in the settings dialog" "Video only works with Anthropic models. No Anthropic API key found. Please add the environment variable ANTHROPIC_API_KEY to backend/.env or in the settings dialog"
) )
raise Exception("No Anthropic key") raise Exception("No Anthropic key")
completions = [ completion = await stream_claude_response_native(
await stream_claude_response_native(
system_prompt=VIDEO_PROMPT, system_prompt=VIDEO_PROMPT,
messages=prompt_messages, # type: ignore messages=prompt_messages, # type: ignore
api_key=anthropic_api_key, api_key=anthropic_api_key,
callback=lambda x: process_chunk(x, 0), callback=lambda x: process_chunk(x),
model=Llm.CLAUDE_3_OPUS, model=Llm.CLAUDE_3_OPUS,
include_thinking=True, include_thinking=True,
) )
] exact_llm_version = Llm.CLAUDE_3_OPUS
else: elif (
code_generation_model == Llm.CLAUDE_3_SONNET
# Depending on the presence and absence of various keys, or code_generation_model == Llm.CLAUDE_3_5_SONNET_2024_06_20
# we decide which models to run ):
variant_models = [] if not anthropic_api_key:
if openai_api_key and anthropic_api_key:
variant_models = ["anthropic", "openai"]
elif openai_api_key:
variant_models = ["openai", "openai"]
elif anthropic_api_key:
variant_models = ["anthropic", "anthropic"]
else:
await throw_error( await throw_error(
"No OpenAI or Anthropic API key found. Please add the environment variable OPENAI_API_KEY or ANTHROPIC_API_KEY to backend/.env or in the settings dialog. If you add it to .env, make sure to restart the backend server." "No Anthropic API key found. Please add the environment variable ANTHROPIC_API_KEY to backend/.env or in the settings dialog"
) )
raise Exception("No OpenAI or Anthropic key") raise Exception("No Anthropic key")
tasks: List[Coroutine[Any, Any, str]] = [] completion = await stream_claude_response(
for index, model in enumerate(variant_models): prompt_messages, # type: ignore
if model == "openai": api_key=anthropic_api_key,
if openai_api_key is None: callback=lambda x: process_chunk(x),
await throw_error("OpenAI API key is missing.") model=code_generation_model,
raise Exception("OpenAI API key is missing.") )
exact_llm_version = code_generation_model
tasks.append( else:
stream_openai_response( completion = await stream_openai_response(
prompt_messages, prompt_messages, # type: ignore
api_key=openai_api_key, api_key=openai_api_key,
base_url=openai_base_url, base_url=openai_base_url,
callback=lambda x, i=index: process_chunk(x, i), callback=lambda x: process_chunk(x),
model=Llm.GPT_4O_2024_05_13, model=code_generation_model,
) )
) exact_llm_version = code_generation_model
elif model == "anthropic":
if anthropic_api_key is None:
await throw_error("Anthropic API key is missing.")
raise Exception("Anthropic API key is missing.")
tasks.append(
stream_claude_response(
prompt_messages,
api_key=anthropic_api_key,
callback=lambda x, i=index: process_chunk(x, i),
model=Llm.CLAUDE_3_5_SONNET_2024_06_20,
)
)
# Run the models in parallel and capture exceptions if any
completions = await asyncio.gather(*tasks, return_exceptions=True)
# If all generations failed, throw an error
all_generations_failed = all(
isinstance(completion, Exception) for completion in completions
)
if all_generations_failed:
await throw_error("Error generating code. Please contact support.")
# Print the all the underlying exceptions for debugging
for completion in completions:
traceback.print_exception(
type(completion), completion, completion.__traceback__
)
raise Exception("All generations failed")
# If some completions failed, replace them with empty strings
for index, completion in enumerate(completions):
if isinstance(completion, Exception):
completions[index] = ""
print("Generation failed for variant", index)
print("Models used for generation: ", variant_models)
except openai.AuthenticationError as e: except openai.AuthenticationError as e:
print("[GENERATE_CODE] Authentication failed", e) print("[GENERATE_CODE] Authentication failed", e)
error_message = ( error_message = (
@ -373,34 +314,42 @@ async def stream_code(websocket: WebSocket):
) )
return await throw_error(error_message) return await throw_error(error_message)
## Post-processing if validated_input_mode == "video":
completion = extract_tag_content("html", completion)
print("Exact used model for generation: ", exact_llm_version)
# Strip the completion of everything except the HTML content # Strip the completion of everything except the HTML content
completions = [extract_html_content(completion) for completion in completions] completion = extract_html_content(completion)
# Write the messages dict into a log so that we can debug later # Write the messages dict into a log so that we can debug later
write_logs(prompt_messages, completions[0]) write_logs(prompt_messages, completion) # type: ignore
## Image Generation try:
if should_generate_images:
for index, _ in enumerate(completions): await websocket.send_json(
await send_message("status", "Generating images...", index) {"type": "status", "value": "Generating images..."}
)
image_generation_tasks = [ updated_html = await generate_images(
perform_image_generation( completion,
completion, api_key=openai_api_key,
should_generate_images, base_url=openai_base_url,
openai_api_key, image_cache=image_cache,
openai_base_url, )
image_cache, else:
updated_html = completion
await websocket.send_json({"type": "setCode", "value": updated_html})
await websocket.send_json(
{"type": "status", "value": "Code generation complete."}
)
except Exception as e:
traceback.print_exc()
print("Image generation failed", e)
# Send set code even if image generation fails since that triggers
# the frontend to update history
await websocket.send_json({"type": "setCode", "value": completion})
await websocket.send_json(
{"type": "status", "value": "Image generation failed but code is complete."}
) )
for completion in completions
]
updated_completions = await asyncio.gather(*image_generation_tasks)
for index, updated_html in enumerate(updated_completions):
await send_message("setCode", updated_html, index)
await send_message("status", "Code generation complete.", index)
await websocket.close() await websocket.close()

View File

@ -10,12 +10,12 @@ from typing import Any, Coroutine
import asyncio import asyncio
from evals.config import EVALS_DIR from evals.config import EVALS_DIR
from evals.core import generate_code_for_image from evals.core import generate_code_core
from evals.utils import image_to_data_url from evals.utils import image_to_data_url
STACK = "html_tailwind" STACK = "ionic_tailwind"
# MODEL = Llm.CLAUDE_3_5_SONNET_2024_06_20 MODEL = Llm.GPT_4O_2024_05_13
N = 2 # Number of outputs to generate N = 1 # Number of outputs to generate
async def main(): async def main():
@ -29,21 +29,10 @@ async def main():
for filename in evals: for filename in evals:
filepath = os.path.join(INPUT_DIR, filename) filepath = os.path.join(INPUT_DIR, filename)
data_url = await image_to_data_url(filepath) data_url = await image_to_data_url(filepath)
for n in range(N): # Generate N tasks for each input for _ in range(N): # Generate N tasks for each input
if n == 0: task = generate_code_core(image_url=data_url, stack=STACK, model=MODEL)
task = generate_code_for_image(
image_url=data_url,
stack=STACK,
model=Llm.CLAUDE_3_5_SONNET_2024_06_20,
)
else:
task = generate_code_for_image(
image_url=data_url, stack=STACK, model=Llm.GPT_4O_2024_05_13
)
tasks.append(task) tasks.append(task)
print(f"Generating {len(tasks)} codes")
results = await asyncio.gather(*tasks) results = await asyncio.gather(*tasks)
os.makedirs(OUTPUT_DIR, exist_ok=True) os.makedirs(OUTPUT_DIR, exist_ok=True)
@ -60,48 +49,4 @@ async def main():
file.write(content) file.write(content)
# async def text_main():
# OUTPUT_DIR = EVALS_DIR + "/outputs"
# GENERAL_TEXT_V1 = [
# "Login form",
# "Simple notification",
# "button",
# "saas dashboard",
# "landing page for barber shop",
# ]
# tasks: list[Coroutine[Any, Any, str]] = []
# for prompt in GENERAL_TEXT_V1:
# for n in range(N): # Generate N tasks for each input
# if n == 0:
# task = generate_code_for_text(
# text=prompt,
# stack=STACK,
# model=Llm.CLAUDE_3_5_SONNET_2024_06_20,
# )
# else:
# task = generate_code_for_text(
# text=prompt, stack=STACK, model=Llm.GPT_4O_2024_05_13
# )
# tasks.append(task)
# print(f"Generating {len(tasks)} codes")
# results = await asyncio.gather(*tasks)
# os.makedirs(OUTPUT_DIR, exist_ok=True)
# for i, content in enumerate(results):
# # Calculate index for filename and output number
# eval_index = i // N
# output_number = i % N
# filename = GENERAL_TEXT_V1[eval_index]
# # File name is derived from the original filename in evals with an added output number
# output_filename = f"{os.path.splitext(filename)[0]}_{output_number}.html"
# output_filepath = os.path.join(OUTPUT_DIR, output_filename)
# with open(output_filepath, "w") as file:
# file.write(content)
asyncio.run(main()) asyncio.run(main())

View File

@ -1,85 +0,0 @@
import asyncio
import os
from typing import List, Optional, Literal
from dotenv import load_dotenv
import aiohttp
from image_generation.core import process_tasks
EVALS = [
"Romantic Background",
"Company logo: A stylized green sprout emerging from a circle",
"Placeholder image of a PDF cover with abstract design",
"A complex bubble diagram showing various interconnected features and aspects of FestivalPro, with a large central bubble surrounded by smaller bubbles of different colors representing different categories and functionalities",
"A vibrant, abstract visualization of the RhythmRise experience ecosystem, featuring interconnected neon elements representing music, technology, and human connection",
"Banner with text 'LiblibAI学院 课程入口'",
"Profile picture of Pierre-Louis Labonne",
"Two hands holding iPhone 14 models with colorful displays",
"Portrait of a woman with long dark hair smiling at the camera",
"Threadless logo on a gradient background from light pink to coral",
"Jordan Schlansky Shows Conan His Favorite Nose Hair Trimmer",
"Team Coco",
"Intro to Large Language Models",
"Andrej Karpathy",
"He built a $200 million toy company",
"CNBC International",
"What will happen in year three of the war?",
"Channel",
"This is it",
"How ASML Dominates Chip Machines",
]
# Load environment variables
load_dotenv()
# Get API keys from environment variables
OPENAI_API_KEY: Optional[str] = os.getenv("OPENAI_API_KEY")
REPLICATE_API_TOKEN: Optional[str] = os.getenv("REPLICATE_API_TOKEN")
# Directory to save generated images
OUTPUT_DIR: str = "generated_images"
async def generate_and_save_images(
prompts: List[str],
model: Literal["dalle3", "flux"],
api_key: Optional[str],
) -> None:
# Ensure the output directory exists
os.makedirs(OUTPUT_DIR, exist_ok=True)
if api_key is None:
raise ValueError(f"API key for {model} is not set in the environment variables")
# Generate images
results: List[Optional[str]] = await process_tasks(
prompts, api_key, None, model=model
)
# Save images to disk
async with aiohttp.ClientSession() as session:
for i, image_url in enumerate(results):
if image_url:
# Get the image data
async with session.get(image_url) as response:
image_data: bytes = await response.read()
# Save the image with a filename based on the input eval
prefix = "replicate_" if model == "flux" else "dalle3_"
filename: str = (
f"{prefix}{prompts[i][:50].replace(' ', '_').replace(':', '')}.png"
)
filepath: str = os.path.join(OUTPUT_DIR, filename)
with open(filepath, "wb") as f:
f.write(image_data)
print(f"Saved {model} image: {filepath}")
else:
print(f"Failed to generate {model} image for prompt: {prompts[i]}")
async def main() -> None:
# await generate_and_save_images(EVALS, "dalle3", OPENAI_API_KEY)
await generate_and_save_images(EVALS, "flux", REPLICATE_API_TOKEN)
if __name__ == "__main__":
asyncio.run(main())

View File

@ -36,7 +36,6 @@
"codemirror": "^6.0.1", "codemirror": "^6.0.1",
"copy-to-clipboard": "^3.3.3", "copy-to-clipboard": "^3.3.3",
"html2canvas": "^1.4.1", "html2canvas": "^1.4.1",
"nanoid": "^5.0.7",
"react": "^18.2.0", "react": "^18.2.0",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
"react-dropzone": "^14.2.3", "react-dropzone": "^14.2.3",

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@ -1,63 +1,67 @@
import { useEffect, useRef } from "react"; import { useEffect, useRef, useState } from "react";
import ImageUpload from "./components/ImageUpload";
import CodePreview from "./components/CodePreview";
import Preview from "./components/Preview";
import { generateCode } from "./generateCode"; import { generateCode } from "./generateCode";
import SettingsDialog from "./components/settings/SettingsDialog"; import Spinner from "./components/Spinner";
import classNames from "classnames";
import {
FaCode,
FaDesktop,
FaDownload,
FaMobile,
FaUndo,
} from "react-icons/fa";
import { Switch } from "./components/ui/switch";
import { Button } from "@/components/ui/button";
import { Textarea } from "@/components/ui/textarea";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "./components/ui/tabs";
import SettingsDialog from "./components/SettingsDialog";
import { AppState, CodeGenerationParams, EditorTheme, Settings } from "./types"; import { AppState, CodeGenerationParams, EditorTheme, Settings } from "./types";
import { IS_RUNNING_ON_CLOUD } from "./config"; import { IS_RUNNING_ON_CLOUD } from "./config";
import { PicoBadge } from "./components/messages/PicoBadge"; import { PicoBadge } from "./components/PicoBadge";
import { OnboardingNote } from "./components/messages/OnboardingNote"; import { OnboardingNote } from "./components/OnboardingNote";
import { usePersistedState } from "./hooks/usePersistedState"; import { usePersistedState } from "./hooks/usePersistedState";
import { UrlInputSection } from "./components/UrlInputSection";
import TermsOfServiceDialog from "./components/TermsOfServiceDialog"; import TermsOfServiceDialog from "./components/TermsOfServiceDialog";
import html2canvas from "html2canvas";
import { USER_CLOSE_WEB_SOCKET_CODE } from "./constants"; import { USER_CLOSE_WEB_SOCKET_CODE } from "./constants";
import { extractHistory } from "./components/history/utils"; import CodeTab from "./components/CodeTab";
import OutputSettingsSection from "./components/OutputSettingsSection";
import { History } from "./components/history/history_types";
import HistoryDisplay from "./components/history/HistoryDisplay";
import { extractHistoryTree } from "./components/history/utils";
import toast from "react-hot-toast"; import toast from "react-hot-toast";
import ImportCodeSection from "./components/ImportCodeSection";
import { Stack } from "./lib/stacks"; import { Stack } from "./lib/stacks";
import { CodeGenerationModel } from "./lib/models"; import { CodeGenerationModel } from "./lib/models";
import ModelSettingsSection from "./components/ModelSettingsSection";
import { extractHtml } from "./components/preview/extractHtml";
import useBrowserTabIndicator from "./hooks/useBrowserTabIndicator"; import useBrowserTabIndicator from "./hooks/useBrowserTabIndicator";
import TipLink from "./components/messages/TipLink"; import TipLink from "./components/core/TipLink";
import SelectAndEditModeToggleButton from "./components/select-and-edit/SelectAndEditModeToggleButton";
import { useAppStore } from "./store/app-store"; import { useAppStore } from "./store/app-store";
import { useProjectStore } from "./store/project-store"; import GenerateFromText from "./components/generate-from-text/GenerateFromText";
import Sidebar from "./components/sidebar/Sidebar";
import PreviewPane from "./components/preview/PreviewPane"; const IS_OPENAI_DOWN = false;
import DeprecationMessage from "./components/messages/DeprecationMessage";
import { GenerationSettings } from "./components/settings/GenerationSettings";
import StartPane from "./components/start-pane/StartPane";
import { takeScreenshot } from "./lib/takeScreenshot";
import { Commit } from "./components/commits/types";
import { createCommit } from "./components/commits/utils";
function App() { function App() {
const { const [appState, setAppState] = useState<AppState>(AppState.INITIAL);
// Inputs const [generatedCode, setGeneratedCode] = useState<string>("");
inputMode,
setInputMode,
isImportedFromCode,
setIsImportedFromCode,
referenceImages,
setReferenceImages,
head, const [inputMode, setInputMode] = useState<"image" | "video" | "text">(
commits, "image"
addCommit, );
removeCommit,
setHead,
appendCommitCode,
setCommitCode,
resetCommits,
resetHead,
// Outputs const [initialPrompt, setInitialPrompt] = useState<string>("");
appendExecutionConsole,
resetExecutionConsoles,
} = useProjectStore();
const { const [referenceImages, setReferenceImages] = useState<string[]>([]);
disableInSelectAndEditMode, const [executionConsole, setExecutionConsole] = useState<string[]>([]);
setUpdateInstruction, const [updateInstruction, setUpdateInstruction] = useState("");
appState, const [isImportedFromCode, setIsImportedFromCode] = useState<boolean>(false);
setAppState,
shouldIncludeResultImage, const { disableInSelectAndEditMode } = useAppStore();
setShouldIncludeResultImage,
} = useAppStore();
// Settings // Settings
const [settings, setSettings] = usePersistedState<Settings>( const [settings, setSettings] = usePersistedState<Settings>(
@ -76,20 +80,35 @@ function App() {
"setting" "setting"
); );
const wsRef = useRef<WebSocket>(null);
// Code generation model from local storage or the default value // Code generation model from local storage or the default value
const model = const selectedCodeGenerationModel =
settings.codeGenerationModel || CodeGenerationModel.GPT_4_VISION; settings.codeGenerationModel || CodeGenerationModel.GPT_4_VISION;
// App history
const [appHistory, setAppHistory] = useState<History>([]);
// Tracks the currently shown version from app history
const [currentVersion, setCurrentVersion] = useState<number | null>(null);
const [shouldIncludeResultImage, setShouldIncludeResultImage] =
useState<boolean>(false);
const wsRef = useRef<WebSocket>(null);
const showReactWarning =
selectedCodeGenerationModel ===
CodeGenerationModel.GPT_4_TURBO_2024_04_09 &&
settings.generatedCodeConfig === Stack.REACT_TAILWIND;
const showBetterModelMessage = const showBetterModelMessage =
model !== CodeGenerationModel.GPT_4O_2024_05_13 && selectedCodeGenerationModel !== CodeGenerationModel.GPT_4O_2024_05_13 &&
model !== CodeGenerationModel.CLAUDE_3_5_SONNET_2024_06_20 && selectedCodeGenerationModel !==
CodeGenerationModel.CLAUDE_3_5_SONNET_2024_06_20 &&
appState === AppState.INITIAL; appState === AppState.INITIAL;
const showSelectAndEditFeature = const showSelectAndEditFeature =
(model === CodeGenerationModel.GPT_4O_2024_05_13 || (selectedCodeGenerationModel === CodeGenerationModel.GPT_4O_2024_05_13 ||
model === CodeGenerationModel.CLAUDE_3_5_SONNET_2024_06_20) && selectedCodeGenerationModel ===
CodeGenerationModel.CLAUDE_3_5_SONNET_2024_06_20) &&
(settings.generatedCodeConfig === Stack.HTML_TAILWIND || (settings.generatedCodeConfig === Stack.HTML_TAILWIND ||
settings.generatedCodeConfig === Stack.HTML_CSS); settings.generatedCodeConfig === Stack.HTML_CSS);
@ -108,122 +127,168 @@ function App() {
} }
}, [settings.generatedCodeConfig, setSettings]); }, [settings.generatedCodeConfig, setSettings]);
// Functions const takeScreenshot = async (): Promise<string> => {
const iframeElement = document.querySelector(
"#preview-desktop"
) as HTMLIFrameElement;
if (!iframeElement?.contentWindow?.document.body) {
return "";
}
const canvas = await html2canvas(iframeElement.contentWindow.document.body);
const png = canvas.toDataURL("image/png");
return png;
};
const downloadCode = () => {
// Create a blob from the generated code
const blob = new Blob([generatedCode], { type: "text/html" });
const url = URL.createObjectURL(blob);
// Create an anchor element and set properties for download
const a = document.createElement("a");
a.href = url;
a.download = "index.html"; // Set the file name for download
document.body.appendChild(a); // Append to the document
a.click(); // Programmatically click the anchor to trigger download
// Clean up by removing the anchor and revoking the Blob URL
document.body.removeChild(a);
URL.revokeObjectURL(url);
};
const reset = () => { const reset = () => {
setAppState(AppState.INITIAL); setAppState(AppState.INITIAL);
setShouldIncludeResultImage(false); setGeneratedCode("");
setUpdateInstruction("");
disableInSelectAndEditMode();
resetExecutionConsoles();
resetCommits();
resetHead();
// Inputs
setInputMode("image");
setReferenceImages([]); setReferenceImages([]);
setInitialPrompt("");
setExecutionConsole([]);
setUpdateInstruction("");
setIsImportedFromCode(false); setIsImportedFromCode(false);
setAppHistory([]);
setCurrentVersion(null);
setShouldIncludeResultImage(false);
disableInSelectAndEditMode();
}; };
const regenerate = () => { const regenerate = () => {
if (head === null) { if (currentVersion === null) {
toast.error( toast.error(
"No current version set. Please contact support via chat or Github." "No current version set. Please open a Github issue as this shouldn't happen."
); );
throw new Error("Regenerate called with no head"); return;
} }
// Retrieve the previous command // Retrieve the previous command
const currentCommit = commits[head]; const previousCommand = appHistory[currentVersion];
if (currentCommit.type !== "ai_create") { if (previousCommand.type !== "ai_create") {
toast.error("Only the first version can be regenerated."); toast.error("Only the first version can be regenerated.");
return; return;
} }
// Re-run the create // Re-run the create
if (inputMode === "image" || inputMode === "video") {
doCreate(referenceImages, inputMode); doCreate(referenceImages, inputMode);
} else {
// TODO: Fix this
doCreateFromText(initialPrompt);
}
}; };
// Used when the user cancels the code generation
const cancelCodeGeneration = () => { const cancelCodeGeneration = () => {
wsRef.current?.close?.(USER_CLOSE_WEB_SOCKET_CODE); wsRef.current?.close?.(USER_CLOSE_WEB_SOCKET_CODE);
// make sure stop can correct the state even if the websocket is already closed
cancelCodeGenerationAndReset();
}; };
// Used for code generation failure as well const previewCode =
const cancelCodeGenerationAndReset = (commit: Commit) => { inputMode === "video" && appState === AppState.CODING
// When the current commit is the first version, reset the entire app state ? extractHtml(generatedCode)
if (commit.type === "ai_create") { : generatedCode;
const cancelCodeGenerationAndReset = () => {
// When this is the first version, reset the entire app state
if (currentVersion === null) {
reset(); reset();
} else { } else {
// Otherwise, remove current commit from commits // Otherwise, revert to the last version
removeCommit(commit.hash); setGeneratedCode(appHistory[currentVersion].code);
// Revert to parent commit
const parentCommitHash = commit.parentHash;
if (parentCommitHash) {
setHead(parentCommitHash);
} else {
throw new Error("Parent commit not found");
}
setAppState(AppState.CODE_READY); setAppState(AppState.CODE_READY);
} }
}; };
function doGenerateCode(params: CodeGenerationParams) { function doGenerateCode(
// Reset the execution console params: CodeGenerationParams,
resetExecutionConsoles(); parentVersion: number | null
) {
// Set the app state setExecutionConsole([]);
setAppState(AppState.CODING); setAppState(AppState.CODING);
// Merge settings with params // Merge settings with params
const updatedParams = { ...params, ...settings }; const updatedParams = { ...params, ...settings };
const baseCommitObject = {
variants: [{ code: "" }, { code: "" }],
};
const commitInputObject =
params.generationType === "create"
? {
...baseCommitObject,
type: "ai_create" as const,
parentHash: null,
inputs: { image_url: referenceImages[0] },
}
: {
...baseCommitObject,
type: "ai_edit" as const,
parentHash: head,
inputs: {
prompt: params.history
? params.history[params.history.length - 1]
: "",
},
};
// Create a new commit and set it as the head
const commit = createCommit(commitInputObject);
addCommit(commit);
setHead(commit.hash);
generateCode( generateCode(
wsRef, wsRef,
updatedParams, updatedParams,
// On change // On change
(token, variantIndex) => { (token) => setGeneratedCode((prev) => prev + token),
appendCommitCode(commit.hash, variantIndex, token);
},
// On set code // On set code
(code, variantIndex) => { (code) => {
setCommitCode(commit.hash, variantIndex, code); setGeneratedCode(code);
if (params.generationType === "create") {
if (inputMode === "image" || inputMode === "video") {
setAppHistory([
{
type: "ai_create",
parentIndex: null,
code,
inputs: { image_url: referenceImages[0] },
},
]);
} else {
setAppHistory([
{
type: "ai_create",
parentIndex: null,
code,
inputs: { text: params.image },
},
]);
}
setCurrentVersion(0);
} else {
setAppHistory((prev) => {
// Validate parent version
if (parentVersion === null) {
toast.error(
"No parent version set. Contact support or open a Github issue."
);
return prev;
}
const newHistory: History = [
...prev,
{
type: "ai_edit",
parentIndex: parentVersion,
code,
inputs: {
prompt: params.history
? params.history[params.history.length - 1]
: updateInstruction,
},
},
];
setCurrentVersion(newHistory.length - 1);
return newHistory;
});
}
}, },
// On status update // On status update
(line, variantIndex) => appendExecutionConsole(variantIndex, line), (line) => setExecutionConsole((prev) => [...prev, line]),
// On cancel // On cancel
() => { () => {
cancelCodeGenerationAndReset(commit); cancelCodeGenerationAndReset();
}, },
// On complete // On complete
() => { () => {
@ -237,20 +302,36 @@ function App() {
// Reset any existing state // Reset any existing state
reset(); reset();
// Set the input states
setReferenceImages(referenceImages); setReferenceImages(referenceImages);
setInputMode(inputMode); setInputMode(inputMode);
// Kick off the code generation
if (referenceImages.length > 0) { if (referenceImages.length > 0) {
doGenerateCode({ doGenerateCode(
{
generationType: "create", generationType: "create",
image: referenceImages[0], image: referenceImages[0],
inputMode, inputMode,
}); },
currentVersion
);
} }
} }
function doCreateFromText(text: string) {
// Reset any existing state
reset();
setInputMode("text");
setInitialPrompt(text);
doGenerateCode(
{
generationType: "create",
inputMode: "text",
image: text,
},
currentVersion
);
}
// Subsequent updates // Subsequent updates
async function doUpdate( async function doUpdate(
updateInstruction: string, updateInstruction: string,
@ -261,21 +342,21 @@ function App() {
return; return;
} }
if (head === null) { if (currentVersion === null) {
toast.error( toast.error(
"No current version set. Contact support or open a Github issue." "No current version set. Contact support or open a Github issue."
); );
throw new Error("Update called with no head"); return;
} }
let historyTree; let historyTree;
try { try {
historyTree = extractHistory(head, commits); historyTree = extractHistoryTree(appHistory, currentVersion);
} catch { } catch {
toast.error( toast.error(
"Version history is invalid. This shouldn't happen. Please contact support or open a Github issue." "Version history is invalid. This shouldn't happen. Please contact support or open a Github issue."
); );
throw new Error("Invalid version history"); return;
} }
let modifiedUpdateInstruction = updateInstruction; let modifiedUpdateInstruction = updateInstruction;
@ -289,19 +370,34 @@ function App() {
} }
const updatedHistory = [...historyTree, modifiedUpdateInstruction]; const updatedHistory = [...historyTree, modifiedUpdateInstruction];
const resultImage = shouldIncludeResultImage
? await takeScreenshot()
: undefined;
doGenerateCode({ if (shouldIncludeResultImage) {
const resultImage = await takeScreenshot();
doGenerateCode(
{
generationType: "update", generationType: "update",
inputMode, inputMode,
image: referenceImages[0], image: referenceImages[0],
resultImage, resultImage: resultImage,
history: updatedHistory, history: updatedHistory,
isImportedFromCode, isImportedFromCode,
}); },
currentVersion
);
} else {
doGenerateCode(
{
generationType: "update",
inputMode,
image: inputMode === "text" ? initialPrompt : referenceImages[0],
history: updatedHistory,
isImportedFromCode,
},
currentVersion
);
}
setGeneratedCode("");
setUpdateInstruction(""); setUpdateInstruction("");
} }
@ -319,24 +415,29 @@ function App() {
})); }));
} }
function setCodeGenerationModel(codeGenerationModel: CodeGenerationModel) {
setSettings((prev) => ({
...prev,
codeGenerationModel,
}));
}
function importFromCode(code: string, stack: Stack) { function importFromCode(code: string, stack: Stack) {
// Set input state
setIsImportedFromCode(true); setIsImportedFromCode(true);
// Set up this project // Set up this project
setGeneratedCode(code);
setStack(stack); setStack(stack);
setAppHistory([
// Create a new commit and set it as the head {
const commit = createCommit({
type: "code_create", type: "code_create",
parentHash: null, parentIndex: null,
variants: [{ code }], code,
inputs: null, inputs: { code },
}); },
addCommit(commit); ]);
setHead(commit.hash); setCurrentVersion(0);
// Set the app state
setAppState(AppState.CODE_READY); setAppState(AppState.CODE_READY);
} }
@ -351,47 +452,278 @@ function App() {
)} )}
<div className="lg:fixed lg:inset-y-0 lg:z-40 lg:flex lg:w-96 lg:flex-col"> <div className="lg:fixed lg:inset-y-0 lg:z-40 lg:flex lg:w-96 lg:flex-col">
<div className="flex grow flex-col gap-y-2 overflow-y-auto border-r border-gray-200 bg-white px-6 dark:bg-zinc-950 dark:text-white"> <div className="flex grow flex-col gap-y-2 overflow-y-auto border-r border-gray-200 bg-white px-6 dark:bg-zinc-950 dark:text-white">
{/* Header with access to settings */}
<div className="flex items-center justify-between mt-10 mb-2"> <div className="flex items-center justify-between mt-10 mb-2">
<h1 className="text-2xl ">Screenshot to Code</h1> <h1 className="text-2xl ">Screenshot to Code</h1>
<SettingsDialog settings={settings} setSettings={setSettings} /> <SettingsDialog settings={settings} setSettings={setSettings} />
</div> </div>
{/* Generation settings like stack and model */} <OutputSettingsSection
<GenerationSettings settings={settings} setSettings={setSettings} /> stack={settings.generatedCodeConfig}
setStack={(config) => setStack(config)}
shouldDisableUpdates={
appState === AppState.CODING || appState === AppState.CODE_READY
}
/>
{/* Show auto updated message when older models are choosen */} <ModelSettingsSection
{showBetterModelMessage && <DeprecationMessage />} codeGenerationModel={selectedCodeGenerationModel}
setCodeGenerationModel={setCodeGenerationModel}
shouldDisableUpdates={
appState === AppState.CODING || appState === AppState.CODE_READY
}
/>
{showReactWarning && (
<div className="text-sm bg-yellow-200 rounded p-2">
Sorry - React is not currently working with GPT-4 Turbo. Please
use GPT-4 Vision or Claude Sonnet. We are working on a fix.
</div>
)}
{showBetterModelMessage && (
<div className="rounded-lg p-2 bg-fuchsia-200">
<p className="text-gray-800 text-sm">
Now supporting GPT-4o and Claude Sonnet 3.5. Higher quality and
2x faster. Give it a try!
</p>
</div>
)}
{/* Show tip link until coding is complete */}
{appState !== AppState.CODE_READY && <TipLink />} {appState !== AppState.CODE_READY && <TipLink />}
{IS_RUNNING_ON_CLOUD && !settings.openAiApiKey && <OnboardingNote />} {IS_RUNNING_ON_CLOUD && !settings.openAiApiKey && <OnboardingNote />}
{/* Rest of the sidebar when we're not in the initial state */} {IS_OPENAI_DOWN && (
<div className="bg-black text-white dark:bg-white dark:text-black p-3 rounded">
OpenAI API is currently down. Try back in 30 minutes or later. We
apologize for the inconvenience.
</div>
)}
{appState === AppState.INITIAL && (
<GenerateFromText doCreateFromText={doCreateFromText} />
)}
{(appState === AppState.CODING || {(appState === AppState.CODING ||
appState === AppState.CODE_READY) && ( appState === AppState.CODE_READY) && (
<Sidebar <>
showSelectAndEditFeature={showSelectAndEditFeature} {/* Show code preview only when coding */}
doUpdate={doUpdate} {appState === AppState.CODING && (
regenerate={regenerate} <div className="flex flex-col">
cancelCodeGeneration={cancelCodeGeneration} {/* Speed disclaimer for video mode */}
{inputMode === "video" && (
<div
className="bg-yellow-100 border-l-4 border-yellow-500 text-yellow-700
p-2 text-xs mb-4 mt-1"
>
Code generation from videos can take 3-4 minutes. We do
multiple passes to get the best result. Please be patient.
</div>
)}
<div className="flex items-center gap-x-1">
<Spinner />
{executionConsole.slice(-1)[0]}
</div>
<CodePreview code={generatedCode} />
<div className="flex w-full">
<Button
onClick={cancelCodeGeneration}
className="w-full dark:text-white dark:bg-gray-700"
>
Cancel
</Button>
</div>
</div>
)}
{appState === AppState.CODE_READY && (
<div>
<div className="grid w-full gap-2">
<Textarea
placeholder="Tell the AI what to change..."
onChange={(e) => setUpdateInstruction(e.target.value)}
value={updateInstruction}
/>
<div className="flex justify-between items-center gap-x-2">
<div className="font-500 text-xs text-slate-700 dark:text-white">
Include screenshot of current version?
</div>
<Switch
checked={shouldIncludeResultImage}
onCheckedChange={setShouldIncludeResultImage}
className="dark:bg-gray-700"
/>
</div>
<Button
onClick={() => doUpdate(updateInstruction)}
className="dark:text-white dark:bg-gray-700 update-btn"
>
Update
</Button>
</div>
<div className="flex items-center justify-end gap-x-2 mt-2">
<Button
onClick={regenerate}
className="flex items-center gap-x-2 dark:text-white dark:bg-gray-700 regenerate-btn"
>
🔄 Regenerate
</Button>
{showSelectAndEditFeature && (
<SelectAndEditModeToggleButton />
)}
</div>
<div className="flex justify-end items-center mt-2">
<TipLink />
</div>
</div>
)}
{/* Reference image display */}
<div className="flex gap-x-2 mt-2">
{referenceImages.length > 0 && (
<div className="flex flex-col">
<div
className={classNames({
"scanning relative": appState === AppState.CODING,
})}
>
{inputMode === "image" && (
<img
className="w-[340px] border border-gray-200 rounded-md"
src={referenceImages[0]}
alt="Reference"
/> />
)} )}
{inputMode === "video" && (
<video
muted
autoPlay
loop
className="w-[340px] border border-gray-200 rounded-md"
src={referenceImages[0]}
/>
)}
</div>
<div className="text-gray-400 uppercase text-sm text-center mt-1">
{inputMode === "video"
? "Original Video"
: "Original Screenshot"}
</div>
</div>
)}
<div className="bg-gray-400 px-4 py-2 rounded text-sm hidden">
<h2 className="text-lg mb-4 border-b border-gray-800">
Console
</h2>
{executionConsole.map((line, index) => (
<div
key={index}
className="border-b border-gray-400 mb-2 text-gray-600 font-mono"
>
{line}
</div>
))}
</div>
</div>
</>
)}
{
<HistoryDisplay
history={appHistory}
currentVersion={currentVersion}
revertToVersion={(index) => {
if (
index < 0 ||
index >= appHistory.length ||
!appHistory[index]
)
return;
setCurrentVersion(index);
setGeneratedCode(appHistory[index].code);
}}
shouldDisableReverts={appState === AppState.CODING}
/>
}
</div> </div>
</div> </div>
<main className="py-2 lg:pl-96"> <main className="py-2 lg:pl-96">
{appState === AppState.INITIAL && ( {appState === AppState.INITIAL && (
<StartPane <div className="flex flex-col justify-center items-center gap-y-10">
<ImageUpload setReferenceImages={doCreate} />
<UrlInputSection
doCreate={doCreate} doCreate={doCreate}
importFromCode={importFromCode} screenshotOneApiKey={settings.screenshotOneApiKey}
settings={settings}
/> />
<ImportCodeSection importFromCode={importFromCode} />
</div>
)} )}
{(appState === AppState.CODING || appState === AppState.CODE_READY) && ( {(appState === AppState.CODING || appState === AppState.CODE_READY) && (
<PreviewPane doUpdate={doUpdate} reset={reset} settings={settings} /> <div className="ml-4">
<Tabs defaultValue="desktop">
<div className="flex justify-between mr-8 mb-4">
<div className="flex items-center gap-x-2">
{appState === AppState.CODE_READY && (
<>
<Button
onClick={reset}
className="flex items-center ml-4 gap-x-2 dark:text-white dark:bg-gray-700"
>
<FaUndo />
Reset
</Button>
<Button
onClick={downloadCode}
variant="secondary"
className="flex items-center gap-x-2 mr-4 dark:text-white dark:bg-gray-700 download-btn"
>
<FaDownload /> Download
</Button>
</>
)}
</div>
<div className="flex items-center">
<TabsList>
<TabsTrigger value="desktop" className="flex gap-x-2">
<FaDesktop /> Desktop
</TabsTrigger>
<TabsTrigger value="mobile" className="flex gap-x-2">
<FaMobile /> Mobile
</TabsTrigger>
<TabsTrigger value="code" className="flex gap-x-2">
<FaCode />
Code
</TabsTrigger>
</TabsList>
</div>
</div>
<TabsContent value="desktop">
<Preview
code={previewCode}
device="desktop"
doUpdate={doUpdate}
/>
</TabsContent>
<TabsContent value="mobile">
<Preview
code={previewCode}
device="mobile"
doUpdate={doUpdate}
/>
</TabsContent>
<TabsContent value="code">
<CodeTab
code={previewCode}
setCode={setGeneratedCode}
settings={settings}
/>
</TabsContent>
</Tabs>
</div>
)} )}
</main> </main>
</div> </div>

View File

@ -1,7 +1,7 @@
import { FaCopy } from "react-icons/fa"; import { FaCopy } from "react-icons/fa";
import CodeMirror from "./CodeMirror"; import CodeMirror from "./CodeMirror";
import { Button } from "../ui/button"; import { Button } from "./ui/button";
import { Settings } from "../../types"; import { Settings } from "../types";
import copy from "copy-to-clipboard"; import copy from "copy-to-clipboard";
import { useCallback } from "react"; import { useCallback } from "react";
import toast from "react-hot-toast"; import toast from "react-hot-toast";

View File

@ -10,7 +10,7 @@ import {
DialogTrigger, DialogTrigger,
} from "./ui/dialog"; } from "./ui/dialog";
import { Textarea } from "./ui/textarea"; import { Textarea } from "./ui/textarea";
import OutputSettingsSection from "./settings/OutputSettingsSection"; import OutputSettingsSection from "./OutputSettingsSection";
import toast from "react-hot-toast"; import toast from "react-hot-toast";
import { Stack } from "../lib/stacks"; import { Stack } from "../lib/stacks";

View File

@ -0,0 +1,65 @@
import {
Select,
SelectContent,
SelectGroup,
SelectItem,
SelectTrigger,
} from "./ui/select";
import {
CODE_GENERATION_MODEL_DESCRIPTIONS,
CodeGenerationModel,
} from "../lib/models";
import { Badge } from "./ui/badge";
interface Props {
codeGenerationModel: CodeGenerationModel;
setCodeGenerationModel: (codeGenerationModel: CodeGenerationModel) => void;
shouldDisableUpdates?: boolean;
}
function ModelSettingsSection({
codeGenerationModel,
setCodeGenerationModel,
shouldDisableUpdates = false,
}: Props) {
return (
<div className="flex flex-col gap-y-2 justify-between text-sm">
<div className="grid grid-cols-3 items-center gap-4">
<span>AI Model:</span>
<Select
value={codeGenerationModel}
onValueChange={(value: string) =>
setCodeGenerationModel(value as CodeGenerationModel)
}
disabled={shouldDisableUpdates}
>
<SelectTrigger className="col-span-2" id="output-settings-js">
<span className="font-semibold">
{CODE_GENERATION_MODEL_DESCRIPTIONS[codeGenerationModel].name}
</span>
</SelectTrigger>
<SelectContent>
<SelectGroup>
{Object.values(CodeGenerationModel).map((model) => (
<SelectItem key={model} value={model}>
<div className="flex items-center">
<span className="font-semibold">
{CODE_GENERATION_MODEL_DESCRIPTIONS[model].name}
</span>
{CODE_GENERATION_MODEL_DESCRIPTIONS[model].inBeta && (
<Badge className="ml-2" variant="secondary">
Beta
</Badge>
)}
</div>
</SelectItem>
))}
</SelectGroup>
</SelectContent>
</Select>
</div>
</div>
);
}
export default ModelSettingsSection;

View File

@ -5,9 +5,9 @@ import {
SelectGroup, SelectGroup,
SelectItem, SelectItem,
SelectTrigger, SelectTrigger,
} from "../ui/select"; } from "./ui/select";
import { Badge } from "../ui/badge"; import { Badge } from "./ui/badge";
import { Stack, STACK_DESCRIPTIONS } from "../../lib/stacks"; import { Stack, STACK_DESCRIPTIONS } from "../lib/stacks";
function generateDisplayComponent(stack: Stack) { function generateDisplayComponent(stack: Stack) {
const stackComponents = STACK_DESCRIPTIONS[stack].components; const stackComponents = STACK_DESCRIPTIONS[stack].components;

View File

@ -1,7 +1,7 @@
import { useEffect, useRef, useState } from "react"; import { useEffect, useRef, useState } from "react";
import classNames from "classnames"; import classNames from "classnames";
import useThrottle from "../../hooks/useThrottle"; import useThrottle from "../hooks/useThrottle";
import EditPopup from "../select-and-edit/EditPopup"; import EditPopup from "./select-and-edit/EditPopup";
interface Props { interface Props {
code: string; code: string;
@ -9,7 +9,7 @@ interface Props {
doUpdate: (updateInstruction: string, selectedElement?: HTMLElement) => void; doUpdate: (updateInstruction: string, selectedElement?: HTMLElement) => void;
} }
function PreviewComponent({ code, device, doUpdate }: Props) { function Preview({ code, device, doUpdate }: Props) {
const iframeRef = useRef<HTMLIFrameElement | null>(null); const iframeRef = useRef<HTMLIFrameElement | null>(null);
// Don't update code more often than every 200ms. // Don't update code more often than every 200ms.
@ -53,4 +53,4 @@ function PreviewComponent({ code, device, doUpdate }: Props) {
); );
} }
export default PreviewComponent; export default Preview;

View File

@ -9,19 +9,19 @@ import {
DialogTrigger, DialogTrigger,
} from "@/components/ui/dialog"; } from "@/components/ui/dialog";
import { FaCog } from "react-icons/fa"; import { FaCog } from "react-icons/fa";
import { EditorTheme, Settings } from "../../types"; import { EditorTheme, Settings } from "../types";
import { Switch } from "../ui/switch"; import { Switch } from "./ui/switch";
import { Label } from "../ui/label"; import { Label } from "./ui/label";
import { Input } from "../ui/input"; import { Input } from "./ui/input";
import { Select, SelectContent, SelectItem, SelectTrigger } from "../ui/select"; import { Select, SelectContent, SelectItem, SelectTrigger } from "./ui/select";
import { capitalize } from "../../lib/utils"; import { capitalize } from "../lib/utils";
import { IS_RUNNING_ON_CLOUD } from "../../config"; import { IS_RUNNING_ON_CLOUD } from "../config";
import { import {
Accordion, Accordion,
AccordionContent, AccordionContent,
AccordionItem, AccordionItem,
AccordionTrigger, AccordionTrigger,
} from "../ui/accordion"; } from "./ui/accordion";
interface Props { interface Props {
settings: Settings; settings: Settings;

View File

@ -1,37 +0,0 @@
export type CommitHash = string;
export type Variant = {
code: string;
};
export type BaseCommit = {
hash: CommitHash;
parentHash: CommitHash | null;
dateCreated: Date;
isCommitted: boolean;
variants: Variant[];
selectedVariantIndex: number;
};
export type CommitType = "ai_create" | "ai_edit" | "code_create";
export type AiCreateCommit = BaseCommit & {
type: "ai_create";
inputs: {
image_url: string;
};
};
export type AiEditCommit = BaseCommit & {
type: "ai_edit";
inputs: {
prompt: string;
};
};
export type CodeCreateCommit = BaseCommit & {
type: "code_create";
inputs: null;
};
export type Commit = AiCreateCommit | AiEditCommit | CodeCreateCommit;

View File

@ -1,32 +0,0 @@
import { nanoid } from "nanoid";
import {
AiCreateCommit,
AiEditCommit,
CodeCreateCommit,
Commit,
} from "./types";
export function createCommit(
commit:
| Omit<
AiCreateCommit,
"hash" | "dateCreated" | "selectedVariantIndex" | "isCommitted"
>
| Omit<
AiEditCommit,
"hash" | "dateCreated" | "selectedVariantIndex" | "isCommitted"
>
| Omit<
CodeCreateCommit,
"hash" | "dateCreated" | "selectedVariantIndex" | "isCommitted"
>
): Commit {
const hash = nanoid();
return {
...commit,
hash,
isCommitted: false,
dateCreated: new Date(),
selectedVariantIndex: 0,
};
}

View File

@ -1,25 +0,0 @@
import React from "react";
import { BsArrowReturnLeft } from "react-icons/bs";
interface KeyboardShortcutBadgeProps {
letter: string;
}
const KeyboardShortcutBadge: React.FC<KeyboardShortcutBadgeProps> = ({
letter,
}) => {
const icon =
letter.toLowerCase() === "enter" || letter.toLowerCase() === "return" ? (
<BsArrowReturnLeft />
) : (
letter.toUpperCase()
);
return (
<span className="font-mono text-xs ml-2 rounded bg-gray-700 dark:bg-gray-900 text-white py-[2px] px-2">
{icon}
</span>
);
};
export default KeyboardShortcutBadge;

View File

@ -0,0 +1,57 @@
import { useState, useRef, useEffect } from "react";
import { Button } from "../ui/button";
import { Textarea } from "../ui/textarea";
import toast from "react-hot-toast";
interface GenerateFromTextProps {
doCreateFromText: (text: string) => void;
}
function GenerateFromText({ doCreateFromText }: GenerateFromTextProps) {
const [isOpen, setIsOpen] = useState(false);
const [text, setText] = useState("");
const textareaRef = useRef<HTMLTextAreaElement>(null);
useEffect(() => {
if (isOpen && textareaRef.current) {
textareaRef.current.focus();
}
}, [isOpen]);
const handleGenerate = () => {
if (text.trim() === "") {
// Assuming there's a toast function available in the context
toast.error("Please enter a prompt to generate from");
return;
}
doCreateFromText(text);
};
return (
<div className="mt-4">
{!isOpen ? (
<div className="flex justify-center">
<Button variant="secondary" onClick={() => setIsOpen(true)}>
Generate from text prompt [BETA]
</Button>
</div>
) : (
<>
<Textarea
ref={textareaRef}
rows={2}
placeholder="A Saas admin dashboard"
className="w-full mb-4"
value={text}
onChange={(e) => setText(e.target.value)}
/>
<div className="flex justify-end">
<Button onClick={handleGenerate}>Generate</Button>
</div>
</>
)}
</div>
);
}
export default GenerateFromText;

View File

@ -1,3 +1,4 @@
import { History } from "./history_types";
import toast from "react-hot-toast"; import toast from "react-hot-toast";
import classNames from "classnames"; import classNames from "classnames";
@ -10,23 +11,21 @@ import {
} from "../ui/collapsible"; } from "../ui/collapsible";
import { Button } from "../ui/button"; import { Button } from "../ui/button";
import { CaretSortIcon } from "@radix-ui/react-icons"; import { CaretSortIcon } from "@radix-ui/react-icons";
import { useProjectStore } from "../../store/project-store";
interface Props { interface Props {
history: History;
currentVersion: number | null;
revertToVersion: (version: number) => void;
shouldDisableReverts: boolean; shouldDisableReverts: boolean;
} }
export default function HistoryDisplay({ shouldDisableReverts }: Props) { export default function HistoryDisplay({
const { commits, head, setHead } = useProjectStore(); history,
currentVersion,
// Put all commits into an array and sort by created date (oldest first) revertToVersion,
const flatHistory = Object.values(commits).sort( shouldDisableReverts,
(a, b) => }: Props) {
new Date(a.dateCreated).getTime() - new Date(b.dateCreated).getTime() const renderedHistory = renderHistory(history, currentVersion);
);
// Annotate history items with a summary, parent version, etc.
const renderedHistory = renderHistory(flatHistory);
return renderedHistory.length === 0 ? null : ( return renderedHistory.length === 0 ? null : (
<div className="flex flex-col h-screen"> <div className="flex flex-col h-screen">
@ -40,8 +39,8 @@ export default function HistoryDisplay({ shouldDisableReverts }: Props) {
"flex items-center justify-between space-x-2 w-full pr-2", "flex items-center justify-between space-x-2 w-full pr-2",
"border-b cursor-pointer", "border-b cursor-pointer",
{ {
" hover:bg-black hover:text-white": item.hash === head, " hover:bg-black hover:text-white": !item.isActive,
"bg-slate-500 text-white": item.hash === head, "bg-slate-500 text-white": item.isActive,
} }
)} )}
> >
@ -52,14 +51,14 @@ export default function HistoryDisplay({ shouldDisableReverts }: Props) {
? toast.error( ? toast.error(
"Please wait for code generation to complete before viewing an older version." "Please wait for code generation to complete before viewing an older version."
) )
: setHead(item.hash) : revertToVersion(index)
} }
> >
<div className="flex gap-x-1 truncate"> <div className="flex gap-x-1 truncate">
<h2 className="text-sm truncate">{item.summary}</h2> <h2 className="text-sm truncate">{item.summary}</h2>
{item.parentVersion !== null && ( {item.parentVersion !== null && (
<h2 className="text-sm"> <h2 className="text-sm">
(parent: v{item.parentVersion}) (parent: {item.parentVersion})
</h2> </h2>
)} )}
</div> </div>

View File

@ -0,0 +1,45 @@
export type HistoryItemType = "ai_create" | "ai_edit" | "code_create";
type CommonHistoryItem = {
parentIndex: null | number;
code: string;
};
export type HistoryItem =
| ({
type: "ai_create";
inputs: AiCreateInputs | AiCreateInputsText;
} & CommonHistoryItem)
| ({
type: "ai_edit";
inputs: AiEditInputs;
} & CommonHistoryItem)
| ({
type: "code_create";
inputs: CodeCreateInputs;
} & CommonHistoryItem);
export type AiCreateInputs = {
image_url: string;
};
export type AiCreateInputsText = {
text: string;
};
export type AiEditInputs = {
prompt: string;
};
export type CodeCreateInputs = {
code: string;
};
export type History = HistoryItem[];
export type RenderedHistoryItem = {
type: string;
summary: string;
parentVersion: string | null;
isActive: boolean;
};

View File

@ -1,125 +1,91 @@
import { extractHistory, renderHistory } from "./utils"; import { extractHistoryTree, renderHistory } from "./utils";
import { Commit, CommitHash } from "../commits/types"; import type { History } from "./history_types";
const basicLinearHistory: Record<CommitHash, Commit> = { const basicLinearHistory: History = [
"0": { {
hash: "0",
dateCreated: new Date(),
isCommitted: false,
type: "ai_create", type: "ai_create",
parentHash: null, parentIndex: null,
variants: [{ code: "<html>1. create</html>" }], code: "<html>1. create</html>",
selectedVariantIndex: 0,
inputs: { inputs: {
image_url: "", image_url: "",
}, },
}, },
"1": { {
hash: "1",
dateCreated: new Date(),
isCommitted: false,
type: "ai_edit", type: "ai_edit",
parentHash: "0", parentIndex: 0,
variants: [{ code: "<html>2. edit with better icons</html>" }], code: "<html>2. edit with better icons</html>",
selectedVariantIndex: 0,
inputs: { inputs: {
prompt: "use better icons", prompt: "use better icons",
}, },
}, },
"2": { {
hash: "2",
dateCreated: new Date(),
isCommitted: false,
type: "ai_edit", type: "ai_edit",
parentHash: "1", parentIndex: 1,
variants: [{ code: "<html>3. edit with better icons and red text</html>" }], code: "<html>3. edit with better icons and red text</html>",
selectedVariantIndex: 0,
inputs: { inputs: {
prompt: "make text red", prompt: "make text red",
}, },
}, },
}; ];
const basicLinearHistoryWithCode: Record<CommitHash, Commit> = { const basicLinearHistoryWithCode: History = [
"0": { {
hash: "0",
dateCreated: new Date(),
isCommitted: false,
type: "code_create", type: "code_create",
parentHash: null, parentIndex: null,
variants: [{ code: "<html>1. create</html>" }], code: "<html>1. create</html>",
selectedVariantIndex: 0, inputs: {
inputs: null, code: "<html>1. create</html>",
}, },
...Object.fromEntries(Object.entries(basicLinearHistory).slice(1)), },
}; ...basicLinearHistory.slice(1),
];
const basicBranchingHistory: Record<CommitHash, Commit> = { const basicBranchingHistory: History = [
...basicLinearHistory, ...basicLinearHistory,
"3": { {
hash: "3",
dateCreated: new Date(),
isCommitted: false,
type: "ai_edit", type: "ai_edit",
parentHash: "1", parentIndex: 1,
variants: [ code: "<html>4. edit with better icons and green text</html>",
{ code: "<html>4. edit with better icons and green text</html>" },
],
selectedVariantIndex: 0,
inputs: { inputs: {
prompt: "make text green", prompt: "make text green",
}, },
}, },
}; ];
const longerBranchingHistory: Record<CommitHash, Commit> = { const longerBranchingHistory: History = [
...basicBranchingHistory, ...basicBranchingHistory,
"4": { {
hash: "4",
dateCreated: new Date(),
isCommitted: false,
type: "ai_edit", type: "ai_edit",
parentHash: "3", parentIndex: 3,
variants: [ code: "<html>5. edit with better icons and green, bold text</html>",
{ code: "<html>5. edit with better icons and green, bold text</html>" },
],
selectedVariantIndex: 0,
inputs: { inputs: {
prompt: "make text bold", prompt: "make text bold",
}, },
}, },
}; ];
const basicBadHistory: Record<CommitHash, Commit> = { const basicBadHistory: History = [
"0": { {
hash: "0",
dateCreated: new Date(),
isCommitted: false,
type: "ai_create", type: "ai_create",
parentHash: null, parentIndex: null,
variants: [{ code: "<html>1. create</html>" }], code: "<html>1. create</html>",
selectedVariantIndex: 0,
inputs: { inputs: {
image_url: "", image_url: "",
}, },
}, },
"1": { {
hash: "1",
dateCreated: new Date(),
isCommitted: false,
type: "ai_edit", type: "ai_edit",
parentHash: "2", // <- Bad parent hash parentIndex: 2, // <- Bad parent index
variants: [{ code: "<html>2. edit with better icons</html>" }], code: "<html>2. edit with better icons</html>",
selectedVariantIndex: 0,
inputs: { inputs: {
prompt: "use better icons", prompt: "use better icons",
}, },
}, },
}; ];
describe("History Utils", () => { describe("History Utils", () => {
test("should correctly extract the history tree", () => { test("should correctly extract the history tree", () => {
expect(extractHistory("2", basicLinearHistory)).toEqual([ expect(extractHistoryTree(basicLinearHistory, 2)).toEqual([
"<html>1. create</html>", "<html>1. create</html>",
"use better icons", "use better icons",
"<html>2. edit with better icons</html>", "<html>2. edit with better icons</html>",
@ -127,12 +93,12 @@ describe("History Utils", () => {
"<html>3. edit with better icons and red text</html>", "<html>3. edit with better icons and red text</html>",
]); ]);
expect(extractHistory("0", basicLinearHistory)).toEqual([ expect(extractHistoryTree(basicLinearHistory, 0)).toEqual([
"<html>1. create</html>", "<html>1. create</html>",
]); ]);
// Test branching // Test branching
expect(extractHistory("3", basicBranchingHistory)).toEqual([ expect(extractHistoryTree(basicBranchingHistory, 3)).toEqual([
"<html>1. create</html>", "<html>1. create</html>",
"use better icons", "use better icons",
"<html>2. edit with better icons</html>", "<html>2. edit with better icons</html>",
@ -140,7 +106,7 @@ describe("History Utils", () => {
"<html>4. edit with better icons and green text</html>", "<html>4. edit with better icons and green text</html>",
]); ]);
expect(extractHistory("4", longerBranchingHistory)).toEqual([ expect(extractHistoryTree(longerBranchingHistory, 4)).toEqual([
"<html>1. create</html>", "<html>1. create</html>",
"use better icons", "use better icons",
"<html>2. edit with better icons</html>", "<html>2. edit with better icons</html>",
@ -150,7 +116,7 @@ describe("History Utils", () => {
"<html>5. edit with better icons and green, bold text</html>", "<html>5. edit with better icons and green, bold text</html>",
]); ]);
expect(extractHistory("2", longerBranchingHistory)).toEqual([ expect(extractHistoryTree(longerBranchingHistory, 2)).toEqual([
"<html>1. create</html>", "<html>1. create</html>",
"use better icons", "use better icons",
"<html>2. edit with better icons</html>", "<html>2. edit with better icons</html>",
@ -160,82 +126,105 @@ describe("History Utils", () => {
// Errors // Errors
// Bad hash // Bad index
expect(() => extractHistory("100", basicLinearHistory)).toThrow(); expect(() => extractHistoryTree(basicLinearHistory, 100)).toThrow();
expect(() => extractHistoryTree(basicLinearHistory, -2)).toThrow();
// Bad tree // Bad tree
expect(() => extractHistory("1", basicBadHistory)).toThrow(); expect(() => extractHistoryTree(basicBadHistory, 1)).toThrow();
}); });
test("should correctly render the history tree", () => { test("should correctly render the history tree", () => {
expect(renderHistory(Object.values(basicLinearHistory))).toEqual([ expect(renderHistory(basicLinearHistory, 2)).toEqual([
{ {
...basicLinearHistory["0"], isActive: false,
type: "Create", parentVersion: null,
summary: "Create", summary: "Create",
parentVersion: null, type: "Create",
}, },
{ {
...basicLinearHistory["1"], isActive: false,
type: "Edit", parentVersion: null,
summary: "use better icons", summary: "use better icons",
parentVersion: null, type: "Edit",
}, },
{ {
...basicLinearHistory["2"], isActive: true,
type: "Edit",
summary: "make text red",
parentVersion: null, parentVersion: null,
summary: "make text red",
type: "Edit",
},
]);
// Current version is the first version
expect(renderHistory(basicLinearHistory, 0)).toEqual([
{
isActive: true,
parentVersion: null,
summary: "Create",
type: "Create",
},
{
isActive: false,
parentVersion: null,
summary: "use better icons",
type: "Edit",
},
{
isActive: false,
parentVersion: null,
summary: "make text red",
type: "Edit",
}, },
]); ]);
// Render a history with code // Render a history with code
expect(renderHistory(Object.values(basicLinearHistoryWithCode))).toEqual([ expect(renderHistory(basicLinearHistoryWithCode, 0)).toEqual([
{ {
...basicLinearHistoryWithCode["0"], isActive: true,
type: "Imported from code", parentVersion: null,
summary: "Imported from code", summary: "Imported from code",
parentVersion: null, type: "Imported from code",
}, },
{ {
...basicLinearHistoryWithCode["1"], isActive: false,
type: "Edit", parentVersion: null,
summary: "use better icons", summary: "use better icons",
parentVersion: null, type: "Edit",
}, },
{ {
...basicLinearHistoryWithCode["2"], isActive: false,
type: "Edit",
summary: "make text red",
parentVersion: null, parentVersion: null,
summary: "make text red",
type: "Edit",
}, },
]); ]);
// Render a non-linear history // Render a non-linear history
expect(renderHistory(Object.values(basicBranchingHistory))).toEqual([ expect(renderHistory(basicBranchingHistory, 3)).toEqual([
{ {
...basicBranchingHistory["0"], isActive: false,
type: "Create", parentVersion: null,
summary: "Create", summary: "Create",
parentVersion: null, type: "Create",
}, },
{ {
...basicBranchingHistory["1"], isActive: false,
type: "Edit", parentVersion: null,
summary: "use better icons", summary: "use better icons",
parentVersion: null, type: "Edit",
}, },
{ {
...basicBranchingHistory["2"], isActive: false,
type: "Edit", parentVersion: null,
summary: "make text red", summary: "make text red",
parentVersion: null, type: "Edit",
}, },
{ {
...basicBranchingHistory["3"], isActive: true,
type: "Edit", parentVersion: "v2",
summary: "make text green", summary: "make text green",
parentVersion: 2, type: "Edit",
}, },
]); ]);
}); });

View File

@ -1,25 +1,33 @@
import { Commit, CommitHash, CommitType } from "../commits/types"; import {
History,
HistoryItem,
HistoryItemType,
RenderedHistoryItem,
} from "./history_types";
export function extractHistory( export function extractHistoryTree(
hash: CommitHash, history: History,
commits: Record<CommitHash, Commit> version: number
): string[] { ): string[] {
const flatHistory: string[] = []; const flatHistory: string[] = [];
let currentCommitHash: CommitHash | null = hash; let currentIndex: number | null = version;
while (currentCommitHash !== null) { while (currentIndex !== null) {
const commit: Commit | null = commits[currentCommitHash]; const item: HistoryItem = history[currentIndex];
if (commit) { if (item) {
flatHistory.unshift(commit.variants[commit.selectedVariantIndex].code); if (item.type === "ai_create") {
// Don't include the image for ai_create
// For edits, add the prompt to the history flatHistory.unshift(item.code);
if (commit.type === "ai_edit") { } else if (item.type === "ai_edit") {
flatHistory.unshift(commit.inputs.prompt); flatHistory.unshift(item.code);
flatHistory.unshift(item.inputs.prompt);
} else if (item.type === "code_create") {
flatHistory.unshift(item.code);
} }
// Move to the parent of the current item // Move to the parent of the current item
currentCommitHash = commit.parentHash; currentIndex = item.parentIndex;
} else { } else {
throw new Error("Malformed history: missing parent index"); throw new Error("Malformed history: missing parent index");
} }
@ -28,7 +36,7 @@ export function extractHistory(
return flatHistory; return flatHistory;
} }
function displayHistoryItemType(itemType: CommitType) { function displayHistoryItemType(itemType: HistoryItemType) {
switch (itemType) { switch (itemType) {
case "ai_create": case "ai_create":
return "Create"; return "Create";
@ -43,48 +51,44 @@ function displayHistoryItemType(itemType: CommitType) {
} }
} }
const setParentVersion = (commit: Commit, history: Commit[]) => { function summarizeHistoryItem(item: HistoryItem) {
// If the commit has no parent, return null const itemType = item.type;
if (!commit.parentHash) return null; switch (itemType) {
const parentIndex = history.findIndex(
(item) => item.hash === commit.parentHash
);
const currentIndex = history.findIndex((item) => item.hash === commit.hash);
// Only set parent version if the parent is not the previous commit
// and parent exists
return parentIndex !== -1 && parentIndex != currentIndex - 1
? parentIndex + 1
: null;
};
export function summarizeHistoryItem(commit: Commit) {
const commitType = commit.type;
switch (commitType) {
case "ai_create": case "ai_create":
return "Create"; return "Create";
case "ai_edit": case "ai_edit":
return commit.inputs.prompt; return item.inputs.prompt;
case "code_create": case "code_create":
return "Imported from code"; return "Imported from code";
default: { default: {
const exhaustiveCheck: never = commitType; const exhaustiveCheck: never = itemType;
throw new Error(`Unhandled case: ${exhaustiveCheck}`); throw new Error(`Unhandled case: ${exhaustiveCheck}`);
} }
} }
} }
export const renderHistory = (history: Commit[]) => { export const renderHistory = (
const renderedHistory = []; history: History,
currentVersion: number | null
) => {
const renderedHistory: RenderedHistoryItem[] = [];
for (let i = 0; i < history.length; i++) { for (let i = 0; i < history.length; i++) {
const commit = history[i]; const item = history[i];
// Only show the parent version if it's not the previous version
// (i.e. it's the branching point) and if it's not the first version
const parentVersion =
item.parentIndex !== null && item.parentIndex !== i - 1
? `v${(item.parentIndex || 0) + 1}`
: null;
const type = displayHistoryItemType(item.type);
const isActive = i === currentVersion;
const summary = summarizeHistoryItem(item);
renderedHistory.push({ renderedHistory.push({
...commit, isActive,
type: displayHistoryItemType(commit.type), summary: summary,
summary: summarizeHistoryItem(commit), parentVersion,
parentVersion: setParentVersion(commit, history), type,
}); });
} }

View File

@ -1,16 +0,0 @@
import React from "react";
interface DeprecationMessageProps {}
const DeprecationMessage: React.FC<DeprecationMessageProps> = () => {
return (
<div className="rounded-lg p-2 bg-fuchsia-200">
<p className="text-gray-800 text-sm">
We no longer support this model. Instead, code generation will use
GPT-4o or Claude Sonnet 3.5, the 2 state-of-the-art models.
</p>
</div>
);
};
export default DeprecationMessage;

View File

@ -1,99 +0,0 @@
import { Tabs, TabsList, TabsTrigger, TabsContent } from "../ui/tabs";
import {
FaUndo,
FaDownload,
FaDesktop,
FaMobile,
FaCode,
} from "react-icons/fa";
import { AppState, Settings } from "../../types";
import CodeTab from "./CodeTab";
import { Button } from "../ui/button";
import { useAppStore } from "../../store/app-store";
import { useProjectStore } from "../../store/project-store";
import { extractHtml } from "./extractHtml";
import PreviewComponent from "./PreviewComponent";
import { downloadCode } from "./download";
interface Props {
doUpdate: (instruction: string) => void;
reset: () => void;
settings: Settings;
}
function PreviewPane({ doUpdate, reset, settings }: Props) {
const { appState } = useAppStore();
const { inputMode, head, commits } = useProjectStore();
const currentCommit = head && commits[head] ? commits[head] : "";
const currentCode = currentCommit
? currentCommit.variants[currentCommit.selectedVariantIndex].code
: "";
const previewCode =
inputMode === "video" && appState === AppState.CODING
? extractHtml(currentCode)
: currentCode;
return (
<div className="ml-4">
<Tabs defaultValue="desktop">
<div className="flex justify-between mr-8 mb-4">
<div className="flex items-center gap-x-2">
{appState === AppState.CODE_READY && (
<>
<Button
onClick={reset}
className="flex items-center ml-4 gap-x-2 dark:text-white dark:bg-gray-700"
>
<FaUndo />
Reset
</Button>
<Button
onClick={() => downloadCode(previewCode)}
variant="secondary"
className="flex items-center gap-x-2 mr-4 dark:text-white dark:bg-gray-700 download-btn"
>
<FaDownload /> Download
</Button>
</>
)}
</div>
<div className="flex items-center">
<TabsList>
<TabsTrigger value="desktop" className="flex gap-x-2">
<FaDesktop /> Desktop
</TabsTrigger>
<TabsTrigger value="mobile" className="flex gap-x-2">
<FaMobile /> Mobile
</TabsTrigger>
<TabsTrigger value="code" className="flex gap-x-2">
<FaCode />
Code
</TabsTrigger>
</TabsList>
</div>
</div>
<TabsContent value="desktop">
<PreviewComponent
code={previewCode}
device="desktop"
doUpdate={doUpdate}
/>
</TabsContent>
<TabsContent value="mobile">
<PreviewComponent
code={previewCode}
device="mobile"
doUpdate={doUpdate}
/>
</TabsContent>
<TabsContent value="code">
<CodeTab code={previewCode} setCode={() => {}} settings={settings} />
</TabsContent>
</Tabs>
</div>
);
}
export default PreviewPane;

View File

@ -1,16 +0,0 @@
export const downloadCode = (code: string) => {
// Create a blob from the generated code
const blob = new Blob([code], { type: "text/html" });
const url = URL.createObjectURL(blob);
// Create an anchor element and set properties for download
const a = document.createElement("a");
a.href = url;
a.download = "index.html"; // Set the file name for download
document.body.appendChild(a); // Append to the document
a.click(); // Programmatically click the anchor to trigger download
// Clean up by removing the anchor and revoking the Blob URL
document.body.removeChild(a);
URL.revokeObjectURL(url);
};

View File

@ -3,7 +3,6 @@ import { Textarea } from "../ui/textarea";
import { Button } from "../ui/button"; import { Button } from "../ui/button";
import { addHighlight, getAdjustedCoordinates, removeHighlight } from "./utils"; import { addHighlight, getAdjustedCoordinates, removeHighlight } from "./utils";
import { useAppStore } from "../../store/app-store"; import { useAppStore } from "../../store/app-store";
import KeyboardShortcutBadge from "../core/KeyboardShortcutBadge";
interface EditPopupProps { interface EditPopupProps {
event: MouseEvent | null; event: MouseEvent | null;
@ -119,7 +118,7 @@ const EditPopup: React.FC<EditPopupProps> = ({
return ( return (
<div <div
className="absolute bg-white dark:bg-gray-800 p-4 border border-gray-300 dark:border-gray-600 rounded shadow-lg w-60" className="absolute bg-white p-4 border border-gray-300 rounded shadow-lg w-60"
style={{ top: popupPosition.y, left: popupPosition.x }} style={{ top: popupPosition.y, left: popupPosition.x }}
> >
<Textarea <Textarea
@ -127,7 +126,6 @@ const EditPopup: React.FC<EditPopupProps> = ({
value={updateText} value={updateText}
onChange={(e) => setUpdateText(e.target.value)} onChange={(e) => setUpdateText(e.target.value)}
placeholder="Tell the AI what to change about this element..." placeholder="Tell the AI what to change about this element..."
className="dark:bg-gray-700 dark:text-white"
onKeyDown={(e) => { onKeyDown={(e) => {
if (e.key === "Enter") { if (e.key === "Enter") {
e.preventDefault(); e.preventDefault();
@ -136,12 +134,7 @@ const EditPopup: React.FC<EditPopupProps> = ({
}} }}
/> />
<div className="flex justify-end mt-2"> <div className="flex justify-end mt-2">
<Button <Button onClick={() => onUpdate(updateText)}>Update</Button>
className="dark:bg-gray-700 dark:text-white"
onClick={() => onUpdate(updateText)}
>
Update <KeyboardShortcutBadge letter="enter" />
</Button>
</div> </div>
</div> </div>
); );

View File

@ -1,37 +0,0 @@
import React from "react";
import { useAppStore } from "../../store/app-store";
import { AppState, Settings } from "../../types";
import OutputSettingsSection from "./OutputSettingsSection";
import { Stack } from "../../lib/stacks";
interface GenerationSettingsProps {
settings: Settings;
setSettings: React.Dispatch<React.SetStateAction<Settings>>;
}
export const GenerationSettings: React.FC<GenerationSettingsProps> = ({
settings,
setSettings,
}) => {
const { appState } = useAppStore();
function setStack(stack: Stack) {
setSettings((prev: Settings) => ({
...prev,
generatedCodeConfig: stack,
}));
}
const shouldDisableUpdates =
appState === AppState.CODING || appState === AppState.CODE_READY;
return (
<div className="flex flex-col gap-y-2">
<OutputSettingsSection
stack={settings.generatedCodeConfig}
setStack={setStack}
shouldDisableUpdates={shouldDisableUpdates}
/>
</div>
);
};

View File

@ -1,196 +0,0 @@
import { Switch } from "@radix-ui/react-switch";
import classNames from "classnames";
import { useAppStore } from "../../store/app-store";
import { useProjectStore } from "../../store/project-store";
import { AppState } from "../../types";
import CodePreview from "../preview/CodePreview";
import Spinner from "../core/Spinner";
import KeyboardShortcutBadge from "../core/KeyboardShortcutBadge";
import TipLink from "../messages/TipLink";
import SelectAndEditModeToggleButton from "../select-and-edit/SelectAndEditModeToggleButton";
import { Button } from "../ui/button";
import { Textarea } from "../ui/textarea";
import { useEffect, useRef } from "react";
import HistoryDisplay from "../history/HistoryDisplay";
import Variants from "../variants/Variants";
interface SidebarProps {
showSelectAndEditFeature: boolean;
doUpdate: (instruction: string) => void;
regenerate: () => void;
cancelCodeGeneration: () => void;
}
function Sidebar({
showSelectAndEditFeature,
doUpdate,
regenerate,
cancelCodeGeneration,
}: SidebarProps) {
const textareaRef = useRef<HTMLTextAreaElement>(null);
const {
appState,
updateInstruction,
setUpdateInstruction,
shouldIncludeResultImage,
setShouldIncludeResultImage,
} = useAppStore();
const { inputMode, referenceImages, executionConsoles, head, commits } =
useProjectStore();
const viewedCode =
head && commits[head]
? commits[head].variants[commits[head].selectedVariantIndex].code
: "";
const executionConsole =
(head && executionConsoles[commits[head].selectedVariantIndex]) || [];
// When coding is complete, focus on the update instruction textarea
useEffect(() => {
if (appState === AppState.CODE_READY && textareaRef.current) {
textareaRef.current.focus();
}
}, [appState]);
return (
<>
<Variants />
{/* Show code preview only when coding */}
{appState === AppState.CODING && (
<div className="flex flex-col">
{/* Speed disclaimer for video mode */}
{inputMode === "video" && (
<div
className="bg-yellow-100 border-l-4 border-yellow-500 text-yellow-700
p-2 text-xs mb-4 mt-1"
>
Code generation from videos can take 3-4 minutes. We do multiple
passes to get the best result. Please be patient.
</div>
)}
<div className="flex items-center gap-x-1">
<Spinner />
{executionConsole.slice(-1)[0]}
</div>
<CodePreview code={viewedCode} />
<div className="flex w-full">
<Button
onClick={cancelCodeGeneration}
className="w-full dark:text-white dark:bg-gray-700"
>
Cancel
</Button>
</div>
</div>
)}
{appState === AppState.CODE_READY && (
<div>
<div className="grid w-full gap-2">
<Textarea
ref={textareaRef}
placeholder="Tell the AI what to change..."
onChange={(e) => setUpdateInstruction(e.target.value)}
onKeyDown={(e) => {
if (e.key === "Enter" && !e.shiftKey) {
doUpdate(updateInstruction);
}
}}
value={updateInstruction}
/>
<div className="flex justify-between items-center gap-x-2">
<div className="font-500 text-xs text-slate-700 dark:text-white">
Include screenshot of current version?
</div>
<Switch
checked={shouldIncludeResultImage}
onCheckedChange={setShouldIncludeResultImage}
className="dark:bg-gray-700"
/>
</div>
<Button
onClick={() => doUpdate(updateInstruction)}
className="dark:text-white dark:bg-gray-700 update-btn"
>
Update <KeyboardShortcutBadge letter="enter" />
</Button>
</div>
<div className="flex items-center justify-end gap-x-2 mt-2">
<Button
onClick={regenerate}
className="flex items-center gap-x-2 dark:text-white dark:bg-gray-700 regenerate-btn"
>
🔄 Regenerate
</Button>
{showSelectAndEditFeature && <SelectAndEditModeToggleButton />}
</div>
<div className="flex justify-end items-center mt-2">
<TipLink />
</div>
</div>
)}
{/* Reference image display */}
<div className="flex gap-x-2 mt-2">
{referenceImages.length > 0 && (
<div className="flex flex-col">
<div
className={classNames({
"scanning relative": appState === AppState.CODING,
})}
>
{inputMode === "image" && (
<img
className="w-[340px] border border-gray-200 rounded-md"
src={referenceImages[0]}
alt="Reference"
/>
)}
{inputMode === "video" && (
<video
muted
autoPlay
loop
className="w-[340px] border border-gray-200 rounded-md"
src={referenceImages[0]}
/>
)}
</div>
<div className="text-gray-400 uppercase text-sm text-center mt-1">
{inputMode === "video" ? "Original Video" : "Original Screenshot"}
</div>
</div>
)}
<div className="bg-gray-400 px-4 py-2 rounded text-sm hidden">
<h2 className="text-lg mb-4 border-b border-gray-800">Console</h2>
{Object.entries(executionConsoles).map(([index, lines]) => (
<div key={index}>
{lines.map((line, lineIndex) => (
<div
key={`${index}-${lineIndex}`}
className="border-b border-gray-400 mb-2 text-gray-600 font-mono"
>
<span className="font-bold mr-2">{`${index}:${
lineIndex + 1
}`}</span>
{line}
</div>
))}
</div>
))}
</div>
</div>
<HistoryDisplay shouldDisableReverts={appState === AppState.CODING} />
</>
);
}
export default Sidebar;

View File

@ -1,27 +0,0 @@
import React from "react";
import ImageUpload from "../ImageUpload";
import { UrlInputSection } from "../UrlInputSection";
import ImportCodeSection from "../ImportCodeSection";
import { Settings } from "../../types";
import { Stack } from "../../lib/stacks";
interface Props {
doCreate: (images: string[], inputMode: "image" | "video") => void;
importFromCode: (code: string, stack: Stack) => void;
settings: Settings;
}
const StartPane: React.FC<Props> = ({ doCreate, importFromCode, settings }) => {
return (
<div className="flex flex-col justify-center items-center gap-y-10">
<ImageUpload setReferenceImages={doCreate} />
<UrlInputSection
doCreate={doCreate}
screenshotOneApiKey={settings.screenshotOneApiKey}
/>
<ImportCodeSection importFromCode={importFromCode} />
</div>
);
};
export default StartPane;

View File

@ -1,42 +0,0 @@
import { useProjectStore } from "../../store/project-store";
function Variants() {
const { inputMode, head, commits, updateSelectedVariantIndex } =
useProjectStore();
// If there is no head, don't show the variants
if (head === null) {
return null;
}
const commit = commits[head];
const variants = commit.variants;
const selectedVariantIndex = commit.selectedVariantIndex;
// If there is only one variant or the commit is already committed, don't show the variants
if (variants.length <= 1 || commit.isCommitted || inputMode === "video") {
return <div className="mt-2"></div>;
}
return (
<div className="mt-4 mb-4">
<div className="grid grid-cols-2 gap-2">
{variants.map((_, index) => (
<div
key={index}
className={`p-2 border rounded-md cursor-pointer ${
index === selectedVariantIndex
? "bg-blue-100 dark:bg-blue-900"
: "bg-gray-50 dark:bg-gray-800 hover:bg-gray-100 dark:hover:bg-gray-700"
}`}
onClick={() => updateSelectedVariantIndex(head, index)}
>
<h3 className="font-medium mb-1">Option {index + 1}</h3>
</div>
))}
</div>
</div>
);
}
export default Variants;

View File

@ -11,18 +11,12 @@ const ERROR_MESSAGE =
const CANCEL_MESSAGE = "Code generation cancelled"; const CANCEL_MESSAGE = "Code generation cancelled";
type WebSocketResponse = {
type: "chunk" | "status" | "setCode" | "error";
value: string;
variantIndex: number;
};
export function generateCode( export function generateCode(
wsRef: React.MutableRefObject<WebSocket | null>, wsRef: React.MutableRefObject<WebSocket | null>,
params: FullGenerationSettings, params: FullGenerationSettings,
onChange: (chunk: string, variantIndex: number) => void, onChange: (chunk: string) => void,
onSetCode: (code: string, variantIndex: number) => void, onSetCode: (code: string) => void,
onStatusUpdate: (status: string, variantIndex: number) => void, onStatusUpdate: (status: string) => void,
onCancel: () => void, onCancel: () => void,
onComplete: () => void onComplete: () => void
) { ) {
@ -37,13 +31,13 @@ export function generateCode(
}); });
ws.addEventListener("message", async (event: MessageEvent) => { ws.addEventListener("message", async (event: MessageEvent) => {
const response = JSON.parse(event.data) as WebSocketResponse; const response = JSON.parse(event.data);
if (response.type === "chunk") { if (response.type === "chunk") {
onChange(response.value, response.variantIndex); onChange(response.value);
} else if (response.type === "status") { } else if (response.type === "status") {
onStatusUpdate(response.value, response.variantIndex); onStatusUpdate(response.value);
} else if (response.type === "setCode") { } else if (response.type === "setCode") {
onSetCode(response.value, response.variantIndex); onSetCode(response.value);
} else if (response.type === "error") { } else if (response.type === "error") {
console.error("Error generating code", response.value); console.error("Error generating code", response.value);
toast.error(response.value); toast.error(response.value);

View File

@ -12,9 +12,9 @@ export enum CodeGenerationModel {
export const CODE_GENERATION_MODEL_DESCRIPTIONS: { export const CODE_GENERATION_MODEL_DESCRIPTIONS: {
[key in CodeGenerationModel]: { name: string; inBeta: boolean }; [key in CodeGenerationModel]: { name: string; inBeta: boolean };
} = { } = {
"gpt-4o-2024-05-13": { name: "GPT-4o", inBeta: false }, "gpt-4o-2024-05-13": { name: "GPT-4o 🌟", inBeta: false },
"claude-3-5-sonnet-20240620": { name: "Claude 3.5 Sonnet", inBeta: false }, "claude-3-5-sonnet-20240620": { name: "Claude 3.5 Sonnet 🌟", inBeta: false },
"gpt-4-turbo-2024-04-09": { name: "GPT-4 Turbo (deprecated)", inBeta: false }, "gpt-4-turbo-2024-04-09": { name: "GPT-4 Turbo (Apr 2024)", inBeta: false },
gpt_4_vision: { name: "GPT-4 Vision (deprecated)", inBeta: false }, gpt_4_vision: { name: "GPT-4 Vision (Nov 2023)", inBeta: false },
claude_3_sonnet: { name: "Claude 3 (deprecated)", inBeta: false }, claude_3_sonnet: { name: "Claude 3 Sonnet", inBeta: false },
}; };

View File

@ -1,14 +0,0 @@
import html2canvas from "html2canvas";
export const takeScreenshot = async (): Promise<string> => {
const iframeElement = document.querySelector(
"#preview-desktop"
) as HTMLIFrameElement;
if (!iframeElement?.contentWindow?.document.body) {
return "";
}
const canvas = await html2canvas(iframeElement.contentWindow.document.body);
const png = canvas.toDataURL("image/png");
return png;
};

View File

@ -1,34 +1,13 @@
import { create } from "zustand"; import { create } from "zustand";
import { AppState } from "../types";
// Store for app-wide state // Store for app-wide state
interface AppStore { interface AppStore {
appState: AppState;
setAppState: (state: AppState) => void;
// UI state
updateInstruction: string;
setUpdateInstruction: (instruction: string) => void;
shouldIncludeResultImage: boolean;
setShouldIncludeResultImage: (shouldInclude: boolean) => void;
inSelectAndEditMode: boolean; inSelectAndEditMode: boolean;
toggleInSelectAndEditMode: () => void; toggleInSelectAndEditMode: () => void;
disableInSelectAndEditMode: () => void; disableInSelectAndEditMode: () => void;
} }
export const useAppStore = create<AppStore>((set) => ({ export const useAppStore = create<AppStore>((set) => ({
appState: AppState.INITIAL,
setAppState: (state: AppState) => set({ appState: state }),
// UI state
updateInstruction: "",
setUpdateInstruction: (instruction: string) =>
set({ updateInstruction: instruction }),
shouldIncludeResultImage: true,
setShouldIncludeResultImage: (shouldInclude: boolean) =>
set({ shouldIncludeResultImage: shouldInclude }),
inSelectAndEditMode: false, inSelectAndEditMode: false,
toggleInSelectAndEditMode: () => toggleInSelectAndEditMode: () =>
set((state) => ({ inSelectAndEditMode: !state.inSelectAndEditMode })), set((state) => ({ inSelectAndEditMode: !state.inSelectAndEditMode })),

View File

@ -1,149 +0,0 @@
import { create } from "zustand";
import { Commit, CommitHash } from "../components/commits/types";
// Store for app-wide state
interface ProjectStore {
// Inputs
inputMode: "image" | "video";
setInputMode: (mode: "image" | "video") => void;
isImportedFromCode: boolean;
setIsImportedFromCode: (imported: boolean) => void;
referenceImages: string[];
setReferenceImages: (images: string[]) => void;
// Outputs
commits: Record<string, Commit>;
head: CommitHash | null;
addCommit: (commit: Commit) => void;
removeCommit: (hash: CommitHash) => void;
resetCommits: () => void;
appendCommitCode: (
hash: CommitHash,
numVariant: number,
code: string
) => void;
setCommitCode: (hash: CommitHash, numVariant: number, code: string) => void;
updateSelectedVariantIndex: (hash: CommitHash, index: number) => void;
setHead: (hash: CommitHash) => void;
resetHead: () => void;
executionConsoles: { [key: number]: string[] };
appendExecutionConsole: (variantIndex: number, line: string) => void;
resetExecutionConsoles: () => void;
}
export const useProjectStore = create<ProjectStore>((set) => ({
// Inputs and their setters
inputMode: "image",
setInputMode: (mode) => set({ inputMode: mode }),
isImportedFromCode: false,
setIsImportedFromCode: (imported) => set({ isImportedFromCode: imported }),
referenceImages: [],
setReferenceImages: (images) => set({ referenceImages: images }),
// Outputs
commits: {},
head: null,
addCommit: (commit: Commit) => {
// When adding a new commit, make sure all existing commits are marked as committed
set((state) => ({
commits: {
...Object.fromEntries(
Object.entries(state.commits).map(([hash, existingCommit]) => [
hash,
{ ...existingCommit, isCommitted: true },
])
),
[commit.hash]: commit,
},
}));
},
removeCommit: (hash: CommitHash) => {
set((state) => {
const newCommits = { ...state.commits };
delete newCommits[hash];
return { commits: newCommits };
});
},
resetCommits: () => set({ commits: {} }),
appendCommitCode: (hash: CommitHash, numVariant: number, code: string) =>
set((state) => {
const commit = state.commits[hash];
// Don't update if the commit is already committed
if (commit.isCommitted) {
throw new Error("Attempted to append code to a committed commit");
}
return {
commits: {
...state.commits,
[hash]: {
...commit,
variants: commit.variants.map((variant, index) =>
index === numVariant
? { ...variant, code: variant.code + code }
: variant
),
},
},
};
}),
setCommitCode: (hash: CommitHash, numVariant: number, code: string) =>
set((state) => {
const commit = state.commits[hash];
// Don't update if the commit is already committed
if (commit.isCommitted) {
throw new Error("Attempted to set code of a committed commit");
}
return {
commits: {
...state.commits,
[hash]: {
...commit,
variants: commit.variants.map((variant, index) =>
index === numVariant ? { ...variant, code } : variant
),
},
},
};
}),
updateSelectedVariantIndex: (hash: CommitHash, index: number) =>
set((state) => {
const commit = state.commits[hash];
// Don't update if the commit is already committed
if (commit.isCommitted) {
throw new Error(
"Attempted to update selected variant index of a committed commit"
);
}
return {
commits: {
...state.commits,
[hash]: {
...commit,
selectedVariantIndex: index,
},
},
};
}),
setHead: (hash: CommitHash) => set({ head: hash }),
resetHead: () => set({ head: null }),
executionConsoles: {},
appendExecutionConsole: (variantIndex: number, line: string) =>
set((state) => ({
executionConsoles: {
...state.executionConsoles,
[variantIndex]: [
...(state.executionConsoles[variantIndex] || []),
line,
],
},
})),
resetExecutionConsoles: () => set({ executionConsoles: {} }),
}));

View File

@ -16,16 +16,14 @@ describe("e2e tests", () => {
let browser: Browser; let browser: Browser;
let page: Page; let page: Page;
const DEBUG = true; const DEBUG = false;
const IS_HEADLESS = true; const IS_HEADLESS = true;
const stacks = Object.values(Stack).slice(0, DEBUG ? 1 : undefined); const stacks = Object.values(Stack).slice(0, DEBUG ? 1 : undefined);
const models = DEBUG const models = Object.values(CodeGenerationModel).slice(
? [ 0,
CodeGenerationModel.GPT_4O_2024_05_13, DEBUG ? 1 : undefined
CodeGenerationModel.CLAUDE_3_5_SONNET_2024_06_20, );
]
: Object.values(CodeGenerationModel);
beforeAll(async () => { beforeAll(async () => {
browser = await puppeteer.launch({ headless: IS_HEADLESS }); browser = await puppeteer.launch({ headless: IS_HEADLESS });

View File

@ -33,7 +33,7 @@ export enum ScreenRecorderState {
export interface CodeGenerationParams { export interface CodeGenerationParams {
generationType: "create" | "update"; generationType: "create" | "update";
inputMode: "image" | "video"; inputMode: "image" | "video" | "text";
image: string; image: string;
resultImage?: string; resultImage?: string;
history?: string[]; history?: string[];

View File

@ -4441,11 +4441,6 @@ nanoid@^3.3.6, nanoid@^3.3.7:
resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz" resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz"
integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
nanoid@^5.0.7:
version "5.0.7"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-5.0.7.tgz#6452e8c5a816861fd9d2b898399f7e5fd6944cc6"
integrity sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ==
natural-compare@^1.4.0: natural-compare@^1.4.0:
version "1.4.0" version "1.4.0"
resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz"