write logs to file
This commit is contained in:
parent
5be9736120
commit
86dd82e358
@ -22,7 +22,10 @@ async def stream_openai_response(messages, callback: Callable[[str], Awaitable[N
|
|||||||
params["temperature"] = 0
|
params["temperature"] = 0
|
||||||
|
|
||||||
completion = await client.chat.completions.create(**params)
|
completion = await client.chat.completions.create(**params)
|
||||||
|
full_response = ""
|
||||||
async for chunk in completion:
|
async for chunk in completion:
|
||||||
content = chunk.choices[0].delta.content or ""
|
content = chunk.choices[0].delta.content or ""
|
||||||
|
full_response += content
|
||||||
await callback(content)
|
await callback(content)
|
||||||
|
|
||||||
|
return full_response
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
# Load environment variables first
|
# Load environment variables first
|
||||||
|
import json
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
import os
|
import os
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
@ -14,6 +15,19 @@ from llm import stream_openai_response
|
|||||||
app = FastAPI()
|
app = FastAPI()
|
||||||
|
|
||||||
|
|
||||||
|
def write_logs(prompt_messages, completion):
    """Persist one run's prompt and completion as a timestamped JSON file.

    Args:
        prompt_messages: The prompt messages sent to the model
            (must be JSON-serializable).
        completion: The model's full response text for this run.
    """
    # exist_ok=True avoids the check-then-create race of the
    # os.path.exists() + os.makedirs() pair (and a second process
    # creating the directory in between).
    os.makedirs("run_logs", exist_ok=True)

    # Generate a unique filename using the current timestamp.
    # NOTE(review): second-level resolution — two runs within the same
    # second would overwrite each other.
    filename = datetime.now().strftime("run_logs/messages_%Y%m%d_%H%M%S.json")

    # Explicit UTF-8 plus ensure_ascii=False keeps any non-ASCII prompt
    # text human-readable in the log file.
    with open(filename, "w", encoding="utf-8") as f:
        json.dump({"prompt": prompt_messages, "completion": completion}, f, ensure_ascii=False)
|
||||||
|
|
||||||
|
|
||||||
@app.websocket("/generate-code")
|
@app.websocket("/generate-code")
|
||||||
async def stream_code_test(websocket: WebSocket):
|
async def stream_code_test(websocket: WebSocket):
|
||||||
await websocket.accept()
|
await websocket.accept()
|
||||||
@ -23,21 +37,12 @@ async def stream_code_test(websocket: WebSocket):
|
|||||||
async def process_chunk(content):
|
async def process_chunk(content):
|
||||||
await websocket.send_json({"type": "chunk", "value": content})
|
await websocket.send_json({"type": "chunk", "value": content})
|
||||||
|
|
||||||
messages = assemble_prompt("")
|
prompt_messages = assemble_prompt("")
|
||||||
print(messages)
|
|
||||||
|
|
||||||
# Create run_logs directory if it doesn't exist
|
completion = await stream_openai_response(
|
||||||
if not os.path.exists('run_logs'):
|
prompt_messages,
|
||||||
os.makedirs('run_logs')
|
|
||||||
|
|
||||||
# Generate a unique filename using the current timestamp
|
|
||||||
filename = datetime.now().strftime('run_logs/messages_%Y%m%d_%H%M%S.json')
|
|
||||||
|
|
||||||
# Write the messages dict into a new file for each run
|
|
||||||
with open(filename, "w") as f:
|
|
||||||
f.write(str(messages))
|
|
||||||
|
|
||||||
await stream_openai_response(
|
|
||||||
messages,
|
|
||||||
lambda x: process_chunk(x),
|
lambda x: process_chunk(x),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Write the messages dict into a log so that we can debug later
|
||||||
|
write_logs(prompt_messages, completion)
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user