main.py
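"""FastAPI chat application backed by the OpenAI API.

Serves a Jinja2-rendered chat page, streams chat completions to the browser
over a WebSocket at /ws, offers a non-streaming form-based chat endpoint,
and generates images from prompts at /image. Conversation state is kept
in memory per session in ``chat_sessions``.
"""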
from openai import OpenAI
from fastapi import FastAPI, Form, Request, WebSocket, WebSocketDisconnect
from typing import Annotated
from fastapi.templating import Jinja2Templates
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
import os
from dotenv import load_dotenv
import uuid

# Load environment variables and create the OpenAI client.
load_dotenv()
openai = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

app = FastAPI()
templates = Jinja2Templates(directory="templates")
app.mount("/static", StaticFiles(directory="static"), name="static")

# In-memory store of per-session conversation state.
chat_sessions = {}

# Shared system prompt used by both chat endpoints.
SYSTEM_PROMPT = (
    "You are a versatile AI assistant designed to provide accurate and reliable "
    "information, assist with learning, problem-solving, and creative tasks, and "
    "adapt to user preferences. You break down complex topics into simple "
    "explanations, offer personalized solutions, and maintain a friendly, "
    "professional tone. Upholding ethical standards, you ensure safe and "
    "respectful interactions while empowering users with knowledge and inspiration"
)

@app.get("/", response_class=HTMLResponse)
async def chat_page(request: Request):
    return templates.TemplateResponse(
        "home.html", {"request": request, "chat_responses": []}
    )

@app.websocket("/ws")
async def chat(websocket: WebSocket):
    await websocket.accept()
    # Each WebSocket connection gets its own session and message history.
    session_id = str(uuid.uuid4())
    chat_sessions[session_id] = {
        "responses": [],
        "messages": [{"role": "system", "content": SYSTEM_PROMPT}],
    }
    try:
        while True:
            user_input = await websocket.receive_text()
            chat_sessions[session_id]["messages"].append(
                {"role": "user", "content": user_input}
            )
            # Stream the completion and forward each chunk to the client.
            response = openai.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=chat_sessions[session_id]["messages"],
                temperature=0.6,
                stream=True,
            )
            ai_response = ""
            for chunk in response:
                if chunk.choices[0].delta.content is not None:
                    ai_response += chunk.choices[0].delta.content
                    await websocket.send_text(chunk.choices[0].delta.content)
            if ai_response:
                chat_sessions[session_id]["messages"].append(
                    {"role": "assistant", "content": ai_response}
                )
                chat_sessions[session_id]["responses"].append(ai_response)
    except WebSocketDisconnect:
        # The client has already disconnected, so just drop its session state;
        # calling websocket.close() here would fail on an already-closed socket.
        chat_sessions.pop(session_id, None)
    except Exception as e:
        await websocket.send_text(f"Error: {str(e)}")

@app.post("/", response_class=HTMLResponse)
async def chat_post(request: Request, user_input: Annotated[str, Form()]):
    # Non-streaming form endpoint: each submission starts a fresh session.
    session_id = str(uuid.uuid4())
    chat_sessions[session_id] = {
        "responses": [],
        "messages": [{"role": "system", "content": SYSTEM_PROMPT}],
    }
    chat_sessions[session_id]["messages"].append(
        {"role": "user", "content": user_input}
    )
    response = openai.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=chat_sessions[session_id]["messages"],
        temperature=0.6,
    )
    bot_response = response.choices[0].message.content
    chat_sessions[session_id]["messages"].append(
        {"role": "assistant", "content": bot_response}
    )
    return templates.TemplateResponse(
        "home.html", {"request": request, "chat_responses": [bot_response]}
    )

@app.get("/image", response_class=HTMLResponse)
async def image_page(request: Request):
    return templates.TemplateResponse("image.html", {"request": request})


@app.post("/image", response_class=HTMLResponse)
async def create_image(request: Request, user_input: Annotated[str, Form()]):
    response = openai.images.generate(prompt=user_input, n=1, size="1024x1024")
    image_url = response.data[0].url
    return templates.TemplateResponse(
        "image.html", {"request": request, "image_url": image_url}
    )