# -*- coding: utf-8 -*-
# Reconstructed from a whitespace-mangled git patch: the payload below is the
# new ConnectionManager.py and config.py, plus the reworked SIGHUP handler
# from main.py.

from typing import List

import toml
from fastapi import WebSocket
from openai import OpenAI
from pydantic import BaseModel


class ConnectionManager:
    """Tracks active WebSocket connections and fans messages out to them."""

    def __init__(self):
        # All currently-accepted client sockets.
        self.active_connections: List[WebSocket] = []

    async def connect(self, websocket: WebSocket):
        """Accept the handshake and start tracking the connection."""
        await websocket.accept()
        self.active_connections.append(websocket)

    def disconnect(self, websocket: WebSocket):
        """Stop tracking a closed connection."""
        self.active_connections.remove(websocket)

    @staticmethod
    async def send_personal_message(message: str, websocket: WebSocket):
        """Send a text frame to a single client."""
        await websocket.send_text(message)

    async def broadcast_json(self, data: dict):
        """Send *data* as a JSON frame to every connected client."""
        for connection in self.active_connections:
            await connection.send_json(data)


class Config(BaseModel):
    """Application settings persisted as a TOML file."""

    key: str = ""                       # API key for the OpenAI-compatible endpoint
    prompt: str = ""                    # system/user prompt sent with each image
    model: str = "chatgpt-4o-latest"    # model name passed to the API
    base_url: str = ""                  # endpoint base URL

    def save(self, config_file):
        """Write the current settings to *config_file* as TOML."""
        with open(config_file, "w", encoding="utf-8") as f:
            toml.dump(self.model_dump(), f)

    @classmethod
    def load(cls, config_file):
        """Build a Config from the TOML file at *config_file*.

        Raises FileNotFoundError if the file is missing and pydantic's
        ValidationError if the TOML contents don't match the fields.
        """
        with open(config_file, "r", encoding="utf-8") as f:
            return cls(**toml.load(f))


def signal_handler():
    """SIGHUP handler: re-read the config file and rebuild the API client.

    BUG FIX: the original called ``config.load(config_file)`` and discarded
    the result — ``Config.load`` is a classmethod that returns a *new*
    instance, so the "reload" never changed anything.  Rebind the module
    globals instead so ``systemctl reload`` actually takes effect.
    """
    print("Received SIGHUP, reloading config")
    global config, client
    config = Config.load(config_file)
    client = OpenAI(api_key=config.key, base_url=config.base_url)
def init():
    """Create a default config file on first run.

    No-op when the file already exists.  NOTE: the original also rebound
    ``global config`` to the throwaway default instance, but that value is
    immediately overwritten by the module-level ``Config.load`` below, so
    the global mutation is dropped here (end state is identical).
    """
    if not isfile(config_file):
        Config().save(config_file)


# Module-level wiring: ensure a config file exists, load it, then build the
# API client and the WebSocket connection registry used by the endpoints.
config_file = "./config.toml"
init()
config = Config.load(config_file)
client = OpenAI(api_key=config.key, base_url=config.base_url)
manager = ConnectionManager()
uvicorn~=0.20.0 -python-multipart~=0.0.12 \ No newline at end of file +python-multipart~=0.0.12 +requests~=2.27.1 +toml~=0.10.2 +pydantic~=2.9.2 \ No newline at end of file