From 97eef02a9294f492200b1315565660a9789733ee Mon Sep 17 00:00:00 2001
From: heshunme
Date: Sat, 26 Oct 2024 15:02:43 +0800
Subject: [PATCH] Add handling for the reload signal to respond to systemctl
 reload
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 main.py | 32 +++++++++++++++++++++++++++-----
 1 file changed, 27 insertions(+), 5 deletions(-)

diff --git a/main.py b/main.py
index 9239202..b91395f 100644
--- a/main.py
+++ b/main.py
@@ -3,14 +3,36 @@ from fastapi.responses import StreamingResponse, HTMLResponse
 from typing import List
 import base64
 import openai
+import signal
 
 app = FastAPI()
 
-with open("key", "r") as k:
-    key = k.read()
+key = ""
+prompt = ""
+
+
+def get_key():
+    with open("key", "r") as f:
+        k = f.read()
+    return k
+
+
+def get_prompt():
+    with open("prompt", "r", encoding="utf-8") as f:
+        p = f.read()
+    return p
+
+
+def load_config():
+    global key, prompt
+    key = get_key()
+    prompt = get_prompt()
+
+
+signal.signal(signal.SIGHUP, lambda signum, frame: load_config())
+
+load_config()
 client = openai.OpenAI(api_key=key, base_url="https://open.bigmodel.cn/api/paas/v4/")
-with open("prompt", "r", encoding="utf-8") as p:
-    prompt = p.read()
 
 
 # WebSocket连接管理器
@@ -67,7 +89,7 @@ async def predict(file: UploadFile = File(...)):
             "content": [
                 {
                     "type": "text",
-                    "text": prompt,
+                    "text": get_prompt(),
                 },
                 {
                     "type": "image_url",
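
Note: for `systemctl reload` to reach this handler, the unit file must translate the reload into SIGHUP, typically with a line such as `ExecReload=/bin/kill -HUP $MAINPID`. Below is a minimal sketch for exercising the handler outside of a full reload, assuming the patched main.py is already running under systemd; the unit name "vision-api.service" is hypothetical and not part of this patch.

# reload_test.py - minimal sketch; sends SIGHUP to the running service,
# which main.py now maps to load_config().
# The unit name "vision-api.service" is an assumption; substitute your own.
import os
import signal
import subprocess


def send_reload(pid: int) -> None:
    # Deliver SIGHUP directly, mirroring what ExecReload would do.
    os.kill(pid, signal.SIGHUP)


if __name__ == "__main__":
    # Ask systemd for the service's main PID, then signal it.
    main_pid = int(
        subprocess.check_output(
            ["systemctl", "show", "--property=MainPID", "--value",
             "vision-api.service"],
            text=True,
        ).strip()
    )
    send_reload(main_pid)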