Merge pull request #932 from Ikaros-521/owner
Deprecate Flask and switch to FastAPI (tested chat-page sending and the WeChat account integration; no issues found so far, stability still to be verified)
Ikaros-521 authored Jul 23, 2024
2 parents 1ce09f9 + 7a963dc commit 489abc0
Showing 7 changed files with 249 additions and 113 deletions.
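In essence, each Flask route becomes a FastAPI path operation backed by a Pydantic request model, and app.run() is replaced by uvicorn. The snippet below is only a minimal sketch of that pattern for orientation; the names and the port are generic placeholders, not the project's actual handlers:

# Minimal illustration of the Flask-to-FastAPI pattern used in this commit; generic names, hypothetical port.
from fastapi import FastAPI
from pydantic import BaseModel
import uvicorn

app = FastAPI()

class Payload(BaseModel):
    type: str
    data: dict

@app.post("/send")                          # was: @app.route('/send', methods=['POST'])
async def send(msg: Payload):               # was: def send(): data_json = request.get_json()
    return {"code": 200, "message": "ok"}   # was: return jsonify({...})

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8082)   # was: app.run(host="0.0.0.0", port=...)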
File renamed without changes.
251 changes: 151 additions & 100 deletions main.py
@@ -12,9 +12,6 @@
import http.cookies
from typing import *

-from flask import Flask, send_from_directory, render_template, request, jsonify
-from flask_cors import CORS

# Voice chat triggered by key-press listening
import keyboard
import pyaudio
@@ -144,93 +141,92 @@ def start_server():
    if platform != "wxlive":
        # HTTP API thread
        def http_api_thread():
-           app = Flask(__name__, static_folder='./')
-           CORS(app)  # Allow cross-origin requests
-
-           logger.info("HTTP API thread started!")
+           import uvicorn
+           from fastapi import FastAPI
+           from fastapi.middleware.cors import CORSMiddleware
+           from utils.models import SendMessage, LLMMessage, CallbackMessage, CommonResult
+
+           # Define the FastAPI app
+           app = FastAPI()
+
+           # Allow cross-origin requests
+           app.add_middleware(
+               CORSMiddleware,
+               allow_origins=["*"],
+               allow_credentials=True,
+               allow_methods=["*"],
+               allow_headers=["*"],
+           )

-           @app.route('/send', methods=['POST'])
-           def send():
+           # Define the POST route and its handler
+           @app.post("/send")
+           async def send(msg: SendMessage):
                global my_handle, config

-               try:
-                   data_json = request.get_json()
-                   logger.info(f"API received data: {data_json}")
-
-                   if data_json["type"] in ["reread", "reread_top_priority"]:
-                       my_handle.reread_handle(data_json, type=data_json["type"])
-                   elif data_json["type"] == "comment":
-                       my_handle.process_data(data_json, "comment")
-                   elif data_json["type"] == "tuning":
-                       my_handle.tuning_handle(data_json)
-                   elif data_json["type"] == "gift":
-                       my_handle.gift_handle(data_json)
-                   elif data_json["type"] == "entrance":
-                       my_handle.entrance_handle(data_json)
-
-                   return jsonify({"code": 200, "message": "Data sent successfully!"})
-               except Exception as e:
-                   logger.error(f"Failed to send data! {e}")
-                   return jsonify({"code": -1, "message": f"Failed to send data! {e}"})
+               try:
+                   tmp_json = msg.dict()
+                   logger.info(f"API received data: {tmp_json}")
+                   data_json = tmp_json["data"]
+
+                   if data_json["type"] in ["reread", "reread_top_priority"]:
+                       my_handle.reread_handle(data_json, type=data_json["type"])
+                   elif data_json["type"] == "comment":
+                       my_handle.process_data(data_json, "comment")
+                   elif data_json["type"] == "tuning":
+                       my_handle.tuning_handle(data_json)
+                   elif data_json["type"] == "gift":
+                       my_handle.gift_handle(data_json)
+                   elif data_json["type"] == "entrance":
+                       my_handle.entrance_handle(data_json)
+
+                   return CommonResult(code=200, message="Success")
+               except Exception as e:
+                   logger.error(f"Failed to send data! {e}")
+                   return CommonResult(code=-1, message=f"Failed to send data! {e}")

-           @app.route('/llm', methods=['POST'])
-           def llm():
+           @app.post("/llm")
+           async def llm(msg: LLMMessage):
                global my_handle, config

-               try:
-                   data_json = request.get_json()
-                   logger.info(f"API received data: {data_json}")
-
-                   resp_content = my_handle.llm_handle(data_json["type"], data_json, webui_show=False)
-
-                   return {"code": 200, "msg": "Success", "data": {"content": resp_content}}
-                   # return jsonify({"code": 200, "message": "LLM call succeeded!"})
-               except Exception as e:
-                   logger.error(f"LLM call failed! {e}")
-                   return {"code": -1, "msg": f"LLM call failed! {e}"}
-                   return jsonify({"code": -1, "msg": f"LLM call failed! {e}"})
+               try:
+                   data_json = msg.dict()
+                   logger.info(f"API received data: {data_json}")
+
+                   resp_content = my_handle.llm_handle(data_json["type"], data_json, webui_show=False)
+
+                   return CommonResult(code=200, message="Success", data={"content": resp_content})
+               except Exception as e:
+                   logger.error(f"LLM call failed! {e}")
+                   return CommonResult(code=-1, message=f"LLM call failed! {e}")

-           @app.route('/callback', methods=['POST'])
-           def callback():
+           @app.post("/callback")
+           async def callback(msg: CallbackMessage):
                global my_handle, config, global_idle_time

-               try:
-                   data_json = request.get_json()
-                   logger.info(f"API received data: {data_json}")
-
-                   # Audio playback finished
-                   if data_json["type"] in ["audio_playback_completed"]:
-                       # If the number of audio clips waiting to be played exceeds the configured threshold
-                       if data_json["data"]["wait_play_audio_num"] > int(config.get("idle_time_task", "wait_play_audio_num_threshold")):
-                           logger.info(f'Audio clips waiting to be played exceed the limit; the idle-task timer goes from {global_idle_time} -> {int(config.get("idle_time_task", "idle_time_reduce_to"))} seconds')
-                           # Cut the idle-task timer down to the configured value
-                           global_idle_time = int(config.get("idle_time_task", "idle_time_reduce_to"))
-
-                   return jsonify({"code": 200, "message": "Callback handled successfully!"})
-               except Exception as e:
-                   logger.error(f"Callback handling failed! {e}")
-                   return jsonify({"code": -1, "message": f"Callback handling failed! {e}"})
+               try:
+                   data_json = msg.dict()
+                   logger.info(f"API received data: {data_json}")
+
+                   # Audio playback finished
+                   if data_json["type"] in ["audio_playback_completed"]:
+                       # If the number of audio clips waiting to be played exceeds the configured threshold
+                       if data_json["data"]["wait_play_audio_num"] > int(config.get("idle_time_task", "wait_play_audio_num_threshold")):
+                           logger.info(f'Audio clips waiting to be played exceed the limit; the idle-task timer goes from {global_idle_time} -> {int(config.get("idle_time_task", "idle_time_reduce_to"))} seconds')
+                           # Cut the idle-task timer down to the configured value
+                           global_idle_time = int(config.get("idle_time_task", "idle_time_reduce_to"))
+
+                   return CommonResult(code=200, message="Callback handled successfully!")
+               except Exception as e:
+                   logger.error(f"Callback handling failed! {e}")
+                   return CommonResult(code=-1, message=f"Callback handling failed! {e}")

-           app.run(host="0.0.0.0", port=config.get("api_port"), debug=False)
+           logger.info("HTTP API thread started!")
+           uvicorn.run(app, host="0.0.0.0", port=config.get("api_port"))


        # Create and start the HTTP API thread
-       schedule_thread = threading.Thread(target=http_api_thread)
-       schedule_thread.start()
+       inside_http_api_thread = threading.Thread(target=http_api_thread)
+       inside_http_api_thread.start()


        # Add the username to the latest-usernames list
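For reference, a rough sketch of how a client might call the new endpoints. The port and the exact payload fields are assumptions inferred from how the handlers unpack msg.dict(); they are not taken from this diff:

# Hypothetical client sketch; 8082 stands in for whatever "api_port" is configured to.
import requests

API = "http://127.0.0.1:8082"

# /send reads msg.dict()["data"], so the business payload sits under "data".
r = requests.post(f"{API}/send", json={
    "type": "comment",   # top-level type field is assumed
    "data": {"type": "comment", "username": "viewer1", "content": "hello"},
})
print(r.json())

# /callback reads data["wait_play_audio_num"] when type is "audio_playback_completed".
r = requests.post(f"{API}/callback", json={
    "type": "audio_playback_completed",
    "data": {"wait_play_audio_num": 12},
})
print(r.json())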
@@ -1445,6 +1441,9 @@ def init_session():

    # logger.info(f"SESSDATA={SESSDATA}")

+   # logger.warning(f"sessdata={SESSDATA}")
+   # logger.warning(f"cookies={cookies}")
+
    session = aiohttp.ClientSession()
    session.cookie_jar.update_cookies(cookies)

@@ -3001,25 +3000,36 @@ async def on_follow(event: FollowEvent):
            logger.error(traceback.format_exc())
            my_handle.abnormal_alarm_handle("platform")
    elif platform == "wxlive":

-       app = Flask(__name__)
-       CORS(app)  # Allow cross-origin requests
+       import uvicorn
+       from fastapi import FastAPI, Request
+       from fastapi.middleware.cors import CORSMiddleware
+       from utils.models import SendMessage, LLMMessage, CallbackMessage, CommonResult
+
+       # Define the FastAPI app
+       app = FastAPI()

        # List used for de-duplication
        seq_list = []

-       @app.route('/wxlive', methods=['POST'])
-       def wxlive():
+       # Allow cross-origin requests
+       app.add_middleware(
+           CORSMiddleware,
+           allow_origins=["*"],
+           allow_credentials=True,
+           allow_methods=["*"],
+           allow_headers=["*"],
+       )
+
+       @app.post("/wxlive")
+       async def wxlive(request: Request):
            global my_handle, config

            try:
                # Get the data from the POST request
-               data = request.json
+               data = await request.json()
                # Code to process the received data can be added here
                logger.debug(data)

                if data['events'][0]['seq'] in seq_list:
-                   return jsonify({"code": 1, "message": "Duplicate data filtered"})
+                   return CommonResult(code=-1, message="Duplicate data filtered")

                # If the list length reaches 30, remove the oldest element
                if len(seq_list) >= 30:
@@ -3066,38 +3076,77 @@ def wxlive():
                    pass

                # Respond
-               return jsonify({"code": 200, "message": "Received successfully"})
+               return CommonResult(code=200, message="Received successfully")
            except Exception as e:
                logger.error(traceback.format_exc())
                my_handle.abnormal_alarm_handle("platform")
-               return jsonify({"code": -1, "message": f"Failed to send data! {e}"})
+               return CommonResult(code=-1, message=f"Failed to send data! {e}")

-       @app.route('/send', methods=['POST'])
-       def send():
+       # Define the POST route and its handler
+       @app.post("/send")
+       async def send(msg: SendMessage):
            global my_handle, config

-           try:
-               data_json = request.get_json()
-               logger.info(f"API received data: {data_json}")
-
-               if data_json["type"] in ["reread", "reread_top_priority"]:
-                   my_handle.reread_handle(data_json, type=data_json["type"])
-               elif data_json["type"] == "comment":
-                   my_handle.process_data(data_json, "comment")
-               elif data_json["type"] == "tuning":
-                   my_handle.tuning_handle(data_json)
-
-               return jsonify({"code": 200, "message": "Data sent successfully!"})
-           except Exception as e:
-               logger.error(f"Failed to send data! {e}")
-               return jsonify({"code": -1, "message": f"Failed to send data! {e}"})
+           try:
+               tmp_json = msg.dict()
+               logger.info(f"API received data: {tmp_json}")
+               data_json = tmp_json["data"]
+
+               if data_json["type"] in ["reread", "reread_top_priority"]:
+                   my_handle.reread_handle(data_json, type=data_json["type"])
+               elif data_json["type"] == "comment":
+                   my_handle.process_data(data_json, "comment")
+               elif data_json["type"] == "tuning":
+                   my_handle.tuning_handle(data_json)
+               elif data_json["type"] == "gift":
+                   my_handle.gift_handle(data_json)
+               elif data_json["type"] == "entrance":
+                   my_handle.entrance_handle(data_json)
+
+               return CommonResult(code=200, message="Success")
+           except Exception as e:
+               logger.error(f"Failed to send data! {e}")
+               return CommonResult(code=-1, message=f"Failed to send data! {e}")

-       app.run(host="0.0.0.0", port=config.get("api_port"), debug=False)
-       # app.run(host="0.0.0.0", port=8082, debug=True)
+       @app.post("/llm")
+       async def llm(msg: LLMMessage):
+           global my_handle, config
+
+           try:
+               data_json = msg.dict()
+               logger.info(f"API received data: {data_json}")
+
+               resp_content = my_handle.llm_handle(data_json["type"], data_json, webui_show=False)
+
+               return CommonResult(code=200, message="Success", data={"content": resp_content})
+           except Exception as e:
+               logger.error(f"LLM call failed! {e}")
+               return CommonResult(code=-1, message=f"LLM call failed! {e}")
+
+       @app.post("/callback")
+       async def callback(msg: CallbackMessage):
+           global my_handle, config, global_idle_time
+
+           try:
+               data_json = msg.dict()
+               logger.info(f"API received data: {data_json}")
+
+               # Audio playback finished
+               if data_json["type"] in ["audio_playback_completed"]:
+                   # If the number of audio clips waiting to be played exceeds the configured threshold
+                   if data_json["data"]["wait_play_audio_num"] > int(config.get("idle_time_task", "wait_play_audio_num_threshold")):
+                       logger.info(f'Audio clips waiting to be played exceed the limit; the idle-task timer goes from {global_idle_time} -> {int(config.get("idle_time_task", "idle_time_reduce_to"))} seconds')
+                       # Cut the idle-task timer down to the configured value
+                       global_idle_time = int(config.get("idle_time_task", "idle_time_reduce_to"))
+
+               return CommonResult(code=200, message="Callback handled successfully!")
+           except Exception as e:
+               logger.error(f"Callback handling failed! {e}")
+               return CommonResult(code=-1, message=f"Callback handling failed! {e}")

+       logger.info("HTTP API thread started!")
+       uvicorn.run(app, host="0.0.0.0", port=config.get("api_port"))

    elif platform == "youtube":
        import pytchat

@@ -3228,6 +3277,8 @@ def exit_handler(signum, frame):
    platform = config.get("platform")

    if platform == "bilibili2":
+       from typing import Optional
+
        # Fill in the cookie of a logged-in account here. You can connect without a cookie, but usernames in received danmaku will be masked and the UID will be 0
        SESSDATA = ''

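The new handlers validate request bodies with Pydantic models imported from utils.models, whose diff is not part of this view. Below is only a rough sketch of what those models might look like, with fields inferred from how the handlers read them; every optional field is an assumption:

# Hypothetical sketch of utils/models.py; only the fields the handlers actually read are certain.
from typing import Optional
from pydantic import BaseModel

class SendMessage(BaseModel):
    type: Optional[str] = None       # assumed; /send only reads .dict()["data"]
    data: dict                       # business payload: type, username, content, ...

class LLMMessage(BaseModel):
    type: str                        # /llm dispatches on this
    username: Optional[str] = None   # assumed
    content: Optional[str] = None    # assumed

class CallbackMessage(BaseModel):
    type: str                        # e.g. "audio_playback_completed"
    data: dict                       # e.g. {"wait_play_audio_num": 12}

class CommonResult(BaseModel):
    code: int
    message: str
    data: Optional[dict] = None      # /llm returns {"content": ...} here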
3 changes: 0 additions & 3 deletions requirements_common.txt
@@ -61,9 +61,6 @@ fastapi-socketio==0.0.10
faster-whisper==1.0.0
ffmpy==0.3.1
filelock==3.12.2
-Flask==3.0.0
-Flask-Cors==4.0.0
-Flask-SocketIO==5.3.6
flatbuffers==23.5.26
fonttools==4.47.2
frozenlist==1.3.3
Binary file added tests/test_aibote/1.png
43 changes: 43 additions & 0 deletions tests/test_aibote/aibote.py
@@ -0,0 +1,43 @@
# 1. Import the WinBotMain class
from PyAibote import WinBotMain
import time, os

# 2. Define a custom script class that inherits from WinBotMain
class CustomWinScript(WinBotMain):

    # 2.1. Whether to print output to the terminal. DEBUG: print, INFO: do not print. Printing is enabled by default.
    Log_Level = "DEBUG"

    # 2.2. Whether terminal output is also written to a log file. True: store, False: do not store.
    Log_Storage = True


    # 2.3. Note: script_main is the script's entry point; this method must exist.
    def script_main(self):
        # Query all window handles
        # result = self.find_windows()
        # print(result)


        # Usage example [Demo]
        result = self.init_speech_clone_service("178asdf325c95eafdaaasee3bbf64741", "tIdj8l8nPdqV86Ueasdf")
        print(result)

        # Usage example [Demo]
        # result = self.init_metahuman("D:/AiboteMetahuman/metahumanMode", 0.5, 0.5, False)
        # print(result)

        result = self.train_human_model(
            "dfjklDJFLJlfjkdljf",
            "E:\\GitHub_pro\\AI-Vtuber\\tests\\test_aibote\\1.png",
            "E:\\GitHub_pro\\AI-Vtuber\\tests\\test_aibote\\humanModel",
            "E:\\GitHub_pro\\AI-Vtuber\\tests\\test_aibote\\newHumanModel"
        )
        print(result)


if __name__ == '__main__':
    # 3. Listen on IP 0.0.0.0, port 9999
    # 3.1. When deploying the script remotely, set Debug=False; when starting WindowsDriver.exe manually on the client, specify the remote IP and port
    # 3.2. Command-line launch example: "127.0.0.1" 9999 {'Name':'PyAibote'}
    CustomWinScript.execute("0.0.0.0", 9999, Debug=True)
