import copy
import json
import logging
import threading

from flask import Flask, render_template
from flask_socketio import SocketIO
import itchat
from itchat.content import TEXT

app = Flask(__name__)
# NOTE(review): hardcoded secret key — acceptable for a local tool, but
# replace with a random value (e.g. from an environment variable) before
# exposing this server beyond localhost.
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)

# Logging: timestamped INFO+ entries appended to wechat_bot.log
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    filename='wechat_bot.log'
)

# Default configuration: per-friend / per-group canned replies (keys are
# WeChat nicknames), a catch-all reply, and optional AI-model settings.
DEFAULT_CONFIG = {
    "friends": {
        "张三": "您好，我现在是自动回复状态，稍后会回复您",
        "李四": "您好，我正在忙，稍后回复您"
    },
    "groups": {
        "测试群": "群消息已收到，请稍候",
        "工作群": "工作消息已收到，正在处理"
    },
    "default": "自动回复：消息已收到",
    "ai_model": {
        "enable": False,
        "api_key": "",
        "model": "qwen-plus",
        "local_mode": False,
        "local_endpoint": "http://localhost:8000/v1/chat/completions"
    }
}

# Active configuration. deepcopy, not .copy(): a shallow copy shares the
# nested "friends"/"groups"/"ai_model" dicts, so runtime edits to
# current_config would silently mutate DEFAULT_CONFIG as well.
current_config = copy.deepcopy(DEFAULT_CONFIG)

def load_config():
    """Load config.json into the module-wide ``current_config``.

    Returns the active configuration dict. If the file is missing,
    unreadable, or not valid JSON, the in-memory configuration is kept
    and written back out via save_config() so a valid file exists for
    the next run (same recovery the original bare ``except`` performed,
    but now narrowed and logged).
    """
    global current_config
    try:
        # utf-8 explicitly: the reply templates contain Chinese text
        with open('config.json', 'r', encoding='utf-8') as f:
            current_config = json.load(f)
    except FileNotFoundError:
        # First run — persist the defaults so the file exists next time.
        save_config()
    except (OSError, ValueError) as e:
        # ValueError covers json.JSONDecodeError and bad encodings.
        logging.error(f"Failed to load config.json: {e}")
        save_config()
    return current_config

def save_config():
    """Persist ``current_config`` to config.json.

    Writes utf-8 with ensure_ascii=False so the Chinese reply templates
    are stored as readable text (matching load_config's utf-8 read)
    instead of \\uXXXX escapes in the platform default encoding.
    """
    with open('config.json', 'w', encoding='utf-8') as f:
        json.dump(current_config, f, indent=4, ensure_ascii=False)

def get_ai_response(prompt, config=None):
    """Return an AI-generated reply for *prompt*, or None.

    Args:
        prompt: the user's message text.
        config: optional configuration dict with the same shape as
            ``current_config``; defaults to the module-wide config
            (added backward-compatibly so the function is testable).

    Returns None when the AI model is disabled or any call fails
    (failures are logged, never raised to the caller).
    """
    cfg = current_config if config is None else config
    ai = cfg['ai_model']
    if not ai['enable']:
        return None

    try:
        if ai['local_mode']:
            # Local model: OpenAI-compatible chat endpoint.
            import requests
            response = requests.post(
                ai['local_endpoint'],
                headers={"Content-Type": "application/json"},
                json={
                    # BUG FIX: original read ai_model['model"] — mismatched
                    # quote, a syntax error that broke the whole module.
                    "model": ai['model'],
                    "messages": [{"role": "user", "content": prompt}],
                    "stream": False
                },
                # Without a timeout a hung local server would block the
                # reply path forever.
                timeout=30,
            )
            return response.json()['choices'][0]['message']['content']
        else:
            # Hosted API call via the InsCode client.
            from InsCode import InsCode
            client = InsCode(api_key=ai['api_key'])
            response = client.chat.completions.create(
                model=ai['model'],
                messages=[{"role": "user", "content": prompt}],
                stream=False
            )
            return response.choices[0].message.content
    except Exception as e:
        logging.error(f"AI回复失败: {str(e)}")
        return None

# WeChat message handling: itchat invokes this for incoming TEXT messages;
# returning a string sends it as the reply, returning None sends nothing.
@itchat.msg_register(TEXT)
def text_reply(msg):
    """Build the auto-reply for an incoming text message.

    Lookup order: per-friend / per-group canned reply, then the AI model
    (if enabled), then the configured default reply.
    """
    try:
        # Decide where the message came from.
        # NOTE(review): FromUserName == ToUserName is normally only true
        # for messages sent to oneself; itchat chatroom UserNames start
        # with '@@'. Confirm this condition actually distinguishes friend
        # vs. group traffic — it looks inverted/incorrect.
        if msg['FromUserName'] == msg['ToUserName']:
            # Personal message: reply with the per-friend template, if any.
            friend = itchat.search_friends(userName=msg['FromUserName'])
            nickname = friend['NickName'] if friend else "未知好友"
            if nickname in current_config['friends']:
                return current_config['friends'][nickname]
        else:
            # Group message: reply with the per-group template, if any.
            # NOTE(review): plain msg_register(TEXT) may not receive group
            # messages at all without isGroupChat=True — verify.
            chatroom = itchat.search_chatrooms(userName=msg['FromUserName'])
            chatroom_name = chatroom['NickName'] if chatroom else "未知群聊"
            if chatroom_name in current_config['groups']:
                return current_config['groups'][chatroom_name]

        # No canned reply matched — try the AI model if enabled.
        if current_config['ai_model']['enable']:
            ai_response = get_ai_response(msg['Text'])
            if ai_response:
                return ai_response

        return current_config['default']
    except Exception as e:
        # Boundary handler: log and send nothing rather than crash itchat.
        logging.error(f"处理消息时出错: {str(e)}")
        return None

def run_wechat_bot():
    """Log in to WeChat (hotReload reuses a cached session, skipping the QR
    scan when possible) and block on itchat's message loop; intended to be
    run in a background thread."""
    itchat.auto_login(hotReload=True)
    itchat.run()

@app.route('/')
def index():
    """Serve the single-page configuration UI (templates/index.html)."""
    return render_template('index.html')

@socketio.on('update_config')
def handle_update_config(config):
    """Replace the active configuration with one pushed from the web UI
    and persist it, then acknowledge with a 'config_updated' event."""
    global current_config
    # The payload comes straight from the browser; only accept a dict so a
    # malformed message cannot replace and persist a corrupt configuration.
    if not isinstance(config, dict):
        socketio.emit('config_updated', {'status': 'error'})
        return
    current_config = config
    save_config()
    socketio.emit('config_updated', {'status': 'success'})

@socketio.on('get_config')
def handle_get_config():
    """Push the active configuration to web clients ('current_config' event).

    NOTE(review): socketio.emit() from the server object broadcasts to all
    connected clients; flask_socketio's context-aware emit() would answer
    only the requester — confirm the broadcast is intended.
    """
    socketio.emit('current_config', current_config)

if __name__ == '__main__':
    load_config()
    # Run the WeChat bot in a daemon thread so it dies with the Flask process.
    wechat_thread = threading.Thread(target=run_wechat_bot, daemon=True)
    wechat_thread.start()
    # use_reloader=False: with debug=True the Werkzeug reloader re-executes
    # this module in a child process, which would start the WeChat login /
    # bot thread twice. Keep the debugger, disable the reloader.
    socketio.run(app, port=5000, debug=True, use_reloader=False)