from functools import cached_property
from io import BytesIO
import io
import pathlib
from typing import AsyncIterable
from httpx import AsyncClient
import pandas as pd
import subprocess
import tempfile
from loguru import logger
import re
import requests
from config import AI_URL, AI_KEY, AI_MODEL, BOM_EXDB_PATH, BOMFILE_PATH, DECODE_URL, MONGO_URL
from openai import AsyncOpenAI
import traceback
import aiofiles
import os
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
import base64
from pydantic import BaseModel, field_validator
from molingtools.nosql.mongo import Mongoer
from molingtools.time import getNowTime
import gradio as gr


class Bom(BaseModel):
    """One BOM (bill of materials) line item parsed from a spreadsheet row."""
    id: str                          # material / part code
    name: str                        # material name
    info: str                        # material description / specification
    num: int                         # quantity used
    pin_num: int                     # pin count per part
    pcb_footprint: str|None = None   # PCB footprint name, when the column exists
    
    @field_validator('num', 'pin_num', mode='before')
    def num_make(cls, value, data):
        # Coerce raw spreadsheet cells such as "3.0" (str) to int;
        # blank / None / 0 all normalize to 0.
        if value: return int(float(value))
        else: return 0

    @cached_property
    def all_pin_num(self):
        # Total pins contributed by this line: quantity * pins per part.
        return self.num*self.pin_num
    
    @cached_property
    def is_check(self):
        # True when the id is purely digits (re.match anchors the start,
        # '$' the end), i.e. looks like an internal numeric material code.
        return bool(re.match(r'\d+$', self.id))
    
def to_bom_map(path:str, is_df=False, is_bt=True)->dict[str, Bom]|pd.DataFrame:
    """Parse a BOM excel file into a mapping of material code -> Bom.

    path:  uploaded file path (or anything pd.read_excel accepts when is_bt=False)
    is_df: return the raw DataFrame (header located, columns NOT renamed) instead
    is_bt: when True, run the file through the remote decode service first
    Raises ValueError with a user-facing message on any failure.
    """
    try:
        if not path: raise ValueError('未上传BOM文件')
        data = file_decode(path) if is_bt else path
        # Locate the header row: first row that contains one of the known
        # material-code column names. Different ERP exports use different names.
        header_row = None
        for index, row in enumerate(pd.read_excel(data, header=None).fillna('') \
                                    .map(lambda x:str(x).strip()) \
                                    .to_records(index=False) \
                                    .tolist()):
            if '子件编码' in row or '物料编码' in row or '存货编码' in row:
                header_row = index
                break
        # Fail clearly rather than reading the sheet with a bogus header
        # (previously: NameError on an empty sheet / last row used silently).
        if header_row is None:
            raise ValueError('未找到表头行(子件编码/物料编码/存货编码)')
        bom_df:pd.DataFrame = pd.read_excel(data, header=header_row, dtype=str).fillna('').map(lambda x:str(x).strip())
        if is_df: return bom_df
        # Normalize the column-name variants to one canonical set.
        bom_df = bom_df.rename(columns={'子件编码':'物料编码', '子件名称':'物料名称', '子件规格':'物料描述', 
                                        '存货编码':'物料编码', '存货名称':'物料名称', '规格型号':'物料描述', 'PCB footprint': 'PCB Footprint'})
        # Prefer '基本用量' when present and populated; fall back to '使用数量'.
        if '基本用量' in bom_df.columns and bom_df['基本用量'].iloc[0]: 
            num_key = '基本用量'
        else:
            num_key = '使用数量'
        bom_map = {row['物料编码']: Bom(id=row['物料编码'], name=row['物料名称'], info=row['物料描述'], 
                                        num=row.get(num_key), pin_num=row.get('PIN数量'), pcb_footprint=row.get('PCB Footprint'))
                    for row in bom_df.to_dict(orient='records') if row['物料编码']}
        return bom_map
    except Exception as e:
        # pandas emits this exact message for password-protected xlsx files.
        if str(e)=='Excel file format cannot be determined, you must specify an engine manually.': e = 'excel文件被加密, 无法读取'
        raise ValueError(f'🚫BOM文件格式错误\n{e}')

def get_exdb_map()->dict[str, str]:
    """Build a 'Part Number' -> 'PCB Footprint' lookup from every sheet of the footprint workbook."""
    mapping: dict[str, str] = {}
    # sheet_name=None loads all sheets; only their DataFrames matter here.
    for sheet_df in pd.read_excel(BOM_EXDB_PATH, sheet_name=None).values():
        for record in sheet_df.to_dict(orient='records'):
            part = record.get('Part Number')
            footprint = record.get('PCB Footprint')
            if part and footprint:
                mapping[part] = footprint
    return mapping

def _derive_key(password: str, salt: bytes) -> bytes:
    """Derive a urlsafe-base64 Fernet key from a password via PBKDF2-HMAC-SHA256.

    The parameters (SHA256, 32-byte output, 100000 iterations) must stay in
    sync between encrypt() and decrypt() or existing files become unreadable.
    """
    kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=100000)
    raw_key = kdf.derive(password.encode())
    return base64.urlsafe_b64encode(raw_key)

def encrypt(file_data: bytes, password: str) -> bytes:
    """Encrypt file_data with a password; output layout is 16-byte salt + Fernet token."""
    # Fresh random salt per file so identical files encrypt differently.
    salt = os.urandom(16)
    fernet = Fernet(_derive_key(password, salt))
    token = fernet.encrypt(file_data)
    # Prepend the salt so decrypt() can re-derive the same key.
    return salt + token

def decrypt(encrypted_data: bytes, password: str) -> bytes:
    """Decrypt data produced by encrypt().

    Raises ValueError when the password is wrong or the payload is corrupt.
    """
    # The first 16 bytes are the salt written by encrypt().
    salt = encrypted_data[:16]
    token = encrypted_data[16:]
    fernet = Fernet(_derive_key(password, salt))
    try:
        return fernet.decrypt(token)
    except Exception:
        # Fernet raises InvalidToken on any tamper / key mismatch.
        raise ValueError("解密失败，可能是密码错误或文件已损坏")
    

def getAnalysis(analysis, initial='',probe='')->str:
    """Render the analysis template; empty initial/probe produce empty .ic/.probe lines."""
    ic_directive = f'.ic {initial}' if initial else ''
    probe_directive = f'.probe {probe}' if probe else ''
    with open('template/analyse.tp', 'r', encoding='utf-8') as f:
        template = f.read()
    return template.format(initial=ic_directive, probe=probe_directive, analysis=analysis)

# Run an ngspice batch simulation and collect its tabular output.
def getResultDf(circuit: str, analyse:str, select:list, ml='')->tuple[pd.DataFrame|None,str,str]:
    """Write netlist + control block to temp files, run ngspice, parse results.

    Returns (DataFrame or None, combined stdout/err text, rendered run block).
    The temp files are intentionally kept (delete=False) and their paths logged
    so a failed simulation can be inspected afterwards.
    """
    with (tempfile.NamedTemporaryFile(delete=False, suffix=".net", mode='w') as f1,
          tempfile.NamedTemporaryFile(delete=False, suffix=".txt", mode='w') as f2):
        with open('template/run.tp',mode='r', encoding='utf-8') as runf: 
            run_text = runf.read().format(ml=ml, path=f2.name, select=' '.join(select))
        f1.write('\n\n'.join((circuit,analyse,run_text)))
        logger.info(f'code_path: {f1.name}')
        logger.info(f'result_path: {f2.name}')
        output, outerr='',''
    try:
        # Batch-mode ngspice with a 20s cap; argument list (not shell=True)
        # avoids any shell interpretation of the temp-file path.
        process = subprocess.run(['ngspice', '-b', f1.name], stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=20)
        output = process.stdout.decode('utf-8')
        outerr = process.stderr.decode('utf-8')
    except subprocess.TimeoutExpired:
        outerr = "Command execution timed out"
    if outerr and re.search('Error:', outerr): logger.error(outerr)
    outerr = outerr and f'⚠️❌\n{outerr}'
    # ngspice writes whitespace-separated columns; None if missing/unparseable.
    try:
        df = pd.read_csv(f2.name, delimiter=r'\s+')
    except Exception:
        # Narrowed from a bare except: never swallow KeyboardInterrupt/SystemExit.
        df = None
    return df, output+outerr, run_text

# Shared async OpenAI client; None when the AI service is not configured.
_client = AsyncOpenAI(base_url=AI_URL, api_key=AI_KEY) if AI_URL and AI_KEY else None
async def aiResulIt(prompt:str, question:str, temperature: float=0.7, **kwargs)->AsyncIterable[str]:
    """Stream an AI chat completion.

    prompt:   name of a system-prompt file under template/prompt/ (without .md)
    question: user message
    Yields text chunks; on error or missing config yields one markdown notice.
    """
    if not _client:
        yield '```\n⚠️未配置AI服务参数, 该服务不可用\n```'
        return
    try:
        async with aiofiles.open(f'template/prompt/{prompt}.md', mode='r', encoding='utf-8') as f:
            prompt = await f.read()
        async for chunk in await _client.chat.completions.create(
                model=AI_MODEL,
                messages=[
                    {"role": "system", "content": prompt},
                    {"role": "user", "content": question}
                ],
                stream=True,
                temperature=temperature,
                **kwargs
            ):
            # Stream chunks may carry an empty choices list or a None delta
            # (e.g. role-only / final chunks) — skip those instead of yielding
            # None, which would break consumers expecting str.
            if chunk.choices and chunk.choices[0].delta.content:
                yield chunk.choices[0].delta.content
    except Exception as e:
        traceback.print_exc()
        yield f'```\n🚫调用AI服务出错\n{e}, 请联系管理员\n```'

def getBOMains(wl:str=None)->dict[str, Bom]|pd.DataFrame|str:
    """Load the master BOM file whose name starts with the configured prefix.

    wl=None returns the full code->Bom map; otherwise returns up to 200 rows
    whose '存货编码' column contains wl. Returns an error string on failure.
    """
    if not BOMFILE_PATH: return 'BOM总表路径未配置'
    # Split the configured path into its directory and file-name prefix.
    directory, prefix = re.search(r'(.+?)/([^/]+)$', BOMFILE_PATH).groups()
    for filename in os.listdir(directory):
        if not filename.startswith(prefix):
            continue
        full_path = f'{directory}/{filename}'
        if wl is None:
            return to_bom_map(full_path, is_bt=False)
        df = to_bom_map(full_path, is_df=True, is_bt=False)
        return df.loc[df['存货编码'].str.contains(wl)].iloc[:200]
    return 'BOM总表文件未匹配到'

# Mongo collection used for access logging; falsy when MONGO_URL is not configured.
MSE = MONGO_URL and Mongoer(MONGO_URL, 'log', 'net_check')

def save_log(act, request: gr.Request):
    """Record a user action with its client IP — to Mongo when configured, else to the log."""
    # Prefer the proxy-forwarded address; fall back to the direct client host.
    ip = request.request.headers.get("X-Forwarded-For") or request.request.client.host
    if MSE:
        try:
            MSE.insert({'ip': ip, 'act': act, 'date': str(getNowTime())})
        except Exception:
            # Best-effort: a Mongo failure must not break the request; fall back
            # to the file log. (Narrowed from a bare except.)
            traceback.print_exc()
            logger.info(f'{ip}: {act}')
    else:
        logger.info(f'{ip}: {act}')
        
def file_decode(path):
    """Send the file at `path` to the decode service and return the decoded bytes as a stream.

    Raises requests.HTTPError on an error status (previously an error page would
    be handed back and later mis-parsed as excel) and requests.Timeout if the
    service stalls (previously the call could hang forever).
    """
    pather = pathlib.Path(path)
    with open(path, 'rb') as file:
        response = requests.post(DECODE_URL+'/file', files={'file': (pather.name, file.read())}, timeout=60)
    response.raise_for_status()
    return io.BytesIO(response.content)