from fdfs_client.client import get_tracker_conf,Fdfs_client
import requests
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from db_server.models import Vm_last_status
from flask import current_app


# MySQL connection settings for the business database.
# NOTE(review): credentials are hardcoded in source — move them to
# environment variables or a config file before production use.
host = '172.16.13.1'
user = 'root'
password = '123456'
port = 3306

# FastDFS client configuration files. The code below uses fdfs_conf2 (a
# local Windows test path); fdfs_conf1 is presumably the deployment config
# referenced by the in-code TODO notes — confirm before shipping.
fdfs_conf1 = 'client.conf'
fdfs_conf2 ="C:\\Users\\Admin\\Desktop\\client0.conf"


def delete_fdfs(path):
    """
    Delete a file from FastDFS.

    :param path: remote file id (path) of the file to delete
    :return: the FDFS response on success, or an error message string
             when the deletion fails (e.g. the file does not exist)
    """
    # TODO: the config path differs between the container deployment and
    # local testing — adjust before real use (translated from original note).
    tracker_config = get_tracker_conf(fdfs_conf2)
    fdfs = Fdfs_client(tracker_config)
    try:
        result = fdfs.delete_file(path)
    except Exception as exc:
        print(exc)
        return "删除失败，没有这个文件"
    return result



def upload_file_stream(file_obj):
    """
    Upload a file to FastDFS without writing it to local disk, streaming
    its content as a binary buffer.

    :param file_obj: a readable binary file-like object
    :return: the FDFS response on success, or an error message string
    """
    content = file_obj.read()

    # TODO: the config path differs between the container deployment and
    # local testing — adjust before real use (translated from original note).
    tracker_config = get_tracker_conf(fdfs_conf2)
    fdfs = Fdfs_client(tracker_config)
    try:
        # Send the raw bytes straight to FastDFS (appender mode).
        return fdfs.upload_appender_by_buffer(content)
    except Exception as exc:
        print(exc)
        return "上传失败{0}".format(exc)



def upload_mydb(payload: dict, dbname: str, type, address, port):
    """
    Forward data to the database service over HTTP.

    Sends *payload* as JSON to the endpoint selected by *type* and returns
    the service's response untouched.

    :param payload: data required for the insert/delete operation
    :param dbname: target database name (part of the URL path)
    :param type: operation selector — "zip", "leader", "lower", "delete";
                 any other value falls back to the plain excel-add endpoint
    :param address: database service host
    :param port: database service port
    :return: the ``requests.Response`` from the service
    """
    # One endpoint per operation type instead of five duplicated URL builds.
    endpoints = {
        "zip": "/vm_latest/add",
        "leader": "/excel/add/leader",
        "lower": "/excel/add/lower",
        "delete": "/excel/delete",
    }
    endpoint = endpoints.get(type, "/excel/add")
    url = "http://{}:{}/db/{}{}".format(address, port, dbname, endpoint)
    return requests.post(url, json=payload)




def data_add(data, pst, ret):
    """
    Persist the metadata of an uploaded file into the business database.

    If a record with the same filename / pc_phone / role already exists,
    the old file is deleted from FastDFS and the record's path is updated;
    otherwise a new ``Vm_last_status`` row is inserted.

    :param data: dict sent by the client side (``file_name``, ``describe``,
                 ``pc_phone``, ``user_name``)
    :param pst: locally-queried permission object (``dbname``, ``department``,
                ``department_id``, ``business``, ``user_id``)
    :param ret: dict returned by the FDFS upload; ``'Remote file_id'`` and
                ``'Storage IP'`` are bytes
    :return: 200 on success, 401 on update failure, 404 on insert failure,
             500 on any unexpected error
    """
    try:
        database = pst.dbname
        conn_str = 'mysql+pymysql://{}:{}@{}:{}/{}'.format(user, password, host, port, database)
        engine = sqlalchemy.create_engine(conn_str, echo=True)
        Session = sessionmaker(bind=engine)
        session = Session()

        # Identity/permission fields travel with the request (token chain
        # from the VM) because this service is decoupled from the auth DB
        # and cannot query it directly.
        department = pst.department
        department_id = pst.department_id
        role = pst.business
        user_id = pst.user_id

        filename = data.get("file_name")
        filepath = ret.get('Remote file_id').decode()
        file_ip = ret.get('Storage IP').decode()

        # Client-supplied fields may carry trailing CRLF garbage; keep only
        # the first line of each.
        describe = data.get("describe", "无备注").split("\r\n")[0]
        pc_phone = data.get("pc_phone").split("\r\n")[0]
        user_name = data.get("user_name").split("\r\n")[0]
        role_id = 2

        existing = (session.query(Vm_last_status)
                    .filter(Vm_last_status.filename == filename)
                    .filter(Vm_last_status.pc_phone == pc_phone)
                    .filter(Vm_last_status.role == role)
                    .all())

        if existing:
            # Duplicate found: drop the old FDFS file and point the first
            # matching record at the new path. (The original code looped but
            # always returned on the first iteration, so only one record is
            # ever updated.)
            record = existing[0]
            delete_fdfs(record.path)
            record.path = filepath
            try:
                session.add(record)
                session.commit()
                return 200
            except Exception as e:
                session.rollback()
                print(e, "记录日志")
                return 401
            finally:
                # Guarantee the session is released on every path.
                session.close()

        # No duplicate: insert a fresh record; department info is optional.
        fields = dict(filename=filename, path=filepath, deleted=0,
                      user_id=user_id, user_name=user_name, role=role,
                      role_id=role_id, fgroup=file_ip,
                      pc_phone=pc_phone, describe=describe)
        if all([department, department_id]):
            fields.update(department_id=department_id, department=department)
        excel = Vm_last_status(**fields)

        try:
            session.add(excel)
            session.commit()
            return 200
        except Exception as e:
            session.rollback()
            print(e, "记录日志")
            return 404
        finally:
            session.close()

    except Exception as e:
        # Fixed: the original format string had three %s placeholders but
        # only one argument, which made the logging call itself fail.
        current_app.logger.error("error_msg: %s", e)
        print("记录日志", e)
        return 500

if __name__ == '__main__':
    # Manual smoke test: delete a known file id from FDFS and show the result.
    result = delete_fdfs(b'group1/M00/00/01/rBANAV3tr52APVPAAAAFsEslqZ418.conf')
    print(result)


