import json
import logging.handlers
import os

import pymysql

from config import DIR_PATH
from bs4 import BeautifulSoup

def common_assert(resp, expect_code=None, expect_json=None, expect_text=None):
    """Run the enabled assertions against an HTTP response.

    Args:
        resp: response object exposing ``status_code``, ``json()`` and ``text``
            (e.g. a ``requests.Response``).
        expect_code: expected HTTP status code (int), or None to skip the check.
        expect_json: substring expected inside ``resp.json()["description"]``,
            or None to skip the check.
        expect_text: substring expected inside ``resp.text``, or None to skip.

    Raises:
        AssertionError: when any enabled expectation is not met.
    """
    # Compare against None explicitly so falsy-but-valid expectations
    # (e.g. an empty expected substring) are not silently skipped.
    if expect_code is not None:
        assert resp.status_code == expect_code, \
            "状态码断言错误，预期状态码:{}，实际状态码:{}".format(expect_code, resp.status_code)
    if expect_json is not None:
        # Parse the body once (the original called resp.json() twice).
        description = resp.json()["description"]
        assert expect_json in description, \
            "JSON断言错误，预期JSON数据:{}，实际JSON数据:{}".format(expect_json, description)
    if expect_text is not None:
        assert expect_text in resp.text, \
            "文本断言错误，预期文本:{}，实际文本:{}".format(expect_text, resp.text)

def read_json(filename, keys):
    """Load parameterized test data from a JSON file under ``<DIR_PATH>/data``.

    Args:
        filename: name of the JSON file inside the ``data`` directory.
        keys: top-level key in the JSON document whose value is a list of
            record objects (dicts).

    Returns:
        list[tuple]: one tuple per record, with each record's first value
        (a human-readable description) skipped.

    Raises:
        KeyError: if ``keys`` is missing from the document.  The original
            used ``dict.get`` here, which returned None and surfaced later
            as a confusing ``TypeError`` in the loop.
    """
    filepath = os.path.join(DIR_PATH, "data", filename)
    with open(filepath, "r", encoding="utf-8") as f:
        records = json.load(f)
    # The [1:] slice skips the leading description field of every record.
    return [tuple(record.values())[1:] for record in records[keys]]


class GetLog:
    """Lazily-initialised singleton wrapper around the root logger.

    The first call to :meth:`get_logger` attaches a nightly-rotating file
    handler to the root logger; every later call returns the same,
    already-configured instance.
    """

    # Cached root logger; stays None until the first get_logger() call.
    logger = None

    @classmethod
    def get_logger(cls):
        """Return the shared root logger, configuring it on first access."""
        if cls.logger is not None:
            return cls.logger

        # Grab the root logger and capture INFO and above.
        cls.logger = logging.getLogger()
        cls.logger.setLevel(logging.INFO)

        # Rotate the log file at midnight, keeping 30 days of history.
        log_path = os.sep.join([DIR_PATH, "log", "log.log"])
        handler = logging.handlers.TimedRotatingFileHandler(
            filename=log_path,
            when="midnight",
            interval=1,
            backupCount=30,
            encoding="utf-8",
        )
        handler.setFormatter(logging.Formatter(
            "%(asctime)s %(levelname)10s [%(name)s] [%(filename)s(%(funcName)s:%(lineno)d)] - %(message)s"
        ))
        cls.logger.addHandler(handler)
        return cls.logger

# Extract the key third-party data from the mock response via BeautifulSoup
def read_html(resp):
    """Pull the redirect URL and hidden form fields out of a mock response.

    Args:
        resp: response whose ``json()["description"]["form"]`` holds an HTML
            fragment containing a ``<form>`` element.

    Returns:
        tuple: ``(url, data)`` — the form's ``action`` attribute and a dict
        mapping every ``<input>`` element's ``name`` to its ``value``.
    """
    form_html = resp.json()["description"]["form"]
    soup = BeautifulSoup(form_html, "html.parser")

    url = soup.form.get("action")
    print("提取的三方URL为:", url)
    # Collect every <input name=... value=...> pair as the request payload.
    data = {field.get("name"): field.get("value") for field in soup.find_all("input")}
    return url, data

# Database access utility (MySQL via pymysql) — currently disabled; the
# commented-out implementation below is kept for reference.
# def connect_mysql(sql):
#     conn = None
#     cursor = None
#     try:
#
#         # 获取连接对象
#         conn = pymysql.connect(host="121.43.169.97",
#                                user="student",
#                                password="P2P_student_2022",
#                                database="czbk_member",
#                                port=3306,
#                                charset='utf8',
#                                autocommit=True)
#         # 获取游标对象
#         cursor = conn.cursor()
#         # 执行SQL语句
#         cursor.execute(sql)
#         if sql.lower().split(" ")[0] == "select":
#             return cursor.fetchall()
#         else:
#             return cursor.rowcount
#     except Exception as e:
#         print(e)
#         conn.rollback()
#     finally:
#         if cursor:
#             cursor.close()
#         if conn:
#             conn.close()

# def clear_data():
#     sql_1 = """
#
#     """
#     connect_mysql(sql_1)
#     sql_2 = """
#
#     """
#     sql_3 = """
#
#     """
#     sql_4 = """
#
#     """
