import json
import logging
import os
import unittest
from logging import handlers

import pymysql
from bs4 import BeautifulSoup
from config import DIR_PATH, LOG_PATH


def common_assert(self: unittest.TestCase, resp, expect):
    """Run the three standard response checks shared by the API tests.

    :param self: test case whose assert* methods perform the checks
    :param resp: response object exposing status_code, json() and text
    :param expect: dict with expect_code / expect_status / expect_description
    """
    self.assertEqual(expect["expect_code"], resp.status_code)
    body = resp.json()
    self.assertEqual(expect["expect_status"], body["status"])
    self.assertIn(expect["expect_description"], resp.text)


def read_data(filename, key):
    """Load test-data records from a JSON file in the project's data directory.

    :param filename: name of the JSON file under <DIR_PATH>/data
    :param key: top-level key whose list of records is read
    :return: list of tuples — each record's values with the first one dropped
    """
    data_file = os.sep.join((DIR_PATH, "data", filename))
    with open(data_file, "r", encoding="utf-8") as fp:
        records = json.load(fp).get(key)
    # The first value of every record is intentionally skipped
    # (presumably a case id/label — confirm against the data files).
    return [tuple(record.values())[1:] for record in records]


def read_html(file, features, label):
    """Extract a form's action URL and the name/value pairs of the given tag.

    :param file: HTML markup (string or file-like object) to parse
    :param features: parser name handed to BeautifulSoup (e.g. "html.parser")
    :param label: tag name whose name/value attributes are collected
    :return: (form action URL, dict mapping name attribute -> value attribute)
    """
    soup = BeautifulSoup(file, features)
    action_url = soup.form.get("action")
    # Later tags with a duplicate name override earlier ones, as in the
    # original loop.
    fields = {tag.get("name"): tag.get("value") for tag in soup.find_all(label)}
    return action_url, fields


def init_log_config(filename, when="midnight", interval=1, backup_count=7):
    """Initialise the root logger with a console handler and a timed
    rotating file handler.

    :param filename: path of the log file
    :param when: unit for log rotation (e.g. "midnight", "H", "D")
    :param interval: number of `when` units between rotations
    :param backup_count: number of rotated log files to keep
    :return: None
    """
    logger = logging.getLogger()

    # Idempotence guard: this function is invoked on every db_util() error,
    # and unconditionally re-adding handlers to the root logger would make
    # each log record be emitted once per previous call.
    if logger.handlers:
        return

    # Log level: DEBUG / INFO / WARNING / ERROR / CRITICAL
    logger.setLevel(logging.INFO)

    # Handler writing to the console.
    stream_handler = logging.StreamHandler()

    # Handler writing to the log file, rotating every `interval` * `when`
    # and keeping `backup_count` old files.
    file_handler = handlers.TimedRotatingFileHandler(
        filename, when, interval, backup_count, encoding="utf-8"
    )

    # "<time> <level> [<file>(<func>:<line>)] - <message>"
    formatter = logging.Formatter(
        "%(asctime)s %(levelname)s [%(filename)s(%(funcName)s:%(lineno)d)] - %(message)s"
    )
    stream_handler.setFormatter(formatter)
    file_handler.setFormatter(formatter)

    logger.addHandler(stream_handler)
    logger.addHandler(file_handler)

def db_util(sql: str):
    """Execute a single SQL statement against the member database.

    :param sql: statement to execute. A SELECT returns all fetched rows;
        any other statement is committed and its affected row count returned.
    :return: tuple of rows for SELECT, affected row count otherwise, or
        None when execution failed (the error is logged).
    """
    # NOTE(review): credentials are hard-coded and `sql` is executed as-is
    # with no parameter binding — never pass untrusted input to this helper.
    conn = None
    cursor = None
    try:
        conn = pymysql.connect(
            host="121.43.169.97",
            user="student",
            password="P2P_student_2023",
            database="czbk_member",
            port=3306,
            charset="utf8",
        )
        cursor = conn.cursor()
        cursor.execute(sql)
        if sql.split()[0].upper() == "SELECT":
            return cursor.fetchall()
        conn.commit()
        return conn.affected_rows()
    except Exception as e:
        init_log_config(LOG_PATH)
        logging.error(f"数据库ERROR: {e}")
        # Only roll back when the connection was actually established;
        # otherwise conn is None and the original error would be masked
        # by an AttributeError.
        if conn is not None:
            conn.rollback()
    finally:
        # Guard both closes: if pymysql.connect() itself failed, cursor
        # (and possibly conn) is still None.
        if cursor is not None:
            cursor.close()
        if conn is not None:
            conn.close()

def clear_member_data(phone):
    """Delete every database record belonging to the member with `phone`.

    Child tables are cleared first; the row in mb_member is removed last.
    """
    # NOTE(review): `phone` is interpolated straight into the SQL text —
    # db_util offers no parameter binding, so only call this with trusted
    # test data.
    statements = (
        f"DELETE FROM mb_member_info WHERE member_id = (SELECT id FROM mb_member WHERE phone = {phone});",
        f"DELETE FROM mb_member_login_log WHERE member_id = (SELECT id FROM mb_member WHERE phone = {phone});",
        f"DELETE FROM mb_member_register_log WHERE phone = {phone};",
        f"DELETE FROM mb_member WHERE phone = {phone};",
    )
    for statement in statements:
        db_util(statement)




if __name__ == '__main__':
    # Ad-hoc manual checks: print sample test data, then delete one
    # member row from the live database (side effect — run deliberately).
    print(read_data("register_data.json", "phone_code_api"))
    # print(read_data("login_data.json", "is_login_api"))
    print(db_util("delete from mb_member where phone = '131200234481'"))
