import json
import csv
from ppts.core.TestUtil import base_path
import os


# Convert performance data into a generic JSON-style dict so it can be
# written into the different report file formats.
def parse_result_data(test_config):
    """Flatten ``test_config`` performance results into a JSON-able dict.

    Returns ``None`` when there are no test URLs.  Otherwise returns a dict
    with the run's basic settings, a ``data`` list holding one details dict
    per URL that collected data, and ``url_count`` (the length of ``data``).

    Fixes a bug in the previous version where the success counter was reset
    inside the per-sample loop, so ``success_count`` was always 0 or 1
    instead of the true number of successful samples.
    """
    # The seven timing metrics shared by the min/max/avg stats and each sample.
    metrics = ('request_time', 'redirect_time', 'domain_lookup_time',
               'connect_time', 'dom_render_time', 'blank_screen_time',
               'page_load_time')

    test_urls = test_config.test_urls
    if not test_urls:
        return None

    # Basic run-level information.
    json_data = {
        'report_name': test_config.report_name,
        'browser_type': test_config.browser_type,
        'no_headless': test_config.no_headless,
        'pass_time': test_config.pass_time,
        'challenge_time': test_config.challenge_time,
        'repeat': test_config.repeat,
        'cache': test_config.cache,
        'page_load_timeout': test_config.page_load_timeout,
        'start_time': test_config.start_time,
        'complete_time': test_config.complete_time,
        'use_time': test_config.use_time,
        'data': [],
    }

    for test_url in test_urls:
        # Only URLs that collected at least one sample produce a details entry.
        if not (test_url.collect_data and test_url.pts_data):
            continue

        details = {
            'is_pass': test_url.is_pass,
            'pass_time': test_url.pass_time,
            'is_challenge': test_url.is_challenge,
            'challenge_time': test_url.challenge_time,
            'name': test_url.name,
            'url': test_url.url,
            'total_count': len(test_url.pts_data),
        }
        # min/max/avg stats plus a (initially empty) list of raw values
        # for every metric.
        for metric in metrics:
            details[metric] = {
                'max': getattr(test_url.pts_max_data, metric),
                'min': getattr(test_url.pts_min_data, metric),
                'avg': getattr(test_url.pts_avg_data, metric),
                'list': [],
            }

        # Per-request interface information.
        details['page_request_failed_count'] = test_url.page_request_failed_count
        details['single_request_details'] = test_url.single_request_details
        details['test_list'] = []

        success_count = 0  # counts ALL successful samples (bug fix)
        for pd in test_url.pts_data:
            record = {
                'success': pd.success,
                'error_msg': pd.error_msg,
                'screenshot_path': pd.screenshot_path,
            }
            for metric in metrics:
                record[metric] = getattr(pd, metric)
            record['start_time'] = pd.start_time
            record['use_time'] = pd.use_time
            details['test_list'].append(record)

            # Only successful samples contribute to the raw metric lists.
            if pd.success:
                success_count += 1
                for metric in metrics:
                    details[metric]['list'].append(getattr(pd, metric))
        details['success_count'] = success_count

        json_data['data'].append(details)

    # Set once after the loop; previously missing entirely when no URL
    # collected data.
    json_data['url_count'] = len(json_data['data'])
    return json_data


# Generate the JSON report file.
def generate_json_file(json_data, report_path):
    """Write ``json_data`` to ``report_path + '.json'`` as pretty-printed,
    UTF-8 JSON (keys in insertion order, non-ASCII kept readable).

    Returns the path of the file that was written.
    """
    out_path = report_path + '.json'
    with open(out_path, 'w', encoding='utf-8') as fp:
        # Stream directly into the file instead of building the string first.
        json.dump(json_data, fp, sort_keys=False, indent=4, ensure_ascii=False)
    return out_path


# Generate the CSV report file.
def generate_csv_file(json_data, report_path):
    """Write one CSV row per tested URL to ``report_path + '.csv'``.

    Each row holds ``name[url]`` followed by the average value of every
    timing metric.  The file is encoded as GBK (readable by Chinese-locale
    Excel).  Returns the path of the file that was written.
    """
    out_path = report_path + '.csv'
    header = ['', 'request_time', 'redirect_time', 'domain_lookup_time',
              'connect_time', 'dom_render_time', 'blank_screen_time',
              'page_load_time']
    # Metric column names double as the keys into each details dict.
    metric_keys = header[1:]
    rows = [
        ['{}[{}]'.format(entry['name'], entry['url'])]
        + [entry[key]['avg'] for key in metric_keys]
        for entry in json_data['data']
    ]
    with open(out_path, 'w', encoding='gbk', newline='') as fp:
        writer = csv.writer(fp)
        writer.writerow(header)
        writer.writerows(rows)
    return out_path


# Generate an Excel report file ending in .xlsx (not implemented yet).
def generate_excel_file(json_data, report_path):
    """Placeholder for writing ``json_data`` as an .xlsx Excel report.

    Currently a no-op that returns ``None``; siblings like
    ``generate_json_file``/``generate_csv_file`` show the intended shape
    (take the parsed data and a path prefix, return the written path).
    """
    pass


# Generate the HTML report from the bundled template.
def generate_html_file(json_data, report_path):
    """Render ``json_data`` into the HTML report template.

    Reads ``<base_path>/template/report.html``, substitutes the
    ``#JSON_DATA_STR#`` placeholder with the serialized ``json_data``, and
    writes the result to ``report_path + '_测试报告.html'``.

    Returns the path of the file that was written.
    """
    report_path = report_path + '_测试报告.html'
    # os.path.join takes multiple components; no need to nest calls.
    template_path = os.path.join(base_path, 'template', 'report.html')
    # 'with' closes the template handle even if reading raises,
    # replacing the manual try/finally of the previous version.
    with open(template_path, encoding='utf-8') as template_file:
        file_context = template_file.read()
    file_context = file_context.replace(
        '#JSON_DATA_STR#',
        json.dumps(json_data, sort_keys=False, ensure_ascii=False))
    with open(report_path, 'w', encoding='utf-8') as f:
        f.write(file_context)
    return report_path

