import os
import sys
import time

import pandas as pd

import utils
import spider


def handle_group_task(task):
    """Download trade data grouped by industry, then merge per-period CSVs.

    For each reporter/industry pair, all HS codes belonging to the industry
    are queried together (comma-joined) for every period/partner/partner2nd
    combination. Each query result is saved as a per-period CSV; afterwards
    the per-period CSVs of each (reporter, industry) pair are concatenated
    into one final CSV.

    Args:
        task: parsed task configuration object (read via utils.get_config_value).

    Side effects:
        Writes CSVs under "<output_dir>/group", a text file listing HS codes
        without an industry, and appends to the progress file for resuming.
    """
    hs_codes = utils.get_config_value(task, "hs_code")
    all_periods = utils.get_config_value(task, "periods")
    all_reporters = utils.get_config_value(task, "reporters")
    all_partners = utils.get_config_value(task, "partners")
    all_partner2nds = utils.get_config_value(task, "partner2nds")

    hs_code2industry = utils.get_config_value(task, "hs_code2industry")
    region_ids = utils.get_config_value(task, "region2id")

    auth_file = utils.get_config_value(task, "auth_file")

    output_dir = os.path.join(utils.get_config_value(task, "output_dir"), "group")
    output_filename_format = utils.get_config_value(task, "output_filename_format")

    # Partition codes: group those with a known industry, collect the rest.
    codes_by_ind = {}
    no_ind_codes = []
    for code_ in hs_codes:
        if code_ in hs_code2industry:
            for ind_ in hs_code2industry[code_]:
                codes_by_ind.setdefault(ind_, []).append(code_)
        else:
            no_ind_codes.append(code_)

    useless_save_path = "./group_dl_no_ind_code.txt"
    print(f"{len(no_ind_codes)} hs code will not be searched, save as \'{useless_save_path}\'.")
    print(no_ind_codes)
    utils.save_no_industry_codes2txt(no_ind_codes, useless_save_path)

    # Resume support: skip combinations already recorded in the progress file.
    progress_save_path = utils.get_config_value(task, "progress_save_file")
    saved_infos = utils.quick_load(progress_save_path)

    # exist_ok avoids the exists()/makedirs() race of the previous version.
    os.makedirs(output_dir, exist_ok=True)

    for reporters in all_reporters:
        for industry, ind_codes in codes_by_ind.items():
            print(industry)

            # "partners" may be one shared list or a per-industry mapping.
            if isinstance(all_partners, list):
                used_partners = all_partners
            elif isinstance(all_partners, dict):
                used_partners = [all_partners[industry]]
            else:
                print("Config file error.")
                sys.exit(5)  # sys.exit: the exit() builtin is REPL-only

            # Loop-invariant for the three inner loops — hoisted.
            hs_code = ",".join(ind_codes)

            for periods in all_periods:
                for partners in used_partners:
                    for partner2nds in all_partner2nds:
                        try_save_info = f"{str(reporters)} {str(periods)} {str(hs_code)} {str(partners)} {str(partner2nds)} {str(industry)}"

                        # Already downloaded on a previous run — skip.
                        if try_save_info in saved_infos:
                            print(f"{try_save_info} has been searched, pass.")
                            continue

                        print(try_save_info)

                        csv_filename = utils.generate_output_filename(output_filename_format, "", reporters, periods, industry)
                        csv_path = os.path.join(output_dir, csv_filename)

                        data = run_base_spider(hs_code, periods, reporters, partners, partner2nds, region_ids, auth_file)

                        # Persist result, then record progress so a rerun skips it.
                        utils.save_raw_data2csv(data, csv_path)
                        utils.quick_save(try_save_info, progress_save_path)

                        time.sleep(2)  # throttle requests to the remote API

    # Merge the per-period CSVs into one file per (reporter, industry).
    for reporters in all_reporters:
        for industry in codes_by_ind:
            dfs = []
            for periods in all_periods:
                csv_filename = utils.generate_output_filename(output_filename_format, "", reporters, periods, industry)
                csv_path = os.path.join(output_dir, csv_filename)
                dfs.append(pd.read_csv(csv_path))

            final_df = pd.concat(dfs, axis=0).reset_index(drop=True)
            final_filename = utils.generate_output_filename(output_filename_format, "", reporters, "", industry)
            final_path = os.path.join(output_dir, final_filename)
            utils.save_raw_data2csv(final_df, final_path)


def handle_base_task(task):
    """Download trade data one HS code at a time.

    Each HS code with a known industry is queried individually for every
    reporter/period/partner/partner2nd combination, and each result is saved
    to its own CSV. HS codes without an industry mapping are skipped and
    listed in a text file instead.

    Args:
        task: parsed task configuration object (read via utils.get_config_value).

    Side effects:
        Writes per-query CSVs under the configured output directory, a text
        file listing skipped HS codes, and appends to the progress file for
        resuming.
    """
    hs_codes = utils.get_config_value(task, "hs_code")
    all_periods = utils.get_config_value(task, "periods")
    all_reporters = utils.get_config_value(task, "reporters")
    all_partners = utils.get_config_value(task, "partners")
    all_partner2nds = utils.get_config_value(task, "partner2nds")

    hs_code2industry = utils.get_config_value(task, "hs_code2industry")
    region_ids = utils.get_config_value(task, "region2id")

    auth_file = utils.get_config_value(task, "auth_file")

    output_dir = utils.get_config_value(task, "output_dir")
    output_filename_format = utils.get_config_value(task, "output_filename_format")

    # Partition codes: expand (code, industry) pairs, collect the rest.
    # A code mapped to several industries yields one entry per industry.
    ind_codes = []
    no_ind_codes = []
    for code_ in hs_codes:
        if code_ in hs_code2industry:
            for ind_ in hs_code2industry[code_]:
                ind_codes.append({
                    "hs_code": code_,
                    "industry": ind_
                })
        else:
            no_ind_codes.append(code_)

    useless_save_path = "./base_dl_no_ind_code.txt"
    print(f"{len(no_ind_codes)} hs code will not be searched, save as \'{useless_save_path}\'.")
    print(no_ind_codes)
    utils.save_no_industry_codes2txt(no_ind_codes, useless_save_path)

    # Resume support: skip combinations already recorded in the progress file.
    progress_save_path = utils.get_config_value(task, "progress_save_file")
    saved_infos = utils.quick_load(progress_save_path)

    # exist_ok avoids the exists()/makedirs() race of the previous version.
    os.makedirs(output_dir, exist_ok=True)

    for reporters in all_reporters:
        for periods in all_periods:
            for ind_code in ind_codes:
                hs_code = ind_code["hs_code"]
                industry = ind_code["industry"]

                # "partners" may be one shared list or a per-industry mapping.
                if isinstance(all_partners, list):
                    used_partners = all_partners
                elif isinstance(all_partners, dict):
                    used_partners = [all_partners[industry]]
                else:
                    print("Config file error.")
                    sys.exit(5)  # sys.exit: the exit() builtin is REPL-only

                for partners in used_partners:
                    for partner2nds in all_partner2nds:
                        try_save_info = f"{str(reporters)} {str(periods)} {str(hs_code)} {str(partners)} {str(partner2nds)} {str(industry)}"

                        # Already downloaded on a previous run — skip.
                        if try_save_info in saved_infos:
                            print(f"{try_save_info} has been searched, pass.")
                            continue

                        print(try_save_info)

                        csv_filename = utils.generate_output_filename(output_filename_format, hs_code, reporters, periods, industry)
                        csv_path = os.path.join(output_dir, csv_filename)

                        data = run_base_spider(hs_code, periods, reporters, partners, partner2nds, region_ids, auth_file)

                        # Persist result, then record progress so a rerun skips it.
                        utils.save_raw_data2csv(data, csv_path)
                        utils.quick_save(try_save_info, progress_save_path)

                        time.sleep(2)  # throttle requests to the remote API


def run_base_spider(hs_code, periods, reporters, partners, partner2nds, region_ids, auth_file):
    """Fetch trade data for a single query.

    Builds the additional request parameters from the period/reporter/partner
    selections and region mapping, then issues the request for the given
    HS code using the credentials in *auth_file*.

    Returns:
        Whatever spider.request_trade_data returns for this query.
    """
    extra_params = spider.set_additional_request_params(
        periods, reporters, partners, partner2nds, region_ids
    )
    return spider.request_trade_data(hs_code, extra_params, auth_file)


if __name__ == "__main__":
    # CLI: argv[1] is the task file path (defaults to ./task/task.json);
    # argv[2] == "industry" switches to the industry-grouped download mode.
    group_by_industry = False

    print(sys.argv)
    if len(sys.argv) > 1:
        file_path = sys.argv[1]
        if len(sys.argv) > 2 and sys.argv[2] == "industry":
            group_by_industry = True
    else:
        file_path = "./task/task.json"

    task = utils.read_task_from_file(file_path)
    handler = handle_group_task if group_by_industry else handle_base_task
    handler(task)
