#! /usr/bin/python3
# coding=utf-8
# ******************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
# licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#     http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# Author: senlin
# Create: 2022-1-19
# ******************************************************************************/

import os
import re
import time
import stat
import yaml
import json
from lxml import etree
from src.libs.csvrw import CSVRW
from src.api.obs import OpenSuseOBSAPI
from src.api.gitee import GiteeAPI
from src.libs.logger import logger
from src.config import constant, global_config
from src.libs.base import http

from src.libs.common_commands import Command
from src.release.analyse_repo_primary import AnalyseRepoPrimaryData
from src.release.check_lang import IdentifyBUildDependLanguage
from src.release.openeuler_repo_analyse import RepoAnalyse
from src.libs.exception import OectProcessEventException
from src.libs.analyse_bad_status_package import AnalyseBadPackage
from src.config.constant import OECT_OUTPUT_PATH


class AllCommands(object):
    """Entry points for the repo / OBS / Gitee analysis commands.

    Each ``process_*`` method is a self-contained command: it gathers data
    (from OBS, Gitee, repo metadata or local files), transforms it, and
    writes the result to a csv/yaml file in the working directory.
    """

    # Flags/modes for os.open so result files are created read/write for the
    # owner only, instead of inheriting the process umask default.
    R_FLAG = os.O_RDONLY
    R_MODES = stat.S_IRUSR
    W_FLAG = os.O_WRONLY | os.O_CREAT
    W_MODES = stat.S_IWUSR | stat.S_IRUSR

    @classmethod
    def process_refresh_enterprise_full_sig_repos(cls, res_file=''):
        """Refresh the local yaml (and csv) snapshot of all openEuler sig repos.

        Args:
            res_file: '' writes the default snapshot yaml, 'latest' writes the
                "latest" snapshot yaml; any other value is rejected with an
                error log.

        Returns:
            None
        """
        # Imported lazily; importing at module scope presumably creates a
        # circular import with the community query module -- TODO confirm.
        from src.community.openeuler_community_info_query import OpenEulerCommunityRepoInfoQuery
        sig_info_query = OpenEulerCommunityRepoInfoQuery()
        src_pkg_data = sig_info_query.query_full_sig_repos()

        if not res_file:
            res_file = global_config.GITEE_OE_FULL_REPO_SIGS
        elif res_file == 'latest':
            res_file = global_config.GITEE_LATEST_OE_FULL_REPO_SIGS
        else:
            logger.error("Invalid parameter: %s", res_file)
            return

        if os.path.exists(res_file):
            os.remove(res_file)

        with os.fdopen(os.open(res_file, cls.W_FLAG, cls.W_MODES), "w") as fout:
            yaml.dump(src_pkg_data, fout)
        logger.info(f"Refresh {res_file} finished!")

        # Also keep a csv copy of the full repo list.
        res_csv_name = "latest_openEuler_full_repos.csv"
        csv_title = ["Package", "assignee", "assignee_id", "repo_id", 'sig']
        CSVRW.save_by_row(res_csv_name, src_pkg_data, csv_title)

        # Uncomment to upload the result to oect.
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version="master")
        # upload_files = [global_config.GITEE_OE_FULL_REPO_SIGS]
        # Command().osc_remove(project_record_path, upload_files)

    @staticmethod
    def process_list_community_repos():
        """List the repos of the hard-coded community organizations."""
        orgs = ['src-anolis-epao']
        gitee_api_handle = GiteeAPI(version='v5')
        gitee_api_handle.get_community_repos(orgs)

    @staticmethod
    def process_analyse_packages_language(packages_spec_path, packages=None):
        """Analyse the build languages of packages and save them to csv.

        Args:
            packages_spec_path: directory containing the *.spec files.
            packages: optional list of package names; defaults to every spec
                file found in ``packages_spec_path``.
        """
        res_csv_file_name = 'Packages_Languages.csv'
        csv_title = ['Package', 'Languages', 'Detail']
        packages_languages = []
        ana = IdentifyBUildDependLanguage(packages_spec_path)
        if not packages:
            specs = os.listdir(packages_spec_path)
            packages = [spec_file.replace('.spec', '') for spec_file in specs]

        for pkg in packages:
            build_requires, build_prase = ana.get_build_info(pkg)
            languages, detail = ana.analyse_languages(build_requires, build_prase)
            packages_languages.append([pkg, languages, detail])

        CSVRW.save_by_row(res_csv_file_name, packages_languages, csv_title)

    @staticmethod
    def process_create_package_build_issue(ori_args):
        """Create package build issues on Gitee.

        Args:
            ori_args: dict or argparse.Namespace carrying the issue fields.

        Returns:
            True (kept for interface compatibility).
        """
        import argparse
        gitee_handle = GiteeAPI(version='v8', enterprise='openEuler')
        args = vars(ori_args) if isinstance(ori_args, argparse.Namespace) else ori_args
        gitee_handle.create_issues(args)
        return True

    @staticmethod
    def process_update_issue(ori_args):
        """Update an existing Gitee issue.

        Args:
            ori_args: dict or argparse.Namespace carrying the issue fields.

        Returns:
            True (kept for interface compatibility).
        """
        import argparse
        gitee_handle = GiteeAPI(version='v8', enterprise='openEuler')
        args = vars(ori_args) if isinstance(ori_args, argparse.Namespace) else ori_args
        gitee_handle.update_issue(args)
        return True

    @staticmethod
    def process_refresh_enterprise_member_ids():
        """Refresh the cached Gitee (v8) enterprise member id mapping."""
        gitee_handle = GiteeAPI(version='v8', enterprise='openEuler')
        gitee_handle.refresh_enterprise_member_ids_v8()

    @staticmethod
    def process_refresh_enterprise_projects_id():
        """Refresh the cached Gitee (v8) enterprise project id mapping."""
        gitee_handle = GiteeAPI(version='v8', enterprise='openEuler')
        gitee_handle.refresh_enterprise_projects_id()

    @staticmethod
    def process_get_repo_from_openeuler(branch, repo_dir, repo_types, repo_arch):
        """Generate the rpm info list of a specific openEuler repo.

        Args:
            branch: repo branch to crawl.
            repo_dir: sub-directory under the branch.
            repo_types: repo types to crawl (e.g. everything/EPOL).
            repo_arch: target architecture.
        """
        repo_analyse_handle = RepoAnalyse()
        # The csv name is kept for the optional oect upload below.
        __, __, result_csv_name = repo_analyse_handle.crawling_rpm_list_with_repos(
            repo_url=constant.OPENEULER_REPO_URL,
            branch=branch,
            repo_dir=repo_dir,
            repo_types=repo_types,
            repo_arch=repo_arch)

        # Uncomment to upload the result to oect.
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version=branch)
        # upload_files = [result_csv_name]
        # Command().osc_remove(project_record_path, upload_files)

    @staticmethod
    def process_diff_2_repos_from_obs(old_branch, new_branch, repo_types, repo_arch,
                                      new_repo_dir='rc6_openeuler-2023-06-29-20-25-36'):
        """Diff rpm versions between a daily-build repo and a released repo.

        Args:
            old_branch: released branch, crawled from the openEuler repo site.
            new_branch: branch crawled from the EBS daily-build site.
            repo_types: repo types to crawl.
            repo_arch: target architecture.
            new_repo_dir: daily-build directory; the previously hard-coded
                value is kept as the default for backward compatibility.
        """
        result_file_name = f'{new_branch}_diff_{old_branch}.csv'
        repo_analyse_handle = RepoAnalyse()
        new_pkgs, new_pkgs_info, new_crawling_res_file = repo_analyse_handle.crawling_rpm_list_with_repos(
            repo_url=constant.EBS_DAILYBUILD_URL,
            branch=new_branch,
            repo_dir=new_repo_dir,
            repo_types=repo_types,
            repo_arch=repo_arch)

        old_pkgs, old_pkgs_info, old_crawling_res_file = repo_analyse_handle.crawling_rpm_list_with_repos(
            repo_url=constant.OPENEULER_REPO_URL,
            branch=old_branch,
            repo_dir='',
            repo_types=repo_types,
            repo_arch=repo_arch)

        # Diff over the union so additions and removals both show up.
        full_pkgs = new_pkgs.union(old_pkgs)
        repo_analyse_handle.diff2repo_version(full_pkgs, old_pkgs_info, new_pkgs_info, result_file_name)

        # Uncomment to upload the results to oect.
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version=new_branch)
        # upload_files = [result_file_name, new_crawling_res_file, old_crawling_res_file]
        # Command().osc_remove(project_record_path, upload_files)

    @staticmethod
    def process_get_source_list(projects, branch='master', save=True):
        """Fetch the source package list of the given OBS projects.

        Args:
            projects: OBS project names.
            branch: unused here; kept for interface compatibility.
            save: forwarded to the OBS api (previously ignored and forced
                to True -- fixed).
        """
        pvd = OpenSuseOBSAPI()
        pvd.get_source_package(projects, save=save)

    @staticmethod
    def process_get_obs_project_packages_status_detail(branch, project, need_status):
        """Collect all packages of the projects whose build status matches.

        Args:
            branch: branch name; mandatory. Used to derive the project list
                and the result file names.
            project: explicit project list; when empty it is derived from
                ``branch`` via GITEE_BRANCH_PROJECT_MAPPING.
            need_status: status values to report, e.g. 'finished', 'failed',
                'unresolvable'.

        Returns:
            (pkgs, _bad_details, unresolvable_details). NOTE(review): the
            first two only cover the LAST project processed; the accumulated
            data goes to the saved csv -- confirm callers expect this.

        Raises:
            OectProcessEventException: when ``branch`` is empty.
        """
        from src.community.openeuler_community_info_query import sig_info_query

        unresolvable_info = dict()
        result_data = dict()
        res_sig_names = set()

        # Build the full list of projects to process and the result names.
        if branch:
            if not project:
                project = constant.GITEE_BRANCH_PROJECT_MAPPING[branch]
                res_csv_name = '_'.join([branch, 'problematic_package.csv'])
                res_unres_csv_name = '_'.join([branch, 'analyse_unresolvable.csv'])
            else:
                res_csv_name = 'problematic_package.csv'
                res_unres_csv_name = "analyse_unresolvable.csv"
        else:
            raise OectProcessEventException(101)

        pvd = OpenSuseOBSAPI()
        for proj in project:
            url = pvd._url(url="build/{project}/_result", project=proj)
            logger.info(url)
            response = http.get(url, auth=pvd._auth)

            if response.status_code != 200:
                logger.error("response.status_code: %s", response.status_code)
                continue

            project_etree_element = etree.HTML(response.text)
            pkgs, _bad_details = pvd.parse_build_result(
                proj, project_etree_element, unresolvable_info, res_sig_names, need_status)
            result_data.update(_bad_details)

        if not result_data:
            logger.warning("No Statistics For Package Problem in %s", project)
            return [], [], []

        CSVRW.save_by_row(res_csv_name, result_data, save_encoding='gbk')
        unresolvable_details = AnalyseBadPackage.analyse_unresolvable(unresolvable_info, branch)
        CSVRW.save_by_row(res_unres_csv_name, unresolvable_details, save_encoding='gbk')
        sig_info_query.analyse_and_upload_notify_emails(res_sig_names)

        # Uncomment to upload the results to oect.
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version=branch)
        # upload_files = [res_csv_name, res_unres_csv_name, 'notify_emails.txt']
        # Command().osc_remove(project_record_path, upload_files)

        logger.info("Generate report for bad packages of %s", project)
        return pkgs, _bad_details, unresolvable_details

    @staticmethod
    def process_load_src_bin_repodata(repos: dict, branch):
        """Download repo primary.xml files and build bin->src csv mappings.

        Args:
            repos: iterable of repo dicts with keys 'name', 'repodata_path'
                and 'arch'.
            branch: branch name used in the result file names.
        """
        start = time.monotonic()
        out_put_path = OECT_OUTPUT_PATH
        leek_handle = AnalyseRepoPrimaryData()
        res_name_one = '%s_everything_and_epol_bin_2_src.csv' % branch
        res_name_two = '%s_multi_version_bin_2_src.csv' % branch
        everything_and_epol_bin_2_src = dict()
        multi_version_bin_2_src = dict()

        for repo in repos:
            repo_name = repo.get("name", "")
            gz_file_path = leek_handle.download_primary_xml(repo.get('repodata_path', ""), out_put_path)
            if not gz_file_path:
                # Abort the whole run: partial csvs would be misleading.
                logger.error("No primary.xml.gz downloaded")
                return

            primary_xml_file = leek_handle.gzip_file(gz_file_path)
            repo_arch = repo.get("arch", "")
            xml_etree_element, xml_name_spaces = leek_handle.load_primary_xml(primary_xml_file)
            if repo_name == 'baseos':
                # baseos uses the arch-aware parser.
                everything_and_epol_bin_2_src.update(
                    leek_handle.turn_baseos_primary_2_dict(
                        xml_etree_element, xml_name_spaces, repo_name, repo_arch))
            elif repo_name in ["everything", "epol_main"]:
                everything_and_epol_bin_2_src.update(
                    leek_handle.turn_primary_2_dict(xml_etree_element, xml_name_spaces, repo_name))
            else:
                # Any other repo is treated as a multi-version repo.
                multi_version_bin_2_src.update(
                    leek_handle.turn_primary_2_dict(xml_etree_element, xml_name_spaces, repo_name))
            logger.info("==========Got %s bin_2src", repo_name)

        CSVRW.save_by_row(res_name_one, everything_and_epol_bin_2_src, save_encoding='gbk', save_mode='w+')
        CSVRW.save_by_row(res_name_two, multi_version_bin_2_src, save_encoding='gbk', save_mode='w+')

        # Uncomment to upload the results to oect.
        # upload_files = [res_name_one, res_name_two]
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version=branch)
        # Command().osc_remove(project_record_path, upload_files)

        logger.info("Load src repodata finished!")
        used_time = time.monotonic() - start
        logger.info("used_time:%s, load primary ", used_time)

    @staticmethod
    def process_load_src_bin_repodata2(repo_url, release):
        """Download one repo's primary.xml and load its bin->src mapping.

        Args:
            repo_url: repodata url to download primary.xml.gz from.
            release: unused here; kept for interface compatibility.
        """
        leek_handle = AnalyseRepoPrimaryData()

        out_put_path = OECT_OUTPUT_PATH
        gz_file_path = leek_handle.download_primary_xml(repo_url, out_put_path)
        if not gz_file_path:
            logger.error("No primary.xml.gz downloaded")
            return
        primary_xml_file = leek_handle.gzip_file(gz_file_path)
        leek_handle.load_bin_2_src(primary_xml_file, repo_url)

    def process_load_release_management_base_line(self, branch: str):
        """Load the release baseline yamls and save a package->project csv.

        Args:
            branch: branch name used in the result csv name.
        """
        release_src_base = dict()
        res_csv_name = '_'.join([branch, 'release_base']) + '.csv'

        baseline_yamls = [
            (global_config.RELEASE_BASE_LINE_EVERYTHING_YAML, 'everything'),
            # BUGFIX: baseos packages were previously mislabelled 'everything'
            # (copy-paste of the block above).
            (global_config.RELEASE_BASE_LINE_BASEOS_YAML, 'baseos'),
            (global_config.RELEASE_BASE_LINE_EPOL_YAML, 'epol'),
        ]
        for yaml_path, project in baseline_yamls:
            with os.fdopen(os.open(yaml_path, self.R_FLAG, self.R_MODES), "r") as fin:
                yaml_data = yaml.safe_load(fin)
                for package in yaml_data.get('packages'):
                    release_src_base[package['name']] = project

        CSVRW.save_by_row(res_csv_name, release_src_base, save_encoding='gbk', save_mode='w+')

        # Uncomment to upload the result to oect.
        # upload_files = [res_csv_name]
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version=branch)
        # Command().osc_remove(project_record_path, upload_files)

    def process_load_master_base_line(self, directory):
        """Load the master baseline from per-project pckg-mgmt.yaml files.

        Args:
            directory: root of the release-management checkout; every
                subdirectory (except 'delete' and '.git') is one project.
        """
        release_src_base = dict()
        res_csv_name = '_'.join(['master', 'release_base']) + '.csv'

        for subdir in os.listdir(directory):
            subdir_path = os.path.join(directory, subdir)
            if os.path.isdir(subdir_path) and subdir not in ['delete', '.git']:
                baseline_file = os.path.join(subdir_path, 'pckg-mgmt.yaml')
                with os.fdopen(os.open(baseline_file, self.R_FLAG, self.R_MODES), "r") as fin:
                    yaml_data = yaml.safe_load(fin)
                    for package in yaml_data.get('packages'):
                        release_src_base[package['name']] = subdir

        CSVRW.save_by_row(res_csv_name, release_src_base, save_encoding='gbk', save_mode='w+')

    def process_transfer_strategy_matrix_2_yaml(self, file=''):
        """Merge csv strategy data into the local strategy matrix yaml.

        Args:
            file: csv file with per-package strategy columns.

        Returns:
            True on success, False when either the local yaml or the input
            csv is missing/invalid.
        """
        res_yaml_file_name = 'package_compilation_optimization_strategy_matrix.yaml'
        if not os.path.exists(res_yaml_file_name):
            logger.warning("local %s not exist", res_yaml_file_name)
            return False
        with os.fdopen(os.open(res_yaml_file_name, self.R_FLAG, self.R_MODES), "r") as fin:
            origin_matrix = yaml.safe_load(fin)

        csv_data, csv_title = CSVRW.read_2_dict(file)
        if not csv_data or not csv_title:
            logger.warning("Invalid input csv file")
            return False

        # Only packages that declare a memory requirement are merged in.
        additional_matrix = dict(filter(lambda x: x[1]['memory_minimum'] > 0, csv_data.items()))

        for key, value in additional_matrix.items():
            if key in origin_matrix:
                # Existing entry: only refresh the memory requirement.
                origin_matrix[key]['memory_minimum'] = value['memory_minimum']
            else:
                origin_matrix[key] = value

        # Existence was verified above; rewrite the yaml atomically enough
        # for this tool's purposes (remove + recreate with safe modes).
        os.remove(res_yaml_file_name)
        with os.fdopen(os.open(res_yaml_file_name, self.W_FLAG, self.W_MODES), "w") as fout:
            yaml.dump(origin_matrix, fout)

        # Uncomment to upload the result to oect.
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version="Ccache")
        # upload_files = [res_yaml_file_name]
        # Command().osc_remove(project_record_path, upload_files)
        logger.info("Transfer %s finished!", res_yaml_file_name)
        return True

    def process_transfer_strategy_matrix_2_csv(self, yaml_file):
        """Dump a strategy matrix yaml back to csv ('test2.csv').

        Args:
            yaml_file: input yaml path.

        Returns:
            True on success, False when the yaml is missing.
        """
        res_csv_file_name = 'test2.csv'
        if not os.path.exists(yaml_file):
            logger.warning("local %s not exist", yaml_file)
            return False
        with os.fdopen(os.open(yaml_file, self.R_FLAG, self.R_MODES), "r") as fin:
            origin_matrix = yaml.safe_load(fin)

        CSVRW.save_by_row(res_csv_file_name, origin_matrix)
        logger.info("Transfer %s finished!", res_csv_file_name)
        return True

    def process_transfer_workers_2_yaml(self, file=''):
        """Convert a workers csv into a nested yaml and upload it to oect.

        Rows are grouped as data[col0][col1][header_name] = value.

        Args:
            file: input csv path.

        Returns:
            True on success.
        """
        import csv

        res_yaml_file_name = 'w1_test_workers.yaml'
        if os.path.exists(res_yaml_file_name):
            os.remove(res_yaml_file_name)

        # Read the csv into a list of rows.
        with open(file, 'r') as f:
            data = list(csv.reader(f))

        # Split the header row from the data rows.
        header = data[0]
        data = data[1:]

        # Group rows into a two-level dict keyed by the first two columns;
        # the remaining columns become the attribute dict.
        data_dict = dict()
        for row in data:
            data_dict.setdefault(row[0], dict())
            data_dict[row[0]][row[1]] = dict(zip(header[2:], row[2:]))

        # Write the result as yaml.
        with open(res_yaml_file_name, 'w') as f:
            f.write(yaml.dump(data_dict))

        project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version="Ccache")
        upload_files = [res_yaml_file_name]
        Command().osc_remove(project_record_path, upload_files)
        logger.info("Transfer %s finished!", res_yaml_file_name)
        return True

    def process_transfer_build_time_2_yaml(self, file=''):
        """Convert a build-time csv to yaml and upload it to oect.

        Args:
            file: csv with per-spec build times.

        Returns:
            True on success, False on invalid input csv.
        """
        res_yaml_file_name = 'everything_spec_name_build_time.yaml'
        csv_data, csv_title = CSVRW.read_build_time_2_dict(file)
        if not csv_data or not csv_title:
            logger.warning("Invalid input csv file")
            return False

        if os.path.exists(res_yaml_file_name):
            os.remove(res_yaml_file_name)

        with os.fdopen(os.open(res_yaml_file_name, self.W_FLAG, self.W_MODES), "w") as fout:
            yaml.dump(csv_data, fout)

        project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version="Ccache")
        upload_files = [res_yaml_file_name]
        Command().osc_remove(project_record_path, upload_files)
        logger.info("Transfer %s finished!", res_yaml_file_name)

        # Read back and sort spec names by build time (descending) as a
        # sanity check; inspected only via the commented log line below.
        with os.fdopen(os.open(res_yaml_file_name, self.R_FLAG, self.R_MODES), "r") as fin:
            spec_name_bt = yaml.safe_load(fin)

        spec_names = sorted(spec_name_bt, key=spec_name_bt.__getitem__, reverse=True)
        # logger.info("sorted list: %s", spec_names)
        return True

    def process_refresh_oe_repo_assignee_yaml(self):
        """Rebuild the local repo->assignee yaml from the owners csv.

        Returns:
            True on success, False on invalid input csv.
        """
        if os.path.exists(global_config.LOCAL_OE_REPO_ASSIGNEE_YAML):
            os.remove(global_config.LOCAL_OE_REPO_ASSIGNEE_YAML)

        csv_data, csv_title = CSVRW.read_build_time_2_dict(global_config.LOCAL_OPENEULER_OWNERS)
        if not csv_data or not csv_title:
            logger.warning("Invalid input csv file")
            return False

        with os.fdopen(os.open(global_config.LOCAL_OE_REPO_ASSIGNEE_YAML, self.W_FLAG, self.W_MODES), "w") as fout:
            yaml.dump(csv_data, fout)

        logger.info("Transfer %s finished!", global_config.LOCAL_OE_REPO_ASSIGNEE_YAML)

        return True

    def process_transfer_everything_build_time(self, file, repo, arch):
        """Extract per-spec build times from an EBS json dump into a csv.

        Args:
            file: json file containing the EBS build record.
            repo: repo name, used in the result file name.
            arch: architecture, used in the file name and as the csv column.
        """
        res_csv_file_name = '%s_%s_build_time.csv' % (repo, arch)
        res_csv_data = dict()
        with open(file, "r") as json_file_handler:
            spec_builds = json.load(json_file_handler)

        spec_time = spec_builds.get('_source', {}).get("spec_time", {})
        spec_build_times = spec_time.get("specs", {})

        for spec, bt in spec_build_times.items():
            # Strip the '.spec' suffix to get the package/spec name.
            spec_name = spec.split('.spec')[0]
            res_csv_data[spec_name] = {arch: bt}

        if os.path.exists(res_csv_file_name):
            os.remove(res_csv_file_name)
        CSVRW.save_by_row(res_csv_file_name, res_csv_data, save_encoding='gbk', save_mode='w+')

        # Uncomment to upload the result to oect.
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version="Ccache")
        # upload_files = [res_csv_file_name]
        # Command().osc_remove(project_record_path, upload_files)

        logger.info(f"Transfer {res_csv_file_name} finished!")

    def process_get_out_dep_nums(self, file=''):
        """Count outbound dependencies per spec from a dependency-graph json.

        Args:
            file: json file mapping spec name -> {'out_dep': [...], ...}.
        """
        res_csv_file_name = 'everything_out_dep_nums.csv'
        res_csv_data = dict()
        with open(file, "r") as json_file_handler:
            dcg_dict = json.load(json_file_handler)

        for spec_name, spec_deps in dcg_dict.items():
            res_csv_data[spec_name] = {'out_dep_num': len(spec_deps.get('out_dep', []))}

        if os.path.exists(res_csv_file_name):
            os.remove(res_csv_file_name)
        CSVRW.save_by_row(res_csv_file_name, res_csv_data, save_encoding='gbk', save_mode='w+')

        logger.info(f"Transfer {res_csv_file_name} finished!")

    @staticmethod
    def process_mock_ebs_web_post(query_body, url):
        """Post an elasticsearch-style query and log the first hit's _source.

        Args:
            query_body: json-serializable query body.
            url: endpoint to post to.
        """
        import requests

        headers = {'Content-Type': 'application/json'}
        res = requests.post(url, json=query_body, headers=headers).json()
        hits = res.get('hits', {}).get('hits', [])
        if not hits:
            return
        _source_info = hits[0].get('_source', {})
        logger.info("_source_info: %s", _source_info)

    def process_get_ebs_users(self, file=''):
        """Extract active users (_id -> owner) from an EBS json dump to csv.

        Args:
            file: json file containing a list of {'_id', '_source': {'owner'}}.
        """
        res_csv_file_name = 'active_users.csv'
        res_csv_data = dict()
        with open(file, encoding='utf-8') as json_file_handler:
            owners = json.load(json_file_handler)

        for item in owners:
            logger.info(item)
            res_csv_data[item['_id']] = item['_source']['owner']

        if os.path.exists(res_csv_file_name):
            os.remove(res_csv_file_name)
        CSVRW.save_by_row(res_csv_file_name, res_csv_data, save_encoding='gbk', save_mode='w+')

        logger.info(f"Transfer {res_csv_file_name} finished!")

    def process_get_ebs_deps(self, file=''):
        """Extract the 'dependencies' section from an EBS json dump to csv.

        Args:
            file: json file containing a 'dependencies' mapping.
        """
        res_csv_file_name = 'ebs_deps.csv'
        with open(file, encoding='utf-8') as json_file_handler:
            owners = json.load(json_file_handler)

        res_csv_data = owners.get("dependencies", {})

        if os.path.exists(res_csv_file_name):
            os.remove(res_csv_file_name)
        CSVRW.save_by_row(res_csv_file_name, res_csv_data, save_encoding='gbk', save_mode='w+')

        logger.info(f"Transfer {res_csv_file_name} finished!")