#! /usr/bin/python3
# coding=utf-8
# ******************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
# licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#     http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# Author: senlin
# Create: 2022-1-19
# ******************************************************************************/

import os
import re
import time
import stat
import yaml
import json

from src.libs.csvrw import CSVRW
from src.libs.logger import logger
from src.config import global_config
from src.release.analyse_repo_primary import AnalyseRepoPrimaryData
from src.config.constant import OECT_OUTPUT_PATH


class AllCommands(object):

    # os.open() flag/mode constants shared by all file reads and writes below.
    # Read: open read-only; created with owner-read permission.
    R_FLAG = os.O_RDONLY
    R_MODES = stat.S_IRUSR
    # Write: create if missing, write-only; created owner read/write (0o600).
    W_FLAG = os.O_WRONLY | os.O_CREAT
    W_MODES = stat.S_IWUSR | stat.S_IRUSR


    @staticmethod
    def process_load_src_bin_repodata(repos: list, branch):
        """Download and parse each repo's primary.xml, then dump binary-to-source
        mappings into two branch-prefixed CSV files.

        Args:
            repos: list of repo descriptors; each is a dict with keys
                "name", "repodata_path" and "arch".
            branch: branch name used as prefix for the result CSV files.

        Returns:
            None. Side effect: writes
            "<branch>_everything_and_epol_bin_2_src.csv" and
            "<branch>_multi_version_bin_2_src.csv" (gbk). Returns early
            without writing anything if a primary.xml.gz download fails.
        """
        start = time.monotonic()
        out_put_path = OECT_OUTPUT_PATH
        leek_handle = AnalyseRepoPrimaryData()
        res_name_one = '%s_everything_and_epol_bin_2_src.csv' % branch
        res_name_two = '%s_multi_version_bin_2_src.csv' % branch
        everything_and_epol_bin_2_src = dict()
        multi_version_bin_2_src = dict()

        for repo in repos:
            repo_name = repo.get("name", "")
            gz_file_path = leek_handle.download_primary_xml(repo.get('repodata_path', ""), out_put_path)
            if not gz_file_path:
                logger.error("No primary.xml.gz downloaded")
                return

            primary_xml_file = leek_handle.gzip_file(gz_file_path)
            repo_arch = repo.get("arch", "")
            xml_etree_element, xml_name_spaces = leek_handle.load_primary_xml(primary_xml_file)
            # baseos needs arch-aware parsing; everything/epol_main share the
            # same result dict; every other repo is treated as multi-version.
            if repo_name == 'baseos':
                everything_and_epol_bin_2_src.update(
                    leek_handle.turn_baseos_primary_2_dict(xml_etree_element, xml_name_spaces,
                                                           repo_name, repo_arch))
            elif repo_name in ["everything", "epol_main"]:
                everything_and_epol_bin_2_src.update(
                    leek_handle.turn_primary_2_dict(xml_etree_element, xml_name_spaces, repo_name))
            else:
                multi_version_bin_2_src.update(
                    leek_handle.turn_primary_2_dict(xml_etree_element, xml_name_spaces, repo_name))
            logger.info("==========Got %s bin_2src", repo_name)

        CSVRW.save_by_row(res_name_one, everything_and_epol_bin_2_src, save_encoding='gbk', save_mode='w+')
        CSVRW.save_by_row(res_name_two, multi_version_bin_2_src, save_encoding='gbk', save_mode='w+')

        logger.info("Load src repodata finished!")
        used_time = time.monotonic() - start
        # Lazy %-args instead of eager string formatting.
        logger.info("used_time:%s, load primary ", used_time)


    def _read_base_line_packages(self, yaml_path):
        """Return the package names listed under 'packages' in *yaml_path*."""
        with os.fdopen(os.open(yaml_path, self.R_FLAG, self.R_MODES), "r") as fin:
            yaml_data = yaml.safe_load(fin)
        # Tolerate a missing/empty 'packages' section instead of raising.
        return [package['name'] for package in yaml_data.get('packages') or []]

    def process_load_release_management_base_line(self, branch: str):
        """Build the release base-line CSV (package name -> project) from the
        everything/baseos/epol base-line YAML files.

        Args:
            branch: branch name used as prefix of the result CSV file name.

        Returns:
            None. Side effect: writes "<branch>_release_base.csv" (gbk).
        """
        release_src_base = dict()
        res_csv_name = '_'.join([branch, 'release_base']) + '.csv'

        # NOTE(review): baseos packages are labelled 'everything' here, the
        # same as the everything list (they share one result repo elsewhere
        # in this module) — confirm this is intentional, not a copy-paste.
        sources = [
            (global_config.RELEASE_BASE_LINE_EVERYTHING_YAML, 'everything'),
            (global_config.RELEASE_BASE_LINE_BASEOS_YAML, 'everything'),
            (global_config.RELEASE_BASE_LINE_EPOL_YAML, 'epol'),
        ]
        for yaml_path, project in sources:
            for pkg_name in self._read_base_line_packages(yaml_path):
                release_src_base[pkg_name] = project

        CSVRW.save_by_row(res_csv_name, release_src_base, save_encoding='gbk', save_mode='w+')

        # Re-enable to upload the result to oect:
        # upload_files = [res_csv_name]
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version=branch)
        # Command().osc_remove(project_record_path, upload_files)

    def process_load_master_base_line(self, directory):
        """Scan every project sub-directory of *directory* for a
        pckg-mgmt.yaml base line and dump package -> project rows to
        master_release_base.csv.

        Args:
            directory: root of the release-management checkout; the
                'delete' and '.git' sub-directories are skipped.

        Returns:
            None. Side effect: writes 'master_release_base.csv' (gbk).
        """
        release_src_base = dict()
        res_csv_name = '_'.join(['master', 'release_base']) + '.csv'

        skip_dirs = {'delete', '.git'}
        for subdir in os.listdir(directory):
            subdir_path = os.path.join(directory, subdir)
            # Guard clause instead of nesting the whole body.
            if not os.path.isdir(subdir_path) or subdir in skip_dirs:
                continue
            baseline_file = os.path.join(subdir_path, 'pckg-mgmt.yaml')
            with os.fdopen(os.open(baseline_file, self.R_FLAG, self.R_MODES), "r") as fin:
                yaml_data = yaml.safe_load(fin)
            # Tolerate a missing 'packages' key instead of raising TypeError.
            for package in yaml_data.get('packages') or []:
                release_src_base[package['name']] = subdir

        CSVRW.save_by_row(res_csv_name, release_src_base, save_encoding='gbk', save_mode='w+')

    def process_transfer_strategy_matrix_2_yaml(self, file=''):
        """Merge additional per-package build-strategy rows from *file* (CSV)
        into the local strategy-matrix YAML and rewrite it.

        Args:
            file: path of the input CSV; only rows with memory_minimum > 0
                are merged into the matrix.

        Returns:
            bool: True on success, False when the local YAML or the input
            CSV is missing/invalid.
        """
        res_yaml_file_name = 'package_compilation_optimization_strategy_matrix.yaml'
        if not os.path.exists(res_yaml_file_name):
            logger.warning("local %s not exist", res_yaml_file_name)
            return False
        with os.fdopen(os.open(res_yaml_file_name, self.R_FLAG, self.R_MODES), "r") as fin:
            origin_matrix = yaml.safe_load(fin)

        csv_data, csv_title = CSVRW.read_2_dict(file)
        if not csv_data or not csv_title:
            logger.warning("Invalid input csv file")
            return False

        # NOTE(review): assumes memory_minimum is numeric in csv_data; a raw
        # string value would raise TypeError here — confirm CSVRW converts it.
        additional_matrix = dict(filter(lambda x: x[1]['memory_minimum'] > 0, csv_data.items()))

        for key, value in additional_matrix.items():
            # Membership test directly on the dict, not on a keys() view.
            if key in origin_matrix:
                origin_matrix[key]['memory_minimum'] = value['memory_minimum']
            else:
                origin_matrix[key] = value

        # The file is known to exist (checked above), so remove and rewrite.
        os.remove(res_yaml_file_name)
        with os.fdopen(os.open(res_yaml_file_name, self.W_FLAG, self.W_MODES), "w") as fout:
            yaml.dump(origin_matrix, fout)
        # Re-enable to upload the result to oect:
        # upload_files = [res_yaml_file_name]
        # project_record_path = global_config.OE_PACKAGE_PROBLEMS_RECORD_PATH.format(oe_version="Ccache")
        # Command().osc_remove(project_record_path, upload_files)
        logger.info("Transfer %s finished!", res_yaml_file_name)
        return True
    
    def process_transfer_strategy_matrix_2_csv(self, yaml_file):
        """Convert a strategy-matrix YAML file to CSV ('test2.csv').

        Args:
            yaml_file: path of the YAML file to convert.

        Returns:
            bool: True on success, False when *yaml_file* does not exist.
        """
        res_csv_file_name = 'test2.csv'
        if not os.path.exists(yaml_file):
            logger.warning("local %s not exist", yaml_file)
            return False
        with os.fdopen(os.open(yaml_file, self.R_FLAG, self.R_MODES), "r") as fin:
            origin_matrix = yaml.safe_load(fin)

        CSVRW.save_by_row(res_csv_file_name, origin_matrix)
        logger.info("Transfer %s finished!", res_csv_file_name)
        return True

        
    @staticmethod
    def process_mock_ebs_web_post(query_body, url, timeout=30):
        """POST *query_body* as JSON to *url* and log the first search hit.

        Args:
            query_body: JSON-serialisable request body.
            url: target Elasticsearch-style query endpoint.
            timeout: request timeout in seconds (new parameter, default 30).

        Returns:
            None. Logs the '_source' of the first hit when present.
        """
        # Local import: requests is only needed for this debug helper.
        import requests

        headers = {'Content-Type': 'application/json'}
        # Always pass a timeout so an unresponsive endpoint cannot hang us.
        res = requests.post(url, json=query_body, headers=headers, timeout=timeout).json()
        hits = res.get('hits', {}).get('hits', [])
        if not hits:
            return
        _source_info = hits[0].get('_source', {})
        logger.info("_source_info: %s", _source_info)
        
    def process_get_ebs_users(self, file=''):
        """Extract _id -> owner pairs from an ES export JSON file and dump
        them to 'active_users.csv'.

        Args:
            file: path of a JSON file containing a list of documents, each
                with '_id' and '_source.owner' fields.

        Returns:
            None. Side effect: rewrites 'active_users.csv' (gbk).
        """
        res_csv_file_name = 'active_users.csv'
        with open(file, encoding='utf-8') as json_file_handler:
            owners = json.load(json_file_handler)

        res_csv_data = dict()
        for item in owners:
            logger.info(item)
            res_csv_data[item['_id']] = item['_source']['owner']

        if os.path.exists(res_csv_file_name):
            os.remove(res_csv_file_name)
        CSVRW.save_by_row(res_csv_file_name, res_csv_data, save_encoding='gbk', save_mode='w+')

        # Lazy %-args instead of an f-string in the log call.
        logger.info("Transfer %s finished!", res_csv_file_name)
        
    def process_get_ebs_deps(self, file=''):
        """Extract the 'dependencies' mapping from an EBS JSON export and
        dump it to 'ebs_deps.csv'.

        Args:
            file: path of the JSON file to read; its top-level object may
                contain a 'dependencies' mapping.

        Returns:
            None. Side effect: rewrites 'ebs_deps.csv' (gbk).
        """
        res_csv_file_name = 'ebs_deps.csv'
        with open(file, encoding='utf-8') as json_file_handler:
            owners = json.load(json_file_handler)

        res_csv_data = owners.get("dependencies", {})

        if os.path.exists(res_csv_file_name):
            os.remove(res_csv_file_name)
        CSVRW.save_by_row(res_csv_file_name, res_csv_data, save_encoding='gbk', save_mode='w+')

        logger.info("Transfer %s finished!", res_csv_file_name)