import json
import logging
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass  # only support python 3.7+
from typing import Any, List, Optional, Union

import lightgbm as lgb
import xgboost as xgb
from lightgbm.sklearn import LGBMClassifier, LGBMRegressor
from sklearn2pmml import PMMLPipeline, sklearn2pmml

from utils_hdfk_basic import LANGUAGE_SUFFIXES, qrepr, get_code_banner, get_file_suffix
from utils_hdfk_io import dump_pkl, dump_json, dump_m, dump_txt, load_pkl, load_json, load_m, load_txt

# Configure logging once at import time so parser progress messages are visible.
logging.basicConfig(level=logging.INFO)
# Module-level logger, named after this module per convention.
logger = logging.getLogger(__name__)

# __all__ = [
#     'AbstractParser',
#     'TreeModelParser',
#     'LGBModelParser',
#     'XGBModelParser']


class AbstractParser(metaclass=ABCMeta):
    """Interface for parsers that render their input as a source-code file."""

    @abstractmethod
    def parse(self, file_name: str, lang: str) -> None:
        """
        Parse the parser input to a code file and provide some useful function.

        :param file_name: output code file
        :param lang: output language
        :return: None (IO side effect: generate a code file)
        """


class TreeModelParser(AbstractParser):
    """
    Abstract tree-ensemble models parser.

    Concrete subclasses translate a trained gradient-boosting model into
    equivalent if/else prediction code in several target languages, and can
    also persist the model as a PMML file or native model/feature files.
    """

    @classmethod
    @abstractmethod
    def load_from(cls, model_file: str, feature_file: str):
        """
        Alternate constructor: build a models parser from serialized files.

        Signature fixed to match both concrete implementations, which take a
        model-file path and a feature-file path (not an in-memory feature list).

        :param model_file: path to the serialized model
        :param feature_file: path to the serialized feature-name list
        :return: models parser instance
        """
        pass

    @abstractmethod
    def parse_one_tree(self, tree: Any, idx: int, lang: str) -> str:
        """
        Parse one tree to a predict function.

        :param tree: object with tree structure
        :param idx: tree index
        :param lang: output language
        :return: code string of a predict function
        """
        pass

    @abstractmethod
    def parse_all_trees(self, trees: Any, lang: str) -> str:
        """
        Parse all trees to many predict functions.

        :param trees: objects with tree structure
        :param lang: output language
        :return: code string of many predict functions
        """
        pass

    @abstractmethod
    def logit(self, lang: str) -> str:
        """
        Logistic function to summarize all the predictions.

        :param lang: output language
        :return: code string
        """
        pass

    @abstractmethod
    def parse(self, file_name: str, lang: str) -> None:
        """
        Parse the whole models to a module file and provide the equivalent predict function.

        :param file_name: output file name
        :param lang: output language
        :return: None (IO side effect: generate a code file)
        """
        pass

    @abstractmethod
    def save_pmml(self, pmml_file: str) -> None:
        """
        Parse the tree models to pmml file and provide the commonality.

        :param pmml_file: output pmml file
        :return: None (IO side effect: generate a pmml file)
        """
        pass

    @abstractmethod
    def save_model(self, model_file: str) -> None:
        """
        Parse the tree models to models file and provide the commonality.

        :param model_file: output models file
        :return: None (IO side effect: generate a models file)
        """
        pass


class LGBModelParser(TreeModelParser):
    """
    LightGBM models parser.

    Translates a trained LightGBM model into standalone if/else prediction
    code (python/java/cpp/javascript/go), a PMML file, or model/feature files.
    """
    __slots__ = ("lgbm", "booster", "features")

    def __init__(self, lgbm: Union[lgb.Booster, LGBMClassifier, LGBMRegressor], features: List[str]):
        """
        Initialize method for building a LightGBM models parser.

        :param lgbm: lightgbm models, can be lgb.Booster (train with native api) or LGBMClassifier/LGBMRegressor (sklearn api)
        :param features: models variables, indexed by the booster's split_feature ids
        :raises TypeError: if ``lgbm`` is neither a Booster nor a sklearn wrapper
        """
        if isinstance(lgbm, (LGBMRegressor, LGBMClassifier)):
            booster = lgbm.booster_
            logger.info("LightGBM sklearn api is used while training this models!")
        elif isinstance(lgbm, lgb.Booster):
            booster = lgbm
            logger.info("LightGBM native api is used while training this models!")
        else:
            raise TypeError("Input should be a lightgbm booster or LGBMClassifier/LGBMRegressor!")
        self.lgbm = lgbm
        self.booster = booster
        assert features is not None, "Features should not be None!"
        self.features = features

    @classmethod
    def load_from(cls, model_file: str, feature_file: str):
        """
        Alternate constructor from model file.

        :param model_file: models file (.pkl/.m/.json, otherwise LightGBM's native text format)
        :param feature_file: models feature file (.pkl/.m/.json, otherwise plain text)
        :return: models parser instance
        """
        _, model_suffix = get_file_suffix(model_file)
        _, feature_suffix = get_file_suffix(feature_file)
        if model_suffix == '.pkl':
            lgbm = load_pkl(model_file)
        elif model_suffix == '.m':
            lgbm = load_m(model_file)
        elif model_suffix == '.json':
            lgbm = load_json(model_file)
        else:
            # Fall back to LightGBM's native text model format.
            lgbm = lgb.Booster(model_file=model_file)

        if feature_suffix == '.pkl':
            features = load_pkl(feature_file)
        elif feature_suffix == '.m':
            features = load_m(feature_file)
        elif feature_suffix == '.json':
            features = load_json(feature_file)
        else:
            features = load_txt(feature_file)
        return cls(lgbm, features)

    def parse_one_tree(self, tree: dict, idx: int, lang: str = 'python') -> str:
        """
        Parse one tree to a predict function.

        :param tree: object with tree structure (a node dict from Booster.dump_model)
        :param idx: tree index
        :param lang: output language
        :return: code string of a predict function
        """

        def if_else(node: dict, depth: int, lang: str):
            # Recursively render a dumped node as nested if/else statements.
            indent = "    " * depth
            if 'leaf_index' in node:
                # Leaf: emit the raw margin contribution of this tree.
                leaf_value = node['leaf_value']
                if lang in ('python', 'go'):
                    return f"{indent}return {float(leaf_value)}"
                elif lang in ('java', 'cpp', 'javascript'):
                    return f"{indent}return {float(leaf_value)};"
            else:
                decision_type, threshold = node['decision_type'], node['threshold']
                left, right = if_else(node['left_child'], depth + 1, lang), if_else(node['right_child'], depth + 1,
                                                                                    lang)
                if lang == 'python':
                    split_feature = repr(self.features[node['split_feature']])
                    return f"{indent}if d[{split_feature}] {decision_type} {threshold}:\n{left}\n{indent}else:\n{right}"
                elif lang == 'java':
                    split_feature = qrepr(self.features[node['split_feature']])
                    return f"{indent}if (d.get({split_feature}) {decision_type} {threshold}) {{\n{left}\n{indent}}} else {{\n{right}\n{indent}}}"
                elif lang == 'cpp':
                    split_feature = qrepr(self.features[node['split_feature']])
                    return f"{indent}if (d.find({split_feature})->second {decision_type} {threshold}) {{\n{left}\n{indent}}} else {{\n{right}\n{indent}}}"
                elif lang == 'javascript':
                    split_feature = qrepr(self.features[node['split_feature']])
                    return f"{indent}if (d[{split_feature}] {decision_type} {threshold}) {{\n{left}\n{indent}}} else {{\n{right}\n{indent}}}"
                elif lang == 'go':
                    split_feature = qrepr(self.features[node['split_feature']])
                    return f"{indent}if d[{split_feature}] {decision_type} {threshold} {{\n{left}\n{indent}}} else {{\n{right}\n{indent}}}"

        if lang == 'python':
            return f"""def predict_tree_{idx}(d):
{if_else(tree, 1, lang)}
"""
        elif lang == 'java':
            return f"""    private double predict_tree_{idx}(Map<String, Double> d) {{
{if_else(tree, 2, lang)}
    }}
"""
        elif lang == 'cpp':
            return f"""    const double predict_tree_{idx}(const unordered_map<string, double> &d) {{
{if_else(tree, 2, lang)}
    }}
"""
        elif lang == 'javascript':
            return f"""function predict_tree_{idx}(d) {{
{if_else(tree, 1, lang)}
}}
"""
        elif lang == 'go':
            return f"""func predict_tree_{idx}(d map[string]float64) float64 {{
{if_else(tree, 1, lang)}
}}
"""

    def parse_all_trees(self, trees: list, lang: str) -> str:
        """
        Parse all trees to many predict functions.

        :param trees: list of objects with tree structure
        :param lang: output language
        :return: code string of many predict functions plus a predict_tree that sums them
        """
        predict_trees = '\n\n'.join(
            self.parse_one_tree(tree['tree_structure'], idx, lang) for idx, tree in enumerate(trees))
        # Boosting: the final raw score is the sum of every tree's output.
        final_return = ' + '.join(f"predict_tree_{i}(d)" for i in range(len(trees)))
        if lang == 'python':
            return f"""{predict_trees}

def predict_tree(d):
    return {final_return}
"""
        elif lang == 'java':
            return f"""{predict_trees}

    private double predict_tree(Map<String, Double> d) {{
        return {final_return};
    }}
"""
        elif lang == 'cpp':
            return f"""{predict_trees}

    const double predict_tree(const unordered_map<string, double> &d) {{
        return {final_return};
    }}
"""
        elif lang == 'javascript':
            return f"""{predict_trees}

function predict_tree(d) {{
    return {final_return};
}}
"""
        elif lang == 'go':
            return f"""{predict_trees}

func predict_tree(d map[string]float64) float64 {{
    return {final_return}
}}
"""

    def logit(self, lang: str = 'python') -> str:
        """
        Use parse_logistic function to summary all the predictions.

        :param lang: output language
        :return: code string representing a standard parse_logistic function
        """
        if lang == 'python':
            return """def predict(d):
    import math
    return 1 / (1 + math.exp(-predict_tree(d)))
"""
        elif lang == 'java':
            return """    public double predict(Map<String, Double> d) {
        return 1.0 / (1.0 + Math.exp(-predict_tree(d)));
    }
"""
        elif lang == 'cpp':
            return """    const double predict(const unordered_map<string, double> &d) {
        return 1.0 / (1.0 + std::exp(-predict_tree(d)));
    }
"""
        elif lang == 'javascript':
            return """function predict(d) {
    return 1.0 / (1.0 + Math.exp(-predict_tree(d)));
}
"""
        elif lang == 'go':
            return """func predict(d map[string]float64) float64 {
    return 1.0 / (1.0 + math.Exp(-predict_tree(d)))
}
"""

    def parse(self, file_name: str = 'if_else.py', lang: str = 'python'):
        """
        Parse the lgbm models to a module file and provide the equivalent predict function.

        :param file_name: output code file
        :param lang: output language
        :return: None (IO side effect: generate a code file)
        :raises ValueError: if ``lang`` is not one of the supported languages
        """
        _, suffix = get_file_suffix(file_name)
        assert suffix in LANGUAGE_SUFFIXES[lang], f"Language {lang} has no file suffix {suffix}!"
        model_json = self.booster.dump_model()
        trees = model_json["tree_info"]
        n_trees = len(trees)
        from timeit import default_timer as timer
        start = timer()
        if lang in ('python', 'javascript'):
            code = f"""{get_code_banner(lang=lang)}

{self.parse_all_trees(trees, lang=lang)}

{self.logit(lang=lang)}
"""
        elif lang == 'java':
            code = f"""{get_code_banner(lang=lang)}
import java.util.Map;


public class LightGBModel {{
{self.parse_all_trees(trees, lang=lang)}

{self.logit(lang=lang)}
}}
"""
        elif lang == 'cpp':
            code = f"""{get_code_banner(lang=lang)}
#include <unordered_map>
#include <string>

using namespace std;

class LightGBModel {{
private:
{self.parse_all_trees(trees, lang=lang)}
public:
{self.logit(lang=lang)}
}}
"""
        elif lang == 'go':
            code = f'''{get_code_banner(lang=lang)}
package main

import "math"

{self.parse_all_trees(trees, lang=lang)}

{self.logit(lang=lang)}
'''
        else:
            # Previously `code` was left unbound here, crashing f.write() with a
            # NameError (the suffix assert above is stripped under `python -O`).
            raise ValueError(f"Unsupported output language: {lang}!")
        with open(file_name, 'w', encoding='utf-8') as f:
            f.write(code)
        end = timer()
        # Count lines with a context manager so the handle is always closed.
        with open(file_name, encoding='utf-8') as f:
            n_lines = sum(1 for _ in f)
        logger.info(
            "Parsing trees done! (Refer to file: {})\n\t"
            "Module: {}\n\t"
            "# of trees: {}\n\t"
            "# of lines: {}\n\t"
            "generate time: {:.3f} s"
            .format(file_name, file_name, n_trees, n_lines, end - start)
        )

    def save_pmml(self, pmml_file: str = 'lgbm.pmml') -> None:
        """
        Parse the lgbm models to pmml file and provide the commonality.

        :param pmml_file: output pmml file
        :return: None (IO side effect: generate a pmml file)
        :raises TypeError: if the wrapped model is not a LGBMClassifier
        """
        # Use the classes imported at module top for consistency with __init__
        # (lgb.sklearn.LGBMRegressor is the same class as LGBMRegressor).
        if isinstance(self.lgbm, (lgb.Booster, LGBMRegressor)):
            raise TypeError("For pmml conversion, you need a LGBMClassifier!")

        model_json = self.booster.dump_model()
        n_trees = len(model_json['tree_info'])
        from timeit import default_timer as timer
        start = timer()
        pipeline = PMMLPipeline([("classifier", self.lgbm)])
        # TODO: pmml can emit the original variable names directly, but that
        # requires training through a pipeline.
        sklearn2pmml(pipeline, pmml_file, with_repr=False)
        end = timer()
        with open(pmml_file, encoding='utf-8') as f:
            n_lines = sum(1 for _ in f)
        logger.info(
            "Saving pmml done! (Refer to file: {})\n\t"
            "PMML: {}\n\t"
            "# of trees: {}\n\t"
            "# of lines: {}\n\t"
            "time: {:.3f} s"
            .format(pmml_file, pmml_file, n_trees, n_lines, end - start)
        )

    def save_model(self, model_file: str = 'lgbm_model.pkl', feature_file: str = 'lgbm_feature.pkl') -> None:
        """
        Parse the lgbm models to models file and provide the commonality.

        :param model_file: output model file (.pkl/.m/.json, otherwise LightGBM's native format)
        :param feature_file: output feature file (.pkl/.m/.json, otherwise plain text)
        :return: None (IO side effect: generate model file and feature file)
        """
        _, model_suffix = get_file_suffix(model_file)
        _, feature_suffix = get_file_suffix(feature_file)
        from timeit import default_timer as timer
        start = timer()
        if model_suffix == '.pkl':
            dump_pkl(self.lgbm, model_file)
        elif model_suffix == '.m':
            dump_m(self.lgbm, model_file)
        elif model_suffix == '.json':
            dump_json(self.lgbm, model_file)
        else:
            self.booster.save_model(model_file)

        if feature_suffix == '.pkl':
            dump_pkl(self.features, feature_file)
        elif feature_suffix == '.m':
            dump_m(self.features, feature_file)
        elif feature_suffix == '.json':
            dump_json(self.features, feature_file)
        else:
            dump_txt(self.features, feature_file)

        model_json = self.booster.dump_model()
        n_trees = len(model_json['tree_info'])
        end = timer()
        # Binary mode: the model file may be a pickle; count lines and close the handle.
        with open(model_file, 'rb') as f:
            n_lines = sum(1 for _ in f)
        logger.info(
            "Saving model and feature done! (Refer to file: {}, {})\n\t"
            "Model: {}\n\t"
            "# of features: {}\n\t"
            "# of trees: {}\n\t"
            "# of lines: {}\n\t"
            "time: {:.3f} s"
            .format(model_file, feature_file, model_file, len(self.features), n_trees, n_lines, end - start)
        )


@dataclass
class Node(object):
    """A single XGBoost tree node: either an internal split or a leaf."""
    feature: Optional[str] = None      # split feature name (internal nodes only)
    node_id: Optional[int] = None      # id from the dumped model's 'nodeid'
    left_id: Optional[int] = None      # child taken when the condition holds ('yes')
    right_id: Optional[int] = None     # child taken otherwise ('no')
    missing_id: Optional[int] = None   # child taken for missing values
    leaf_val: Optional[float] = None   # prediction value (leaves only)
    split_val: Optional[float] = None  # split threshold (internal nodes only)
    is_leaf: bool = False              # True for leaves


@dataclass
class Tree(object):
    """Recursive binary-tree wrapper linking Node objects into a tree."""
    node: Node                     # payload node at this position
    left: Optional['Tree'] = None  # subtree for node.left_id
    right: Optional['Tree'] = None  # subtree for node.right_id


class XGBModelParser(TreeModelParser):
    """
    XGBoost models parser.

    Translates a trained XGBoost model into standalone if/else prediction
    code (python/java/cpp/javascript/go), a PMML file, or model/feature files.
    """
    # Include every attribute assigned on instances (node_list / predict_trees
    # were missing from the original tuple).
    __slots__ = ('xgbm', 'booster', 'features', 'node_list', 'predict_trees')

    def __init__(self, xgbm: Union[xgb.core.Booster, xgb.sklearn.XGBClassifier, xgb.sklearn.XGBRegressor],
                 features: List[str]):
        """
        Initialize method for building a XGB models parser.

        :param xgbm: xgboost booster or sklearn Regressor/Classifier
        :param features: models variables, indexed by the booster's 'fN' feature ids
        :raises TypeError: if ``xgbm`` is neither a Booster nor a sklearn wrapper
        """
        assert features is not None, "Features should not be None!"
        self.features = features
        if isinstance(xgbm, xgb.core.Booster):
            booster = xgbm
        elif isinstance(xgbm, (xgb.sklearn.XGBClassifier, xgb.sklearn.XGBRegressor)):
            booster = xgbm.get_booster()
        else:
            raise TypeError("Input should be a xgboost booster or XGBClassifier/XGBRegressor!")
        # BUG FIX: the original never stored the raw model, so save_pmml() and
        # save_model() crashed with AttributeError on self.xgbm.
        self.xgbm = xgbm
        self.booster = booster
        model_json = booster.get_dump(dump_format='json')
        trees = list(map(json.loads, model_json))

        # Rebuild each dumped tree as a nested Tree of Node objects.
        self.predict_trees = []
        for tree in trees:
            self.node_list = {}
            self.build_nodes(tree)
            root = Tree(node=self.node_list[0])
            self.build_tree(root)
            self.predict_trees.append(root)

    def build_nodes(self, d: dict) -> None:
        """
        Build one tree's node list.

        :param d: tree structured object, i.e. dict
        :return: None (Side effect: fill in node list)
        """
        if 'leaf' in d:
            node = Node(node_id=d['nodeid'], leaf_val=d['leaf'], is_leaf=True)
            self.node_list[node.node_id] = node
            return None
        if d['split'] in self.features:
            feature = d['split']
        else:
            # XGBoost may dump anonymous features as 'f<index>'; map them back
            # to the user-supplied feature names.
            assert d['split'].startswith('f'), "Unknown feature encountered!"
            idx = int(d['split'][1:])
            feature = self.features[idx]
        node = Node(node_id=d['nodeid'], feature=feature, split_val=d['split_condition'], left_id=d['yes'],
                    right_id=d['no'], missing_id=d['missing'])
        self.node_list[node.node_id] = node
        for child in d['children']:
            self.build_nodes(child)

    def build_tree(self, tree: Tree):
        """
        Build one nest tree from its root.

        :param tree: tree root
        :return: None (Side effect: input tree is modified)
        """
        if tree.node.left_id is not None:
            tree.left = Tree(node=self.node_list[tree.node.left_id])
            self.build_tree(tree.left)
        if tree.node.right_id is not None:
            tree.right = Tree(node=self.node_list[tree.node.right_id])
            self.build_tree(tree.right)

    @classmethod
    def load_from(cls, model_file: str, feature_file: str):
        """
        Alternate constructor from models file.

        :param model_file: models file (.pkl/.m/.json, otherwise XGBoost's native format)
        :param feature_file: models feature file (.pkl/.m/.json, otherwise plain text)
        :return: models parser instance
        """
        _, model_suffix = get_file_suffix(model_file)
        _, feature_suffix = get_file_suffix(feature_file)
        if model_suffix == '.pkl':
            xgbm = load_pkl(model_file)
        elif model_suffix == '.m':
            xgbm = load_m(model_file)
        elif model_suffix == '.json':
            xgbm = load_json(model_file)
        else:
            xgbm = xgb.Booster(model_file=model_file)

        if feature_suffix == '.pkl':
            features = load_pkl(feature_file)
        elif feature_suffix == '.m':
            features = load_m(feature_file)
        elif feature_suffix == '.json':
            features = load_json(feature_file)
        else:
            features = load_txt(feature_file)
        return cls(xgbm, features)

    def parse_one_tree(self, tree: Tree, idx: int, lang: str = 'python') -> str:
        """
        Parse one tree to a predict function.

        :param tree: object with tree structure
        :param idx: tree index
        :param lang: output language
        :return: code string of a predict function
        """

        def if_else(tree: Tree, depth: int, lang: str) -> str:
            # Recursively render a Tree as nested if/else statements.
            is_leaf, leaf_val = tree.node.is_leaf, tree.node.leaf_val
            feature, split_val = tree.node.feature, tree.node.split_val
            indent = "    " * depth
            if is_leaf:
                if lang in ('python', 'go'):
                    return f"{indent}return {float(leaf_val)}"
                elif lang in ('java', 'cpp', 'javascript'):
                    return f"{indent}return {float(leaf_val)};"
            else:
                # NOTE: 'missing'-value routing is deliberately ignored — the
                # generated code assumes inputs never contain missing values.
                left_res, right_res = if_else(tree.left, depth + 1, lang), if_else(tree.right, depth + 1, lang)
                if lang == 'python':
                    return f"{indent}if d[{repr(feature)}] < {split_val}:\n{left_res}\n{indent}else:\n{right_res}"
                elif lang == 'java':
                    return f"{indent}if (d.get({qrepr(feature)}) < {split_val}) {{\n{left_res}\n{indent}}} else {{\n{right_res}\n{indent}}}"
                elif lang == 'cpp':
                    return f"{indent}if (d.find({qrepr(feature)})->second < {split_val}) {{\n{left_res}\n{indent}}} else {{\n{right_res}\n{indent}}}"
                elif lang == 'javascript':
                    return f"{indent}if (d[{qrepr(feature)}] < {split_val}) {{\n{left_res}\n{indent}}} else {{\n{right_res}\n{indent}}}"
                elif lang == 'go':
                    # BUG FIX: the original interpolated the child node ids
                    # (left/right) instead of the rendered subtrees.
                    return f"{indent}if d[{qrepr(feature)}] < {split_val} {{\n{left_res}\n{indent}}} else {{\n{right_res}\n{indent}}}"

        if lang == 'python':
            return f"""def predict_tree_{idx}(d):
{if_else(tree, 1, lang)}
"""
        elif lang == 'java':
            return f"""    private double predict_tree_{idx}(Map<String, Double> d) {{
{if_else(tree, 2, lang)}
    }}
"""
        elif lang == 'cpp':
            return f"""    const double predict_tree_{idx}(const unordered_map<string, double> &d) {{
{if_else(tree, 2, lang)}
    }}
"""
        elif lang == 'javascript':
            return f"""function predict_tree_{idx}(d) {{
{if_else(tree, 1, lang)}
}}
"""
        elif lang == 'go':
            # BUG FIX: the original emitted `map[string](float64)` with an
            # unbalanced parenthesis — invalid Go.
            return f"""func predict_tree_{idx}(d map[string]float64) float64 {{
{if_else(tree, 1, lang)}
}}
"""

    def parse_all_trees(self, trees: List[Tree], lang: str = 'python') -> str:
        """
        Parse all trees to many predict functions.

        :param trees: list of objects with tree structure
        :param lang: output language
        :return: code string of many predict functions plus a predict_tree that sums them
        """
        predict_trees = '\n'.join(self.parse_one_tree(tree, idx, lang) for idx, tree in enumerate(trees))
        # Boosting: the final raw score is the sum of every tree's output.
        final_return = ' + '.join(f"predict_tree_{i}(d)" for i in range(len(trees)))
        if lang == 'python':
            return f"""{predict_trees}

def predict_tree(d):
    return {final_return}
"""
        elif lang == 'java':
            return f"""{predict_trees}

    private double predict_tree(Map<String, Double> d) {{
        return {final_return};
    }}
"""
        elif lang == 'cpp':
            return f"""{predict_trees}

    const double predict_tree(const unordered_map<string, double> &d) {{
        return {final_return};
    }}
"""
        elif lang == 'javascript':
            return f"""{predict_trees}

function predict_tree(d) {{
    return {final_return};
}}
"""
        elif lang == 'go':
            return f"""{predict_trees}

func predict_tree(d map[string]float64) float64 {{
    return {final_return}
}}
"""

    def logit(self, lang: str = 'python') -> str:
        """
        Use parse_logistic function to summary all the predictions.

        :param lang: output language
        :return: code string representing a standard parse_logistic function
        """
        if lang == 'python':
            return """def predict(d):
    import math
    return 1 / (1 + math.exp(-predict_tree(d)))
"""
        elif lang == 'java':
            return """    public double predict(Map<String, Double> d) {
        return 1.0 / (1.0 + Math.exp(-predict_tree(d)));
    }
"""
        elif lang == 'cpp':
            return """    const double predict(const unordered_map<string, double> &d) {
        return 1.0 / (1.0 + std::exp(-predict_tree(d)));
    }
"""
        elif lang == 'javascript':
            return """function predict(d) {
    return 1.0 / (1.0 + Math.exp(-predict_tree(d)));
}
"""
        elif lang == 'go':
            return """func predict(d map[string]float64) float64 {
    return 1.0 / (1.0 + math.Exp(-predict_tree(d)))
}
"""

    def parse(self, file_name: str = 'if_else.py', lang: str = 'python'):
        """
        Parse the xgboost models to a module file and provide the equivalent predict function.

        :param file_name: code file contains if else logic
        :param lang: output language
        :return: None (IO side effect: generate a code file)
        :raises ValueError: if ``lang`` is not one of the supported languages
        """
        _, suffix = get_file_suffix(file_name)
        assert suffix in LANGUAGE_SUFFIXES[lang], f"Language {lang} has no file suffix {suffix}!"
        from timeit import default_timer as timer
        trees = self.predict_trees
        n_trees = len(trees)
        start = timer()
        if lang in ('python', 'javascript'):
            code = f"""{get_code_banner(lang=lang)}

{self.parse_all_trees(trees, lang=lang)}

{self.logit(lang=lang)}
"""
        elif lang == 'java':
            code = f"""{get_code_banner(lang=lang)}
import java.util.Map;


public class XGBModel {{
{self.parse_all_trees(trees, lang=lang)}

{self.logit(lang=lang)}
}}
"""
        elif lang == 'cpp':
            code = f"""{get_code_banner(lang=lang)}
#include <unordered_map>
#include <string>

using namespace std;

class XGBModel {{
private:
{self.parse_all_trees(trees, lang=lang)}
public:
{self.logit(lang=lang)}
}}
"""
        elif lang == 'go':
            code = f'''{get_code_banner(lang=lang)}
package main

import "math"

{self.parse_all_trees(trees, lang=lang)}

{self.logit(lang=lang)}
'''
        else:
            # Previously `code` was left unbound here, crashing f.write() with a
            # NameError (the suffix assert above is stripped under `python -O`).
            raise ValueError(f"Unsupported output language: {lang}!")
        with open(file_name, 'w', encoding='utf-8') as f:
            f.write(code)
        end = timer()
        # Count lines with a context manager so the handle is always closed.
        with open(file_name, encoding='utf-8') as f:
            n_lines = sum(1 for _ in f)
        logger.info(
            "Parsing trees done! (Refer to file: {})\n\t"
            "Module: {}\n\t"
            "# of trees: {}\n\t"
            "# of lines: {}\n\t"
            "time: {:.3f} s"
            .format(file_name, file_name, n_trees, n_lines, end - start)
        )

    def save_pmml(self, pmml_file: str = 'xgbm.pmml') -> None:
        """
        Parse the xgbm models to pmml file and provide the commonality.

        :param pmml_file: output pmml file
        :return: None (IO side effect: generate a pmml file)
        :raises TypeError: if the wrapped model is not a XGBClassifier
        """
        if isinstance(self.xgbm, (xgb.core.Booster, xgb.sklearn.XGBRegressor)):
            raise TypeError("For pmml conversion, you need a XGBClassifier!")
        from timeit import default_timer as timer
        start = timer()
        model_json = self.booster.get_dump(dump_format='json')
        trees = list(map(json.loads, model_json))
        n_trees = len(trees)
        pipeline = PMMLPipeline([("classifier", self.xgbm)])
        sklearn2pmml(pipeline, pmml_file, with_repr=True, debug=True)
        end = timer()
        with open(pmml_file, encoding='utf-8') as f:
            n_lines = sum(1 for _ in f)
        logger.info(
            "Saving pmml done! (Refer to file: {})\n\t"
            "PMML: {}\n\t"
            "# of trees: {}\n\t"
            "# of lines: {}\n\t"
            "time: {:.3f} s"
            .format(pmml_file, pmml_file, n_trees, n_lines, end - start)
        )

    def save_model(self, model_file: str = 'xgbm.models', feature_file: str = 'xgbm.feature') -> None:
        """
        Parse the xgbm models to models file and provide the commonality.

        :param model_file: output models file (.pkl/.m/.json, otherwise XGBoost's native format)
        :param feature_file: output feature file (.pkl/.m/.json, otherwise plain text)
        :return: None (IO side effect: generate models file and feature file)
        """
        _, model_suffix = get_file_suffix(model_file)
        _, feature_suffix = get_file_suffix(feature_file)
        from timeit import default_timer as timer
        start = timer()
        if model_suffix == '.pkl':
            dump_pkl(self.xgbm, model_file)
        elif model_suffix == '.m':
            dump_m(self.xgbm, model_file)
        elif model_suffix == '.json':
            dump_json(self.xgbm, model_file)
        else:
            self.booster.save_model(model_file)

        if feature_suffix == '.pkl':
            dump_pkl(self.features, feature_file)
        elif feature_suffix == '.m':
            dump_m(self.features, feature_file)
        elif feature_suffix == '.json':
            dump_json(self.features, feature_file)
        else:
            dump_txt(self.features, feature_file)

        model_json = self.booster.get_dump(dump_format='json')
        trees = list(map(json.loads, model_json))
        n_trees = len(trees)
        end = timer()
        # Binary mode: the model file may be a pickle; count lines and close the handle.
        with open(model_file, 'rb') as f:
            n_lines = sum(1 for _ in f)
        logger.info(
            "Saving model and feature done! (Refer to file: {}, {})\n\t"
            "Model: {}\n\t"
            "# of features: {}\n\t"
            "# of trees: {}\n\t"
            "# of lines: {}\n\t"
            "time: {:.3f} s"
            .format(model_file, feature_file, model_file, len(self.features), n_trees, n_lines, end - start)
        )
