import ftplib
import shutil
import threading

from rftp import RFtp
from util import *
from const import *
import re
import os
from rgit import RGit
import datetime
import subprocess
import binascii
from rcfg import cfg
import io as bio
from sio import *
import logging


def file_check(*args):
    """Verify that every given path is an existing regular file.

    Args:
        *args: path-like objects / strings to check. Passing nothing
            is a no-op.

    Raises:
        FileNotFoundError: for the first path that is missing or not a
            regular file. (FileNotFoundError subclasses Exception, so
            callers that caught the previous bare Exception still work.)
    """
    for file in args:
        if not Path(file).is_file():
            raise FileNotFoundError(f'file not found: {file}')


def is_model(m) -> bool:
    """Return True if *m* is a model root directory.

    A model root is a directory that contains all three marker files:
    ``make.h``, ``env.h`` and ``Makefile``.

    Args:
        m: path-like object / string pointing at a candidate directory.

    Returns:
        bool: True when *m* is a directory holding all marker files.
    """
    p = Path(m)
    required = ('make.h', 'env.h', 'Makefile')
    # Return the boolean expression directly instead of the former
    # `if ...: return True / else: return False` ladder.
    return p.is_dir() and all(p.joinpath(name).is_file() for name in required)


def model_type(m) -> str:
    """Classify the model rooted at *m* by its model-name prefix.

    Returns 'elecom' for EHB models, 'me' for DGS/DES models, and an
    empty string when *m* is not a model directory or the name matches
    neither family.
    """
    if not is_model(m):
        return ''
    probe = Model(model_path=m)
    probe.pre_init()
    name = str(probe)
    if 'EHB' in name:
        return 'elecom'
    if 'DGS' in name or 'DES' in name:
        return 'me'
    return ''


def generate_commands(cmds):
    """Render a make target (named by the module-level *command_name*)
    whose recipe echoes every (key, value) pair as ``key=value``.
    """
    recipe = ''.join(f'\techo "{k}={v}"\n' for k, v in cmds.items())
    return f'{command_name}:\n{recipe}'


class Model:
    """One buildable firmware model, rooted at a directory that holds
    ``env.h``, ``make.h`` and a ``Makefile``.

    The class drives the model's make targets, reports git repository
    state, keeps release-note bookkeeping, and uploads produced files
    to an FTP server. Public entry points are serialized with a
    reentrant lock so a single Model instance can be shared across
    threads (note: ``_make`` changes the process-wide cwd, so builds
    for different models should not run concurrently in one process).
    """

    # Repository tags recognized in `make git-info` output rows.
    repo_model = ('ENV', 'BOOT', 'OS', 'SDK', 'ICORE', 'CUS')
    # Matches "NAME=VALUE" lines echoed by the injected make target.
    release_var_re = re.compile(r'(\S+)=(\S+)')
    # Make variables the injected target (see generate_commands())
    # echoes back; the echoed values populate self._release_var.
    makefile_commands = {
        'MODEL': '${MODEL_NAME}',
        'ROOT': '${MGM_PROJECT_BASE_DIR}',
        'DISPLAY_MODEL_NAME': '${DISPLAY_MODEL_NAME}',
        'BOOT_REPO': '$(MGM_PROJECT_BASE_DIR)/boot/$(BOOT_PACKAGE)',
        'OS_REPO': '$(MGM_PROJECT_BASE_DIR)/os/$(OS_PACKAGE)',
        'SDK_REPO': '$(MGM_PROJECT_BASE_DIR)/sdk/$(SDK_PACKAGE)',
        'CORE_REPO': '$(MGM_PROJECT_BASE_DIR)/core/code',
        'CUSTOMER_REPO': '${CUSTOMER_ABSOLUTE_DIR}',
    }

    def _init_check(self, **kwargs):
        """Validate the ``model_path`` kwarg and resolve the three
        files (env.h, Makefile, make.h) that define a model directory.

        Raises:
            Exception: when no valid model path was supplied, or (via
                file_check) when one of the required files is missing.
        """
        for k, v in kwargs.items():
            if k == 'model_path':
                self._model_path = path_abs(v)

        if self._model_path is None:
            raise Exception('Invalid model path')

        self._env_file = path_attach_abs(
            self._model_path, 'env.h'
        )
        self._make_file = path_attach_abs(
            self._model_path, 'Makefile'
        )
        self._make_header = path_attach_abs(
            self._model_path, 'make.h'
        )

        file_check(
            self._env_file, self._make_file, self._make_header
        )

    def __init__(self, **kwargs):
        """Set up local state; requires a ``model_path`` kwarg.

        Only cheap validation happens here — call pre_init()/init()
        to touch the Makefile and the log/release-note directories.
        """
        self._lock = threading.RLock()
        self._model_path = None
        self._env_file = None
        self._make_file = None
        self._make_header = None
        self._init_check(**kwargs)

        # Values echoed back by the injected make target (MODEL, ROOT, ...).
        self._release_var = {}
        # NOTE(review): empty here; presumably populated by subclasses —
        # detail() indexes element 0 and would raise IndexError otherwise.
        self._support_make_type = ()
        # Dispatch table: make-type id -> make routine.
        self._make_dict = {
            make_all_clean_core_id: self._make_core_clean_all,
            make_core_id: self._make_core_fw_all,
            make_all_id: self._make_all
        }
        self._git_info = ''
        self._new_feature = 'N/A'
        self._bug_fixed = 'N/A'
        self._changed = 'N/A'
        self._note = 'N/A'
        self._known_issue = 'N/A'

        self._last_release_note = []
        self._last_release_note_arr = []  # elecom has 2 releasenote file
        self._last_release_note_dir = ""
        self._local_released_flag = False
        self._log_file = ''
        self._log_buf = ''
        self._process = None
        self._make_type = None
        self._file_info = []

        self._project_name = ''
        # random str, sub module must override this.
        self._release_note_tail_str = '$^*&#$^%&*#$'

    def files(self):
        """Return the artifact descriptors gathered by the last sync."""
        return self._file_info

    def _sync_file_info(self):
        # Base-class hook: subclasses append their artifacts to
        # self._file_info; here we only announce the sync start.
        io.emit(edm_model_info_report, f'Model {self.__str__()} start sync files info.')
        # <2> do different projects (elecom/me) sync routines...

    def _sync_local_dirs(self):
        # Base-class hook; implemented by project-specific subclasses.
        ...

    def _sync_ftp_dirs(self):
        # Base-class hook; implemented by project-specific subclasses.
        ...

    def _append_commands_to_makefile(self):
        """Inject the value-echoing make target into the model Makefile
        (filtered so repeated calls do not duplicate the target)."""
        append_to_file_filter(generate_commands(self.makefile_commands), self._make_file,
                              filter_reg=r'^' + rf'{command_name}:')

    def _strip_release_info(self):
        """Run the injected ``release_info`` make target and capture
        the echoed NAME=VALUE pairs into self._release_var.

        Values containing '#' are skipped — they indicate the make
        variable expanded into commented-out/invalid content.
        """
        def is_var_valid(v: str):
            return '#' not in v

        args = ['/tmp/release_log.txt', 'make', '--no-print-directory', '-C', self._model_path,
                'release_info']
        c = do(args)
        # parse the completed output
        lines = str(c.stdout, 'utf-8').split('\n')
        for line in lines:
            m = Model.release_var_re.match(line)
            if m and is_var_valid(m.group(2)):
                if m.group(1) in self.makefile_commands:
                    self._release_var.update({
                        m.group(1): m.group(2)
                    })

    def pre_init(self):
        """Light-weight init: inject the echo target and read back the
        release variables (enough for model_name()/__str__())."""
        with self._lock:
            self._append_commands_to_makefile()
            self._strip_release_info()

    def init(self):
        """Full init: pre_init work plus creation of the per-model log
        file and loading of previously stored release notes."""
        with self._lock:
            self._append_commands_to_makefile()
            self._strip_release_info()
            dir_create(path_attach_abs(cfg.log_dir, self.__str__()))
            self._log_file = combine_path([
                cfg.log_dir, self.__str__(), self.__str__()
            ])
            Path(self._log_file).touch(exist_ok=True)
            p = combine_path([cfg.local_rls_note_store, self.__str__()])
            self._last_release_note_dir = p
            if Path(p).exists():
                for file in Path(p).iterdir():
                    with open(file, 'r') as f:
                        # NOTE(review): _last_release_note keeps only the
                        # last file iterated; the per-file contents live
                        # in _last_release_note_arr.
                        self._last_release_note = f.readlines()
                        self._last_release_note_arr.append({
                            "file": file.__str__(),
                            "content": self._last_release_note,
                        })
            else:
                dir_create(p)

    def model_name(self):
        """Return the model name stripped from the Makefile.

        Raises KeyError when pre_init()/init() has not run yet.
        """
        with self._lock:
            return self._release_var['MODEL']

    def _make(self, args: str) -> Popen:
        """Spawn a make command (via do2) from the model's ROOT dir.

        The per-model log file path is prepended to *args*, as do2
        expects. The working directory is always restored — previously
        an exception from do2 left the process stranded in ROOT.
        """
        cwd = os.getcwd()
        os.chdir(self._release_var['ROOT'])
        try:
            args = self._log_file + ' ' + args
            self._process = do2(args)
        finally:
            os.chdir(cwd)
        return self._process

    """ make routines """

    def _make_core_clean_all(self) -> Popen:
        """Clean the core then rebuild everything."""
        args = 'make core-clean all'
        return self._make(args)

    def _make_core_fw_all(self) -> Popen:
        """Build the core and all firmware images."""
        args = 'make core fw.all'
        return self._make(args)

    def _make_all(self) -> Popen:
        """Plain `make all`."""
        args = 'make all'
        return self._make(args)

    def _clear_img_dir(self):
        """Recreate OUTPUT_IMAGE_DIR empty (currently unused, see the
        TODO in make())."""
        shutil.rmtree(self._release_var['OUTPUT_IMAGE_DIR'], ignore_errors=True)
        os.mkdir(self._release_var['OUTPUT_IMAGE_DIR'])

    def make(self, make_type_id: str):
        """Kick off an asynchronous build of the requested type.

        Args:
            make_type_id: one of the keys of self._make_dict; a wrong
                id raises KeyError before any process is spawned.
        """
        with self._lock:
            # self._clear_img_dir()
            # TODO: After clear image directory, error occurred when compiling: cp: cannot stat
            #  ‘/home/wang/fwrelease/DGS-1210-28PME-B1/image/DGS-1210-28PME-B1/vmlinux.gz’: No such file or directory
            #  make: *** [vmlinux-attach-version-checksum] Error 1
            self._local_released_flag = False
            self._log_buf = ''
            print(f'Start make with make id: {make_type_id}')
            io.emit(edm_model_info_report, f'Model {self.__str__()} start make with type: {make_type_id}.')
            self._process = self._make_dict[make_type_id]()
            self._make_type = make_type_id

    def update_all_repos(self):
        # Base-class hook; implemented by project-specific subclasses.
        ...

    def detail(self):
        """
        type: dict
        template:
        ========================================
        {
            'name': '<model_name>'
            'repos_info':
            [
                {
                    'name': 'CORE_REPO',
                    'current': 'dev_sunflower_B012',
                    'remote':
                    [
                        'origin/dev_sunflower_B011',
                        'origin/dev_sunflower_B012',
                        'origin/dev_sunflower_B013',
                        ...
                    ]
                },
                ...
            ]
        }
        ========================================
        """
        # NOTE(review): assumes _support_make_type is non-empty
        # (subclass responsibility) — [0] raises IndexError otherwise.
        return {
            'name': self.__str__(),
            'repos_info': self._repos_info(),
            'make_type_list': list(self._support_make_type),
            'default_make_type': self._support_make_type[0],
            'project_name': self._project_name,
            'is_virtual': '0',
        }

    def _repos_info(self):
        """
        type: list
        template:
        ============================================
        [
            {
                'name': 'CORE_REPO',
                'current':
                {
                    'full_name': 'dev_sunflower_B012',
                    'base_name': 'dev_sunflower_B012',
                    'commit_id': '<commit_id>',
                    'commit': '<commit>',
                }
                'remote':
                [
                    {
                        'full_name': 'origin/dev_sunflower_B011',
                        'base_name': 'dev_sunflower_B011',
                        'commit_id': '<commit_id>',
                        'commit': '<commit>',
                    },
                    {
                        'full_name': 'origin/dev_sunflower_B012',
                        'base_name': 'dev_sunflower_B012',
                        'commit_id': '<commit_id>',
                        'commit': '<commit>',
                    },
                    ...
                ]
            },
            ...
        ]
        ============================================
        """
        repos_info = []
        for repo in repo_list:
            # TODO: uncomment this for real production env
            # RGit(self._release_var[repo]).restore_worktree()
            # RGit(self._release_var[repo]).pull()
            repos_info.append({
                'name': repo,

                'current': RGit(
                    self._release_var[repo]
                ).branch(
                    RGit.BRANCH_CURRENT
                )[0],

                'remote': RGit(
                    self._release_var[repo]
                ).branch(RGit.BRANCH_REMOTE)
            })
        return repos_info

    def checkout_branch(self, repo: str, branch: str):
        """Fetch and check out *branch* in the repo named *repo*
        (a key of self._release_var, e.g. 'CORE_REPO')."""
        RGit(self._release_var[repo]).fetch_remote_branch(branch)

    def _log_to_buf(self):
        # Load the build log into memory once; subsequent calls are
        # no-ops until _log_buf is cleared by make().
        if self._log_buf == '':
            with open(self._log_file, 'r') as f:
                self._log_buf = f.read()

    def log(self):
        """Return the cached build log ('' while a build is running)."""
        return self._log_buf

    def _release_to_local(self):
        """Publish a finished build to the local/FTP staging dirs."""
        self._strip_release_info()  # must re-strip to get updated release information
        self._sync_local_dirs()
        self._sync_ftp_dirs()

    def state(self):
        """Return the build state: free / running / succeed / failed.

        On first observed success the artifacts are released locally.
        NOTE(review): _local_released_flag is never set True in this
        class — presumably a subclass or _sync_* hook does; confirm.
        """
        with self._lock:
            if isinstance(self._process, subprocess.Popen):
                self._process.poll()  # refresh returncode without blocking
                if self._process.returncode is not None:
                    if self._process.returncode != 0:
                        self._log_to_buf()
                        return failed
                    else:
                        if self._local_released_flag is False:
                            self._release_to_local()
                            self._sync_file_info()
                        return succeed
                else:
                    return running
            else:
                return free

    def reset(self):
        """Forget the last build's make type and artifact list."""
        with self._lock:
            self._make_type = None
            self._file_info = []

    def upload_all_to_ftp(self):
        # Base-class hook; implemented by project-specific subclasses.
        ...

    def upload_to_ftp(self, files):
        """Upload each file descriptor to the configured FTP server and
        emit a per-file state report.

        Args:
            files: iterable of dicts with 'local', 'ftp' and 'name'
                keys describing source dir, target dir and file name.

        Release-note files (recognized by _release_note_tail_str in the
        name) are additionally archived into _last_release_note_dir,
        replacing the previous note(s) for the same scope.
        """
        ftp = RFtp(cfg.ftp_host)
        ftp.login(cfg.ftp_user, cfg.ftp_passwd)

        for file in files:
            print(f'Start uploading {file}...')
            upload_state = succeed
            failed_info = ''
            logging.info(f'Start uploading {file}...')
            try:
                ftp.upload_file(file['local'], file['ftp'], file['name'])
                logging.info(f'{file} upload done.')

            except TimeoutError as timeout:
                failed_info = f"Connection timeout: {timeout}, \r\n" \
                              f"Please check your network connection to host: {cfg.ftp_host}"
                upload_state = failed
            except ftplib.error_perm as err_perm:
                failed_info = f"Permission error: {err_perm}"
                upload_state = failed
            except Exception as e:
                failed_info = f'{e}'
                upload_state = failed
            io.emit(edm_model_upload_state_report, {
                'model_name': self.__str__(),
                'file_name': file['name'],
                'local_path': file['local'],
                'ftp_path': file['ftp'],
                'upload_state': upload_state,
                'failed_info': failed_info
            })
            logging.debug(f'This project\'s release note tail string is: {self._release_note_tail_str}')
            # Only release-note files get the archive/warn treatment.
            # (Previously every successful non-release-note upload fell
            # into the else branch and logged a bogus "failed" warning.)
            if self._release_note_tail_str in file['name']:
                if upload_state == succeed:
                    logging.debug(f'upload release file {file["name"]} success!')
                    if self._project_name == 'me':
                        shutil.rmtree(self._last_release_note_dir)
                        dir_create(self._last_release_note_dir)
                        logging.debug(f'ME: Removed and created release note dir({self._last_release_note_dir}) for ME')
                    elif self._project_name == 'elecom':
                        if '_ALL_' in file['name']:
                            for f in Path(self._last_release_note_dir).iterdir():
                                if '_ALL_' in f.__str__():
                                    os.remove(f)
                                    logging.debug(f'Elecom: Removed {f}')
                        elif '_FW_' in file['name']:
                            for f in Path(self._last_release_note_dir).iterdir():
                                if '_FW_' in f.__str__():
                                    os.remove(f)
                                    logging.debug(f'Elecom: Removed {f}')
                    full_path_with_name = combine_path([file['local'], file['name']])
                    shutil.copy(full_path_with_name, self._last_release_note_dir)
                    logging.info(f'====== copy file {full_path_with_name} to {self._last_release_note_dir} ')
                    logging.debug(f'Copy {file["local"]} to {self._last_release_note_dir} done.')
                else:
                    logging.warning(f'upload release file {file["name"]} failed !!!')

        ftp.close()

    def _gen_git_info(self):
        """Build a human-readable summary table of all repositories
        from the `make git-info` target output and cache it."""
        sh = 'make -C %s git-info --no-print-directory' % self._model_path
        (status, output) = subprocess.getstatusoutput(sh)
        # by the way, update git_info in model_params dict
        lines = output.split('\n')
        git_info = ''
        git_info += '(model)  (repertory)          (branch)             (version)      (commit by)      (date)\r\n'
        for line in lines:
            larr = line.split()
            # Guard against blank lines (larr[0] raised IndexError) and
            # lines without the "<email>" field the regex extracts.
            if not larr or larr[0] not in self.repo_model:
                continue
            committer = re.search(r'(.*)<(.*)>(.*)', line)
            if committer is None:
                continue
            git_info += ' %-8s %-20s%-21s%-16s%-16s%-s\r\n' % (
                larr[0], larr[1], larr[2], larr[3][5:], larr[4][5:],
                ('<' + committer.group(2) + '>'))
        self._git_info = git_info

    def set_note(self, n: str):
        """Fluent setter for the release 'note' field."""
        self._note = n
        return self

    def set_known_issue(self, ni: str):
        """Fluent setter for the release 'known issue' field."""
        self._known_issue = ni
        return self

    def set_changed(self, c: str):
        """Fluent setter for the release 'changed' field."""
        self._changed = c
        return self

    def set_new_feature(self, n_f: str):
        """Fluent setter for the release 'new feature' field."""
        self._new_feature = n_f
        return self

    def set_bug_fixed(self, b_f: str):
        """Fluent setter for the release 'bug fixed' field."""
        self._bug_fixed = b_f
        return self

    def _model_name(self):
        # .get(): returns None before pre_init()/init() has run.
        return self._release_var.get('MODEL')

    def _display_model_name(self):
        return self._release_var.get('DISPLAY_MODEL_NAME')

    def __str__(self):
        # NOTE(review): returns None before release info is stripped,
        # which makes str(model) raise TypeError — call pre_init first.
        return self._model_name()

    def __eq__(self, other):
        """Models compare equal by model name; any non-Model compares
        unequal. (Defining __eq__ leaves instances unhashable.)"""
        if isinstance(other, Model):
            return self.model_name() == other.model_name()
        return False
