import logging
import os
import git
import time
import tarfile
import shutil
import yaml
import multiprocessing
from threading import Thread
from queue import Queue
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED

from lib.gitee_proxy import GiteeProxy

logger = logging.getLogger()

class Fetch(object):
    """Fetch upstream git repositories listed in a YAML repolist and pack the
    checked-out sources into ``<to_dir>/src.tar.gz``.

    The archive is written to ``src_bak.tar.gz`` first and only moved over the
    real ``src.tar.gz`` when packing succeeded.
    """

    def __init__(self, to_dir, repolist_dir, repo_name, gitee_owner, gitee_token) -> None:
        """
        :param to_dir: working directory; sources land in ``<to_dir>/src``
        :param repolist_dir: path of the YAML file listing the projects
        :param repo_name: Gitee repository used for issue reporting
        :param gitee_owner: owner of that Gitee repository
        :param gitee_token: Gitee API token
        """
        self._to_dir = to_dir
        self._repolist_dir = repolist_dir
        self._gitee = GiteeProxy(gitee_owner, repo_name, gitee_token)

    @staticmethod
    def init_source_dir(source_dir):
        """Create *source_dir* if it does not exist.

        Uses ``makedirs(..., exist_ok=True)`` so intermediate directories are
        created too and a concurrent/earlier creation does not raise
        (``os.mkdir`` handled neither case).
        """
        os.makedirs(source_dir, exist_ok=True)

    @staticmethod
    def init_repo(remote, source_dir, repo_path):
        """Clone-or-update a single repository, best effort.

        :param remote: URL of the upstream remote
        :param source_dir: directory holding all working copies
        :param repo_path: sub-directory name of this repository
        """
        try:
            logger.info("fetch... {}".format(repo_path))
            repo_dir = os.path.join(source_dir, repo_path)
            try:
                repo = git.Repo(repo_dir)
                origin = repo.remote("origin")
            except (git.InvalidGitRepositoryError, git.NoSuchPathError, ValueError):
                # Directory missing, not a git repo, or no "origin" remote yet:
                # (re)initialise the working copy. (Was a bare ``except:``.)
                repo = git.Repo.init(repo_dir)
                origin = repo.create_remote("origin", remote)

            fetch_infos = origin.fetch()
            repo.active_branch.checkout()
            origin.pull()
            for info in fetch_infos:
                # ``FetchInfo.flags`` is a bitmask — test membership with ``&``;
                # the original ``==`` comparison missed combined flag values.
                if info.flags & git.FetchInfo.NEW_HEAD:
                    # TODO: handle newly appeared branches
                    pass
                if info.flags & git.FetchInfo.HEAD_UPTODATE:
                    pass
            logger.info("fetch {} finished".format(repo_path))
        except Exception as e:
            # Best effort: one broken repository must not abort the whole batch.
            logger.error(e)

    @staticmethod
    def make_targz(output_filename, source_dir):
        """Pack *source_dir* into a gzip-compressed tarball in one pass.

        :param output_filename: path of the archive to create
        :param source_dir: directory to pack (stored under its basename)
        :return: True on success, False on any error (the error is logged)
        """
        try:
            with tarfile.open(output_filename, "w:gz") as tar:
                tar.add(source_dir, arcname=os.path.basename(source_dir))
            return True
        except Exception as e:
            logger.error(e)
            return False

    def add_issue_to_repo(self, in_q):
        """Consume fetch results from *in_q* and file Gitee issues.

        Not implemented yet; the worker thread started by :meth:`exec`
        therefore exits immediately.
        """
        pass

    def exec(self):
        """Fetch every project from the repolist in parallel, then refresh
        ``<to_dir>/src.tar.gz``."""
        q = Queue()
        add_issue = Thread(target=self.add_issue_to_repo, args=(q,))
        add_issue.start()

        self.init_source_dir(self._to_dir)

        # The repolist is a project-controlled config of plain mappings/lists,
        # so safe_load suffices and avoids arbitrary object construction
        # (previously yaml.load with the full Loader).
        with open(self._repolist_dir, 'r') as f:
            data = yaml.safe_load(f) or {}

        src_dir = os.path.join(self._to_dir, "src")

        logger.info("now fetch upstream source code")
        start_time = time.time()
        # I/O-bound git fetches: thread pool sized to the CPU count.
        with ThreadPoolExecutor(max_workers=multiprocessing.cpu_count()) as pool:
            tasks = [
                pool.submit(self.init_repo, item["path"], src_dir, item["name"])
                for item in data.get("projects", [])
            ]
            wait(tasks, return_when=ALL_COMPLETED)
        logger.info("fetch upstream source code finished, duration time {}".format(
            time.time() - start_time))

        # Pack into a "_bak" file first so the current archive stays intact
        # while packing is in progress.
        bak_path = src_dir + "_bak.tar.gz"
        if os.path.exists(bak_path):
            os.remove(bak_path)

        logger.info("add {}_bak.tar.gz processing".format(src_dir))
        start_time = time.time()
        if self.make_targz(bak_path, src_dir):
            logger.info("add src.tar.gz successful, duration time {}".format(
                time.time() - start_time))
            # Bug fix: only replace the previous good archive when packing
            # succeeded; the move used to run unconditionally and could clobber
            # src.tar.gz with a partial archive (or raise if none was written).
            shutil.move(bak_path, src_dir + ".tar.gz")
        else:
            logger.error("add src.tar.gz failed")