#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import re
import git
import json
import random
import commands
import datetime
import traceback

from eventlet import sleep
from constants import *


class Collector(object):
    """Background collector that watches OpenStack rpm-packaging projects
    for newer upstream releases.

    The scan loop (process_scan) repeatedly:
      1. pulls the rpm-packaging git checkout,
      2. refreshes the known member (project) set from disk,
      3. extracts each project's current version from its .spec.j2,
      4. checks open release patches and the releases index page for
         newer versions,
      5. publishes any detected Project updates onto ``queue``.

    ``notify`` carries rollback messages: a change_id to re-examine, or
    the literal token "destory" [sic] to forget all previously-seen
    patches.
    """

    def __init__(self, queue, notify):
        LOG.info("[search] collector thread started.")
        self.queue = queue      # outbound: batches of Project update objects
        self.notify = notify    # inbound: rollback notifications
        self.projects = set()   # project dirs under rpm-packaging/openstack
        self.patchs = set()     # change_ids already inspected
        self.pipe = set()       # Project objects collected in current scan
        self.hash_set = set()   # hashes of queued Projects (per-scan dedup)
        self._init_projects()

    def _init_projects(self):
        """Block until the rpm-packaging checkout appears on disk."""
        while not os.path.exists(RPM_PACKAGING_PATH):
            sleep(5)

    def _hasalpha(self, inputString):
        """Return True if the string contains any alphabetic character."""
        return any(char.isalpha() for char in inputString)

    def _get_newest_version(self, version_list):
        """Return the highest version string in version_list (None if empty)."""
        newest = None
        for _ver in version_list:
            newest = _ver if newest is None else self._compare_version(newest, _ver)
        return newest

    def _compare_version(self, current_version, new_version):
        """Return the newer of two dotted numeric version strings.

        Components are compared numerically, so "1.10" beats "1.9".
        Ties return current_version.  Raises ValueError on non-numeric
        components; callers screen those out with _hasalpha() first.
        """
        d1 = [int(part) for part in current_version.split(".")]
        d2 = [int(part) for part in new_version.split(".")]
        return current_version if d1 >= d2 else new_version

    def _record_update(self, projects, project_name, new_version, depend_id):
        """Queue a Project update when new_version beats the recorded one.

        Shared implementation behind _handler_upstream/_handler_normal.
        Matching on the project name is case-insensitive; duplicates are
        suppressed via self.hash_set.
        """
        for pname, cur_ver in projects.items():
            if project_name.lower() != pname.lower():
                continue
            new_ver = self._compare_version(cur_ver, new_version)
            if cur_ver != new_ver:  # a strictly newer version was found
                obj = Project(pname, cur_ver, new_ver, depend_id)
                if hash(obj) not in self.hash_set:
                    self.hash_set.add(hash(obj))
                    self.pipe.add(obj)
                    LOG.info("[!!!!!!] cur version: %s, release version: %s" % (cur_ver, new_ver))

    def _handler_upstream(self, upstream, project_name, new_version, depend_id):
        """Check the upstream-versioned project map for an update."""
        self._record_update(upstream, project_name, new_version, depend_id)

    def _handler_normal(self, normal, project_name, new_version, depend_id):
        """Check the normally-versioned project map for an update."""
        self._record_update(normal, project_name, new_version, depend_id)

    def _get_file(self, url, file_type):
        """Fetch a gerrit JSON document; return it parsed, or {} on failure.

        The payload is spooled through PATCH_FILE.  Gerrit prefixes its
        JSON responses with a ")]}'" XSSI guard line, which the sed call
        strips before parsing.
        """
        try:
            r = requests.get(url, timeout=10, headers={'Connection':'close'})
            with open(PATCH_FILE, "wb") as code:
                code.write(r.content)
            # Drop gerrit's XSSI guard (first line) so the rest is valid JSON.
            os.system("sed -i '1d' %s" % PATCH_FILE)
        except Exception:
            LOG.error("[search] get %s file failed ! url=%s" % (file_type, url))
            return {}

        try:
            with open(PATCH_FILE, 'r') as f:
                json_file = json.load(f)
        except ValueError:
            # Truncated or malformed payload: degrade like any other failure.
            LOG.error("[search] parse %s file failed ! url=%s" % (file_type, url))
            return {}

        return json_file if json_file else {}

    def _extract_new_version(self, contents):
        """Return the first 'version:' value among added ('b') diff lines,
        or None when no such line exists."""
        for hunk in contents:
            if "b" not in hunk:
                continue
            for line in hunk["b"]:
                if "version:" not in line:
                    continue
                parts = line.split(":")[-1].split()
                if parts:
                    return parts[0]
        return None

    def _check_release(self, normal, upstream):
        """Scan open master-branch release patches for version bumps."""
        LOG.info("[search] +++++++++++++++++++++++++++")
        LOG.info("[search] + start to check releases +")
        LOG.info("[search] +++++++++++++++++++++++++++")
        patchs_now = set()
        patch_list = self._get_file(RELEASE_PATCH_URL, "patch")
        for patch in patch_list:
            # Only patches against master that are not workflow-blocked.
            if patch.get("branch") != "master":
                continue
            if patch["labels"]["Workflow"].get("blocking", None):
                continue

            depend_id = patch.get("change_id")
            number = patch.get("_number")

            patchs_now.add(depend_id)
            if depend_id in self.patchs:
                continue  # already inspected during a previous scan
            LOG.info("[search] *************************************************************************")
            LOG.info("[search] check: theme: %s" % (patch.get("subject")))
            LOG.info("[search] number:%s, commit_id:%s" % (number, depend_id))

            file_dict = self._get_file(COMMIT_FILE % number, "commit")
            for file_name in file_dict:
                if CURRENT not in file_name and INDEPENDENT not in file_name:
                    continue

                name_list = file_name.split("/")
                project_name = name_list[-1].split(".yaml")[0]
                LOG.info("[search] start to check file : %s.yaml" % project_name)

                _diff = DIFF_FILE.format(number, name_list[1], name_list[2])
                diff_dict = self._get_file(_diff, "diff")
                contents = diff_dict.get("content", None)
                if not contents:
                    LOG.info("[search]")
                    continue

                new_version = self._extract_new_version(contents)
                if not new_version or self._hasalpha(new_version):
                    LOG.warning("[search] %s get unsupport version: %s." % (project_name, new_version))
                    continue

                self._handler_upstream(upstream, project_name, new_version, depend_id)
                self._handler_normal(normal, project_name, new_version, depend_id)
            LOG.info("[search] =========================================================================")

        self.patchs = patchs_now
        return

    def _soup_find_all(self, soup, class_name, upstream, normal):
        """Walk one row class of the releases index table.

        Each qualifying row holds a project name in its first cell and
        per-series versions in the middle cells; a row containing any
        alphabetic version component is skipped entirely.
        """
        for tags in soup.find_all('tr', class_name):
            tds = tags.find_all('td')
            if not tds or not tds[0].find_all("span", "std std-ref"):
                continue
            project_name = tds[0].a.span.text
            version_list = []
            for td in tds[1:-1]:
                version = td.text
                if self._hasalpha(version):
                    version_list = []
                    break
                version_list.append(version)
            if not version_list:
                continue
            LOG.info("[search] *************************************************************************")
            LOG.info("[search] check: project_name: %s" % project_name)
            new_ver = self._get_newest_version(version_list)
            self._handler_upstream(upstream, project_name, new_ver, None)
            self._handler_normal(normal, project_name, new_ver, None)
            LOG.info("[search] =========================================================================")
        return

    def _check_index(self, normal, upstream):
        """Scrape the releases index page for newer project versions."""
        LOG.info("[search] ++++++++++++++++++++++++")
        LOG.info("[search] + start to check index +")
        LOG.info("[search] ++++++++++++++++++++++++")
        try:
            r = requests.get(RELEASES_INDEX_URL, timeout=10, headers={'Connection':'close'})
            context = r.text
        except Exception:
            LOG.error("[search] get releases index.html failed !")
            return

        soup = BeautifulSoup(context, "lxml")
        self._soup_find_all(soup, "row-even", upstream, normal)
        self._soup_find_all(soup, "row-odd", upstream, normal)
        # Per-scan dedup state resets once both row classes are processed.
        self.hash_set.clear()
        return

    def _filter_projects(self):
        """Classify members into normal / upstream / unsupported version styles.

        Reads each project's <name>.spec.j2 and extracts its current
        version.  Projects whose version contains letters are unsupported;
        the rest are keyed by how the template declares its version
        (a plain "Version:" tag vs. an upstream_version jinja variable).

        Returns a (normal, upstream) pair of {project_name: version} dicts.
        """
        normal = {}
        upstream = {}
        unsupport = set()

        for pname in self.projects:
            j2_file = RPM_PACKAGING_PATH + "/openstack/%s/%s" % (pname, pname + ".spec.j2")

            cmd = "grep -o '{%% set upstream_version' %s |wc -l" % j2_file
            n = commands.getoutput(cmd)
            if not int(n):
                # Plain spec: the version comes from the "Version:" tag.
                with open(j2_file, "rb") as f:
                    content = f.read()
                text = re.findall(r'Version:.*\d', content)
                if text:
                    _ver = text[0].split(":")[-1].split()
                    if self._hasalpha(_ver[0]):
                        unsupport.add(pname)
                    else:
                        normal[pname] = _ver[0]
                continue

            cmd = "grep -o '{%% set upstream_version = upstream_version() %%}' %s |wc -l" % j2_file
            n = commands.getoutput(cmd)
            if int(n) > 0:
                # Version resolved by a helper macro — nothing to compare.
                unsupport.add(pname)
                continue

            with open(j2_file, "rb") as f:
                content = f.read()
            text = re.findall(r'upstream_version.*[0-9]', content)
            if text:
                _ver = text[0].split("'")[-1].split()
                if self._hasalpha(_ver[0]):
                    unsupport.add(pname)
                else:
                    upstream[pname] = _ver[0]

        LOG.info("[search] normal = %s, upstream = %s, unsupport = %s." %
                 (len(normal), len(upstream), len(unsupport)))
        return normal, upstream

    def _git_pull(self):
        """Update the rpm-packaging checkout; raise TimeoutError on failure."""
        try:
            os.chdir(RPM_PACKAGING_PATH)
            git.Git().pull()
        except Exception:
            raise TimeoutError("git pull timeout.")

    def _member_update(self):
        """Refresh self.projects from disk, logging added/removed members."""
        cur_project_set = set(os.listdir(RPM_PACKAGING_PATH + "/openstack/"))
        for mem in cur_project_set - self.projects:
            LOG.info("[search] new member %s added." % mem)
        for mem in self.projects - cur_project_set:
            LOG.info("[search] old member %s deleted." % mem)
        self.projects = cur_project_set
        LOG.info("[search] %s members now." % len(self.projects))

    def process_scan(self):
        """Main loop: drain rollback notifications, scan, publish, sleep."""
        while True:
            if not self.notify.empty():
                depend_id = self.notify.get()
                LOG.info("handle rollback: %s" % depend_id)
                # NOTE: "destory" [sic] is the established wire-protocol
                # reset token — do not "fix" the spelling.
                if "destory" == depend_id:
                    self.patchs.clear()
                else:
                    self.patchs.discard(depend_id)

            LOG.info("[search] start to search...")
            start = datetime.datetime.now()
            try:
                self._git_pull()
                self._member_update()
                normal, upstream = self._filter_projects()
                self._check_release(normal, upstream)
                self._check_index(normal, upstream)
            except TimeoutError:
                LOG.error("[search] git pull failed, try again.")
                self.pipe = set()
                continue
            except Exception:
                LOG.error("[search] process_scan failed, %s." % traceback.format_exc())
                self.pipe = set()
                continue

            if self.pipe:
                LOG.info("[search] send %s messages." % len(self.pipe))
                self.queue.put(self.pipe)
                self.pipe = set()

            end = datetime.datetime.now()
            LOG.info("[search] end of search. took %s seconds." % (end - start).seconds)
            sleep(random.randint(300, 600))
