# -*- coding: utf-8 -*-
"""增量拉取所有安装包的地址."""
import requests
import re
from bs4 import BeautifulSoup
import requests
import os
import json
import logging
import yaml

# Configure root logging: DEBUG level, timestamped format including the
# source file name and line number so each sync step can be traced.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S'
                    )

# response = requests.get('https://www.python.org/downloads/release/python-370b3/')


# Module-level configuration values.  They all start out empty and are
# populated by init() from the config file before any sync work runs.
BASE_URL = ''
WORKINGDIR = ''
GRUBIGNORE_NAME = ''
CONFIG_NAME = ''
MD5_NAME = ''
DONE_NAME = ''
VERIFY_NAME = ''


def add_base_url(x, base=None):
    """Return *x* as an absolute URL rooted at *base*.

    If *x* already contains the base URL it is returned unchanged;
    otherwise base and path are joined with exactly one ``/`` between
    them regardless of trailing/leading slashes on either side.

    :param x: URL or site-relative path.
    :param base: base URL to prepend; defaults to the module-level
        ``BASE_URL`` configured by ``init()`` (backward compatible).
    :return: absolute URL string.
    """
    if base is None:
        base = BASE_URL
    if base in x:
        return x
    return base.rstrip('/') + '/' + x.lstrip('/')


def get_download_list(html_text):
    """Extract installer URLs and MD5 checksums from a release page.

    :param html_text: HTML text of a python.org release page.
    :return: tuple ``(url_list, md5_list)`` --
        ``url_list`` is a de-duplicated list of absolute,
        whitespace-stripped installer URLs;
        ``md5_list`` is a list of unique 32-character MD5 hex digests
        found on the page.

    NOTE(review): the original docstring claimed a list of
    ``{md5sum, url}`` dicts was returned; the function has always
    returned two independent lists, and after de-duplication their
    lengths are unrelated.
    """
    soup = BeautifulSoup(html_text, 'lxml')

    # Modern pages put the checksum in a <td> cell.
    md5_list = [i.text for i in soup.find_all(
        'td', text=re.compile('[a-z0-9A-Z]{32}'))]

    if len(md5_list) == 0:  # fall back to the layouts of very old pages
        if soup.pre is not None:
            # Ancient pages dump checksums inside a <pre> block.
            md5_list = re.findall(r'[a-z0-9A-Z]{32}', soup.pre.text)
        else:
            # Slightly-less-ancient pages use <tt> elements.
            md5_list = [i.text for i in soup.find_all(
                'tt', text=re.compile('[a-z0-9A-Z]{32}'))]

    # Installer links: ftp paths ending in a known package extension.
    download_list = [i['href'] for i in soup.find_all(
        'a', href=re.compile(r'ftp.*[pP]ython.*([Ee][xX][Ee]|tgz|dmg|chm|zip|msi|xz|bz2|pkg)$'))]

    md5_list = list(set(md5_list))
    download_list = list(map(add_base_url, download_list))
    download_list = list(set(download_list))

    # Strip stray whitespace from every URL before returning.
    res = [url.strip() for url in download_list]
    return res, md5_list


def __is_equal(list1, list2):
    """Return True if the two iterables contain the same elements.

    Order and multiplicity are ignored (set comparison).  If either
    argument cannot be converted to a set (non-iterable value or
    unhashable elements), a warning is logged and False is returned.

    :param list1: first iterable (e.g. data loaded from a JSON file).
    :param list2: second iterable.
    :return: bool equality of the two element sets.
    """
    try:
        set1 = set(list1)
        set2 = set(list2)
    except TypeError as e:
        # set() raises TypeError for non-iterables / unhashable items;
        # catching only that avoids masking unrelated bugs (the
        # original caught the overly broad Exception).
        logging.warning('exceptions happen when convert : %s', e)
        return False

    return set1 == set2


def sync():
    """Synchronise metadata for every release listed on the source page.

    Loads the grub-ignore list (if present), collects every release URL
    linked from ``BASE_URL/downloads/source/``, filters out ignored
    releases and runs :func:`sync_this` on each remaining URL.  The
    process always returns to ``WORKINGDIR`` after each release so
    relative paths stay valid.
    """
    logging.info('current working directory is : %s', os.getcwd())
    os.chdir(WORKINGDIR)

    ignore_list = set()
    if os.path.isfile(GRUBIGNORE_NAME):
        with open(GRUBIGNORE_NAME) as file:
            ignore_list = set(json.load(file))
    # Wrap each entry in slashes so we only match whole path segments.
    ignore_list = ['/' + i + '/' for i in ignore_list]
    logging.info('ignorelist : %s', ignore_list)

    response = requests.get(BASE_URL + '/downloads/source/')
    soup = BeautifulSoup(response.text, 'lxml')
    joblist = []
    for i in soup.find_all('a', href=re.compile(r"/download.*[0-9]")):
        joblist.append(BASE_URL + i['href'])
    joblist = list(set(joblist))
    ignore_list = list(set(ignore_list))

    logging.info('Start with sync %d urls', len(joblist))

    # BUG FIX 1: the original tested ``len(...) is not 0`` -- identity
    # comparison with an int literal (SyntaxWarning since 3.8).
    # BUG FIX 2: the original filter kept a URL whenever ANY ignore
    # entry was absent from it, so with two or more ignore entries
    # nothing was ever filtered out.  A job survives only when NO
    # ignore entry appears in its URL.
    if ignore_list:
        joblist = [url for url in joblist
                   if all(i not in url for i in ignore_list)]

    for url in joblist:
        logging.info('start sync : %s', url)
        try:
            sync_this(url)
        except requests.exceptions.SSLError:
            # Fixed typo: was 'requets.exceptions.SSLError'.
            logging.info(
                'requests.exceptions.SSLError happen! , broken connection.')
        logging.info('finish')
        os.chdir(WORKINGDIR)
    logging.info('Done with sync %d urls', len(joblist))


def _save_lists(the_list, md5_list):
    """Write the URL list and MD5 list to the per-release config files."""
    with open(CONFIG_NAME, 'w') as file:
        json.dump(the_list, file)
    with open(MD5_NAME, 'w') as file:
        json.dump(md5_list, file)


def sync_this(URL):
    """Synchronise the metadata directory for a single release URL.

    Downloads the release page, extracts installer URLs and MD5 sums,
    and stores them under ``WORKINGDIR/<release-dir>`` in
    ``CONFIG_NAME`` / ``MD5_NAME``.  When the stored data differs from
    the freshly fetched data, the ``DONE_NAME`` / ``VERIFY_NAME``
    marker files are removed so downstream steps re-run.

    :param URL: release page URL, e.g.
        ``https://www.python.org/downloads/release/python-341/``.
    """
    the_list, md5_list = get_download_list(requests.get(URL).text)

    search_result = re.search(r'(/[Pp]ython-.*)|(/[23]\..*)', URL)
    if search_result is None:
        # BUG FIX: the original only logged here and then crashed with
        # AttributeError on ``.group(0)`` of a second, duplicated
        # ``re.search`` call; bail out instead.
        logging.info(URL + ' didn\'t get anything, exit')
        return
    FILENAME = search_result.group(0).strip('/')
    ABSNAME = WORKINGDIR.rstrip('/') + '/' + FILENAME

    if os.path.exists(ABSNAME):
        # Release directory already exists.
        os.chdir(ABSNAME)
        if os.path.isfile(MD5_NAME) and os.path.isfile(CONFIG_NAME):
            # Both metadata files exist: compare against stored state.
            with open(MD5_NAME) as file1, open(CONFIG_NAME) as file2:
                present1 = json.load(file1)
                present2 = json.load(file2)

            if __is_equal(present1, md5_list) and __is_equal(present2, the_list):
                logging.info('no change')
            else:
                _save_lists(the_list, md5_list)
                # Invalidate the done/verify markers so the release is
                # re-downloaded and re-verified.
                if os.path.exists(DONE_NAME):
                    os.remove(DONE_NAME)
                if os.path.exists(VERIFY_NAME):
                    os.remove(VERIFY_NAME)
                logging.info('change')
        else:
            # Directory exists but metadata files are missing: add them.
            _save_lists(the_list, md5_list)
            logging.info('add')
    else:
        # New release: create the directory and store the metadata.
        os.mkdir(ABSNAME)
        os.chdir(ABSNAME)
        _save_lists(the_list, md5_list)
        logging.info('create')
    os.chdir(WORKINGDIR)


def init(config_file):
    """Load configuration; must be called before any other function.

    Populates the module-level constants (``BASE_URL``, ``WORKINGDIR``,
    etc.) from the YAML/JSON file at *config_file*.

    :param config_file: path to the config file (absolute path
        recommended, since sync() changes the working directory).
    :return: *config_file*, unchanged.
    :raises KeyError: if a required key is missing from the config.
    """
    global BASE_URL, WORKINGDIR, GRUBIGNORE_NAME, CONFIG_NAME, MD5_NAME, DONE_NAME, VERIFY_NAME
    with open(config_file) as file:
        # SECURITY/DEPRECATION FIX: ``yaml.load`` without an explicit
        # Loader is unsafe on untrusted input and deprecated since
        # PyYAML 5.1; ``safe_load`` also accepts the stream directly.
        dic = yaml.safe_load(file)
    BASE_URL = dic['BASE_URL']
    WORKINGDIR = dic['WORKINGDIR']
    GRUBIGNORE_NAME = dic['GRUBIGNORE_NAME']
    CONFIG_NAME = dic['CONFIG_NAME']
    MD5_NAME = dic['MD5_NAME']
    DONE_NAME = dic['DONE_NAME']
    VERIFY_NAME = dic['VERIFY_NAME']
    return config_file


if __name__ == '__main__':
    init('conf.json')  # please use an absolute path
    # sync_this('https://www.python.org/downloads/release/python-341/')
    sync()
    logging.info('current working directory is :' + os.getcwd())
