from globelly import *
import urllib.request
import datetime
from log import write_log

# Repository sections and architectures to mirror; every (Depot, CPU) pair is
# combined with each release name from the index file to form one package URL.
Depot = ["base","updates"]
CPU = ["x86_64","aarch64"]
# Debug overrides: uncomment to limit the crawl to a single depot/arch.
# Depot = ["updates"]
# CPU = ["aarch64"]


def project_init():
    """Initialise the project: prepare the working directory, then fill the URL queue."""
    workdir_init()
    index_to_url()
    # Debug aid: dump the resulting URL queue.
    # print("URL task queue:")
    # for entry in Queue:
    #     print(entry)


def workdir_init():
    """Initialise the working directory.

    On first run, create the work directory, the index file, the
    ``databases/`` sub-directory and the log file, then exit so the user can
    populate the index before the next run.  On later runs this is a no-op.
    """
    mod_debug_info = 0  # set to 1 for verbose progress output
    if not os.path.exists(WorkDir):
        os.mkdir(WorkDir)
        # os.mknod is Linux-only and may need privileges; create empty files
        # portably instead (the directory was just created, so nothing exists yet).
        open(Index, 'w').close()
        os.mkdir(WorkDir + 'databases/')
        open(Logfile, 'w').close()

        if mod_debug_info == 1:
            print("",
                "创建工作目录：{}\n".format(WorkDir),
                "创建索引文件：{}\n".format(Index),
                "创建数据库目录：{}".format(WorkDir + 'databases/'),
                "创建日志文件：{}\n".format(Logfile))
        # First-run setup complete: stop so the user can edit the index file.
        sys.exit(0)
    else:
        if mod_debug_info == 1:
            print("工作目录 {} 已经存在".format(WorkDir))



def __read_index():
    """Read the index file and return its usable lines.

    Returns:
        list[str]: every non-empty line of ``Index`` that is not a
        ``#`` comment, in file order.
    """
    mod_debug_info = 0  # set to 1 for verbose output
    with open(Index, 'r', encoding='UTF-8') as f:
        # Don't shadow the builtin `list`; build the result in one pass.
        entries = [line for line in f.read().split('\n')
                   if line and not line.startswith('#')]

    if mod_debug_info:
        print(entries, type(entries))

    return entries


def __check_url_isok(url):  # Probe a URL for reachability: 1 if reachable, 0 otherwise.
    """Return 1 if ``url`` answers an HTTP request, else 0.

    Uses a browser-like User-Agent (some mirrors reject the default one)
    and a timeout so a dead host cannot hang the crawl forever.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36'}
    request = urllib.request.Request(url=url, headers=headers)
    try:
        # `with` closes the response (the original leaked it); URLError and
        # HTTPError are OSError subclasses, ValueError covers malformed URLs.
        with urllib.request.urlopen(request, timeout=15):
            return 1
    except (OSError, ValueError):
        return 0


def index_to_url():
    """Expand every index entry into package URLs and queue the reachable ones.

    Each release name from the index file is combined with every
    (Depot, CPU) pair; URLs that respond are appended to the global
    ``Queue`` and every probe result is written to the log.
    """
    mod_debug_info = 0  # set to 1 to dump the queue after building it
    global Queue
    for release in __read_index():
        for depot in Depot:
            for cpu in CPU:
                url = ("https://update.cs2c.com.cn/NS/" + release
                       + '/os/adv/lic/' + depot + '/' + cpu + '/Packages/')
                if not __check_url_isok(url):
                    write_log('assistant','Error ----> URL不正确: {} '.format(url))
                    continue
                Queue.append(url)
                write_log('assistant', 'URL正确: {}'.format(url))

    if mod_debug_info:
        print('队列中URL: {} 个 '.format(len(Queue)))
        for queued_url in Queue:
            print(queued_url)


