# Crawl package metadata from the repo.harmonyos.com marketplace and store it as JSON files.

from urllib import request as req
import os
import json
import time

# Base URL of the HPM registry REST API.
repo_harmony_url_prefix = 'https://repo.harmonyos.com/hpm/registry/api'
# Paged package listing, ordered by downloads (desc); the two %d slots are pageSize and curPage.
pkgs_url = repo_harmony_url_prefix + '/bundles?condition={"orderBy":[{"field":"downloads","orderType":"DESC"}],"matchBy":[{"field":"name","opt":"CONTAIN","value":""}]}&pageSize=%d&curPage=%d'
# Detail endpoint: returns all published versions of one package.
pkg_url = repo_harmony_url_prefix + '/bundles/detail/{pkg_name}'
# Relation endpoints for one specific package version.
dependencies_url = repo_harmony_url_prefix + '/bundles/{pkg_name}/{pkg_version}/dependencies'
dependents_url = repo_harmony_url_prefix + '/bundles/{pkg_name}/{pkg_version}/dependents'


def build_relation(relations):
    """Reduce each relation record to just its name and version.

    :param relations: iterable of relation dicts from the registry API
    :return: list of ``{'name': ..., 'version': ...}`` dicts
    """
    simplified = []
    for item in relations:
        simplified.append({'name': item['name'], 'version': item['version']})
    return simplified


def build_package(pkg, dependencies, dependents):
    """Build a simplified JSON-serializable package record.

    :param pkg: raw package info from the registry detail endpoint
    :param dependencies: raw dependency records for this version
    :param dependents: raw dependent records for this version
    :return: dict with the fields the analysis pipeline consumes
    """
    manifest = pkg['manifest']
    # publishTime arrives as epoch milliseconds; render it in local time.
    publish_time = time.strftime(
        "%Y-%m-%d %H:%M:%S", time.localtime(int(pkg['publishTime']) / 1000))
    return {
        'name': pkg['name'],
        'version': pkg['version'],
        # All optional manifest fields fall back to '' uniformly; the
        # original guarded only description/repository, so a manifest
        # without 'license' or 'publishAs' raised KeyError.
        'info': manifest.get('description', ''),
        'publishTime': publish_time,
        'dependencies': build_relation(dependencies),
        'dependents': build_relation(dependents),
        'license': manifest.get('license', ''),
        'repository': manifest.get('repository', ''),
        'publishAs': manifest.get('publishAs', ''),
        'changeLogs': pkg['changeLogs'],
        'download': pkg['download'],
    }


def meta_miner(page_size, cur_page):
    """Crawl one page of package metadata from the registry.

    :param page_size: number of packages per page
    :param cur_page: 1-based page index to crawl
    :return: dict mapping package name -> {version -> package json}
    """
    def fetch_json(url):
        # Context manager closes the HTTP response; the original never
        # closed any of its urlopen() responses (resource leak).
        with req.urlopen(url) as res:
            return json.loads(res.read().decode('utf-8'))

    # Fetch the page listing and collect the package names on it.
    res_json = fetch_json(pkgs_url % (page_size, cur_page))
    pkg_names = [data['name'] for data in res_json['data']['datas']]
    pkgs_json = {}
    for idx, name in enumerate(pkg_names):
        # All published versions of this package.
        detail_json = fetch_json(pkg_url.format(pkg_name=name))
        versions_json = {}
        print(idx + 1, end=': ')
        for pkg in detail_json['data']:
            version = pkg['version']
            # Forward and reverse dependency records for this exact version.
            dependencies_data = fetch_json(
                dependencies_url.format(pkg_name=name, pkg_version=version))
            dependents_data = fetch_json(
                dependents_url.format(pkg_name=name, pkg_version=version))
            versions_json[version] = build_package(
                pkg, dependencies_data['data'], dependents_data['data'])
            print(f'{name}-{version}爬取成功  ', end='')
        print()
        # Store this package's versions under its name.
        pkgs_json[name] = versions_json
    print(f'{cur_page}, {len(pkg_names)}')
    return pkgs_json


def format_json(all_json):
    """Normalize the merged package json and repair upstream inconsistencies.

    Rewrites each package's ``dependencies``/``dependents`` into sorted lists
    of ``name + version`` ids, makes both edge directions mutually consistent
    (the registry's forward/reverse data can disagree or lag), and inserts
    placeholder entries marked ``expired: 'true'`` for referenced packages
    missing from the data.

    :param all_json: merged {name: {version: package}} data (mutated in place)
    :return: all_json itself
    """
    dependencies = {}
    dependents = {}
    expired_pkgs = {}

    def register_edge(src_id, dst_id):
        # src_id depends on dst_id; record both directions so the forward
        # and reverse views always agree.
        dependencies.setdefault(src_id, set()).add(dst_id)
        dependents.setdefault(dst_id, set()).add(src_id)

    def note_if_expired(n, v):
        # A referenced package/version absent from all_json was removed
        # upstream (or the version no longer matches); create a placeholder.
        if n not in all_json or v not in all_json[n]:
            expired_pkgs.setdefault(n, {})[v] = {
                'name': n, 'version': v, 'expired': 'true'}

    for name, versions in all_json.items():
        for version, pkg in versions.items():
            pkg_id = name + version
            for dep in pkg['dependencies']:
                n, v = dep['name'], dep['version']
                note_if_expired(n, v)
                register_edge(pkg_id, n + v)
            for dept in pkg['dependents']:
                n, v = dept['name'], dept['version']
                note_if_expired(n, v)
                # A dependent of pkg_id is a package that depends on it.
                register_edge(n + v, pkg_id)

    # Merge the expired placeholders into the main data (after iteration,
    # so all_json is never mutated while being walked).
    for name, versions in expired_pkgs.items():
        all_json.setdefault(name, {}).update(versions)

    # Replace the raw relation dicts with sorted id lists. The original used
    # list(set(...)), whose order is nondeterministic between runs and made
    # the output file non-reproducible.
    for name, versions in all_json.items():
        for version, pkg in versions.items():
            pkg_id = name + version
            pkg['dependencies'] = sorted(dependencies.get(pkg_id, ()))
            pkg['dependents'] = sorted(dependents.get(pkg_id, ()))
    return all_json


def mkdir(folder):
    """Create *folder* (and any missing parents) if it does not exist.

    ``exist_ok=True`` replaces the original ``os.path.exists`` guard, which
    had a check-then-create race (TOCTOU) if the directory appeared between
    the check and the makedirs call.
    """
    os.makedirs(folder, exist_ok=True)


def summarize(file_path, dist_dir):
    """Merge every per-page json file in *dist_dir* into one summary file.

    :param file_path: destination path of the combined json file
    :param dist_dir: directory holding the per-page json files
    :return: None
    """
    merged = {}
    for entry in os.listdir(dist_dir):
        with open(f'{dist_dir}/{entry}', 'r', encoding='utf8') as src:
            merged.update(json.load(src))
    # Normalize relations / fix upstream inconsistencies before writing.
    formatted = format_json(merged)
    text = json.dumps(formatted, ensure_ascii=False, indent=2,
                      separators=(',', ': '))
    with open(file_path, "w", encoding='utf-8') as dst:
        dst.write(text)


def main():
    """Crawl 10 pages of package metadata, dump each page, then summarize.

    Per-page raw dumps go to ``dist_folder``; the merged summary goes to
    ``store_folder``.
    """
    dist_folder = './pkg_origin'   # raw per-page dumps
    store_folder = './pkg'         # final merged summary
    mkdir(dist_folder)
    mkdir(store_folder)
    page_size = 100
    for i in range(1, 11):
        pkg_page_json = meta_miner(page_size, i)
        start = (i - 1) * page_size + 1
        end = start + len(pkg_page_json) - 1
        # BUG FIX: page files must be written into dist_folder --
        # summarize() scans dist_folder, but the original wrote them into
        # store_folder, so the summary was always built from an empty dir.
        page_path = dist_folder + '/pkgs_origin[%d,%d].json' % (start, end)
        with open(page_path, "w", encoding='utf-8') as f:
            f.write(json.dumps(pkg_page_json, ensure_ascii=False,
                               indent=2, separators=(',', ': ')))
    summarize(f'{store_folder}/pkgs_all.json', dist_folder)


def check_pkg_number():
    """Print package and version counts for the origin dump vs the default dump."""
    from graph_util import load_pkgs
    origin = load_pkgs('pkg/pkgs_origin_all.json')
    current = load_pkgs()
    for pkgs in (origin, current):
        version_total = sum(len(versions.keys()) for versions in pkgs.values())
        print(f'{len(pkgs.keys())}, {version_total}')


if __name__ == '__main__':
    # Run the full crawl + summarize pipeline when executed as a script.
    main()
