'''
Top 10K crates crawler
~~~~~~~~~~~~~~~~~~~~~~

The list of crates is fetched from the Internet, and may change over time.
To replicate the experiment results, use `crate_downloader.py` instead of
this module.
'''

import json
import os
import pickle
import sys
import tarfile
import re
from datetime import datetime, timezone
from tempfile import NamedTemporaryFile

import requests

# crates.io API endpoint, reached via the USTCLUG mirror/proxy
BASE_URL = 'https://crates-io.proxy.ustclug.org/api/v1/crates'
# crates.io asks automated clients to identify themselves with a contact address
UA = {'User-Agent': 'top_rust_crates_scrapper (yongqi4nchen@163.com)'}
PATH = os.path.dirname(os.path.abspath(__file__))  # directory containing this script
BASE_DIR = f'{PATH}/crates'  # crate tarballs are extracted under here
# backspaces + spaces + backspaces: erases the trailing ' ... ' progress marker
CLEAR_STR = '\b\b\b\b\b     \b\b\b\b\b'
# download-rank indices of crates already fetched; persisted via the pickle cache
DOWNLOADED_CRATES: 'set[int]' = set()


def crawl(idx: int):
    '''
    Download and extract the `idx`-th most-downloaded crate into BASE_DIR.

    Skips indices already recorded in DOWNLOADED_CRATES (resume support).
    Raises `requests.HTTPError` on HTTP failure and a plain `Exception`
    when crates.io reports version 0.0.0 (deprecated/yanked crate).
    '''
    # No `global` statement needed: we only mutate the set in place via
    # .add(), never rebind the name.
    if idx in DOWNLOADED_CRATES:
        return
    print(f'Fetching crate {idx}', end=' ... ', flush=True)
    meta = fetch_metadata(idx)
    name, version = meta["id"], meta["newest_version"]
    print(f'{CLEAR_STR}: {name} {version}', end=' ... ', flush=True)
    if version == '0.0.0':
        raise Exception('Unable to fetch deprecated crates')
    url = f'{BASE_URL}/{name}/{version}/download'
    with requests.get(url, headers=UA) as res, NamedTemporaryFile(delete=True) as f:
        res.raise_for_status()
        # Buffer the tarball to a temp file so tarfile can seek in it.
        f.write(res.content)
        f.flush()
        # NOTE(security): the archive comes from the network; extractall()
        # without a filter is vulnerable to path traversal. On Python 3.12+
        # pass filter='data' here.
        with tarfile.open(f.name, mode='r:gz') as tf:
            tf.extractall(BASE_DIR)
        print(f'{CLEAR_STR} OK')
        # Only mark as done after a successful extraction.
        DOWNLOADED_CRATES.add(idx)


def fetch_metadata(idx: int):
    '''
    Return crates.io metadata for the crate ranked `idx` by downloads.

    Requests one crate per page sorted by downloads, so page `idx`
    contains exactly the idx-th most-downloaded crate.
    '''
    query = f'{BASE_URL}?page={idx}&per_page=1&sort=downloads'
    with requests.get(query, headers=UA) as response:
        response.raise_for_status()
        crates = response.json()['crates']
        return crates[0]


def load_cache():
    '''Restore the set of already-downloaded crate indices, if a cache file exists.'''
    global DOWNLOADED_CRATES
    cache_file = f'{BASE_DIR}/.cache'
    if not os.path.isfile(cache_file):
        return
    with open(cache_file, 'rb') as fh:
        DOWNLOADED_CRATES = pickle.load(fh)


def dump_cache():
    '''Persist DOWNLOADED_CRATES to disk so an interrupted run can resume.'''
    cache_path = f'{BASE_DIR}/.cache'
    with open(cache_path, 'wb') as fh:
        pickle.dump(DOWNLOADED_CRATES, fh)
        print(f'Cache info saved to {fh.name}')


def generate_summary():
    '''
    Scan BASE_DIR for extracted crates and write crates_info.json.

    Each extracted crate lives in a directory named '<name>-<version>'.
    Splitting on the last '-' is only a first approximation for names
    containing dashes; see fix_version() for the cleanup pass.
    '''
    crates = []
    for entry in os.listdir(BASE_DIR):
        if not os.path.isdir(f'{BASE_DIR}/{entry}'):
            continue
        parts = entry.split('-')
        crates.append({
            'name': '-'.join(parts[:-1]),
            'version': parts[-1]
        })
    info = {
        'timestamp': datetime.now(timezone.utc).astimezone().isoformat(),
        # Derive the count from the list instead of a parallel manual counter.
        'crates_cnt': len(crates),
        'crates': crates
    }
    # 'w' suffices: the file is only written, never read back here ('w+' added nothing).
    with open(f'{PATH}/crates_info.json', 'w') as f:
        json.dump(info, f)
        print(f'Crates summary saved to {f.name}')


def fix_version():
    '''
    Fix crate entries whose version was split incorrectly.

    Some crates, such as crate-x.y.z-alpha.123, are not split correctly
    by generate_summary(): whenever the stored version does not start
    with 'x.y.z', the last dash-separated component of the name is
    actually the beginning of the version, so it is moved back. The
    function may need to be called multiple times to fix all versions,
    depending on the specific version format.
    '''
    # Hoist the regex compilation out of the loop (the pattern is invariant).
    semver = re.compile(r'^\d+\.\d+\.\d+.*$')
    with open(f'{PATH}/crates_info.json', 'r') as f:
        info = json.load(f)
    for crate in info['crates']:
        if semver.match(crate['version']):
            continue
        print(f'Fixing {crate["name"]}, {crate["version"]}')
        parts = crate['name'].split('-')
        # Move the trailing name component back onto the version.
        crate['version'] = parts[-1] + '-' + crate['version']
        crate['name'] = '-'.join(parts[:-1])
    with open(f'{PATH}/crates_info.json', 'w') as f:
        json.dump(info, f)


if __name__ == '__main__':
    # Resume-friendly entry point: skip crates already in the cache,
    # keep going past individual failures, then summarize everything.
    os.makedirs(BASE_DIR, exist_ok=True)
    load_cache()
    total = int(sys.argv[1]) if len(sys.argv) > 1 else 10000
    for rank in range(1, total + 1):
        try:
            crawl(rank)
        except Exception as e:
            print(f'{CLEAR_STR} ERROR: {e}')
    dump_cache()
    generate_summary()
    # Run twice: each pass moves at most one name component into the version.
    fix_version()
    fix_version()
