import requests
from bs4 import BeautifulSoup
import time
from requests import exceptions
import wget
import os
import sys

# Base URL for browsing built packages on Fedora's Koji build system.
pre = "https://kojipkgs.fedoraproject.org//packages/"

# First CLI argument: path to a text file listing one package name per line.
# (Renamed from `file`, which shadowed the builtin.)
input_path = sys.argv[1]
with open(input_path) as f:
    # Strip only the trailing newline; keep any other whitespace intact.
    pkg_list = [line.rstrip("\n") for line in f]


def get_download_url(name):
    """Resolve the download URL of the newest Fedora 31 noarch RPM for *name*.

    Walks the Koji directory listing: package page -> newest version
    directory -> newest fc31 build -> noarch RPM file.

    Returns the full RPM URL as a string, or None when a page request fails
    or the package has no fc31 build.
    """
    time_start = time.time()
    url_name = pre + name + "/"
    response_name = request_get(url_name)
    if response_name is None:
        # request_get returns None on non-200; original code crashed here
        # with AttributeError on .text.
        return None

    soup_name = BeautifulSoup(response_name.text, 'html.parser')
    # In the directory listing the last <a> is the newest version directory.
    version = soup_name.find_all("a")[-1].get_text()
    url_version = url_name + version

    # Apache mod_autoindex query string: sort by modification time,
    # descending, so the most recent builds appear first.
    order_str = '?C=M;O=D'

    response_version = request_get(url_version + order_str)
    if response_version is None:
        return None
    soup_version = BeautifulSoup(response_version.text, 'html.parser')
    fc_version_list = soup_version.find_all("a")

    # First (i.e. newest, thanks to the sort order) fc31 build directory.
    url_full = ""
    for fc_v in fc_version_list:
        if fc_v.get_text().endswith("fc31/"):
            url_full = url_version + fc_v.get_text() + "noarch/"
            break

    if not url_full:
        print("No fedora 31 version")
        return None

    response_full = request_get(url_full)
    if response_full is None:
        return None
    soup_full = BeautifulSoup(response_full.text, 'html.parser')
    # Last entry in the noarch listing is the RPM file itself.
    fc_full = soup_full.find_all("a")[-1].get_text()

    print("package's full name is " + fc_full)  # typo "pakcage" fixed
    time_end = time.time()
    t = time_end - time_start
    print("Takes %s second totally" % t)
    url_download = url_full + fc_full
    print("*" * 70)
    return url_download


def request_get(url):
    """GET *url* with a 10-second timeout.

    Returns the Response object when the server answers with HTTP 200;
    returns None for any other status code. Timeout and HTTPError are
    logged and re-raised to the caller.
    """
    try:
        started = time.time()
        response = requests.get(url, timeout=10)
        elapsed = time.time() - started
    except exceptions.Timeout as e:
        print('Request timed out：'+str(e.response))
        raise
    except exceptions.HTTPError as e:
        print('http request wrong:'+str(e.response))
        raise
    else:
        # Decide success/failure from the HTTP status code.
        print('Request takes %ss' % elapsed)
        if response.status_code != 200:
            print('Connection failed：'+str(response.status_code)+','+str(response.reason))
            print('No package found')
            return None
        print('Connect successfully, status code = 200')
        return response


def download_url_list(pkg_list):
    """Resolve each package name in *pkg_list* to a download URL.

    get_download_url returns None when a package has no fc31 build (or a
    request fails); those entries are filtered out here so the caller
    never passes None to wget.download (the original appended None and
    crashed later).
    """
    candidates = (get_download_url(pkg) for pkg in pkg_list)
    return [url for url in candidates if url is not None]


if __name__ == "__main__":
    d_list = download_url_list(pkg_list)
    # makedirs(..., exist_ok=True) avoids FileExistsError on re-runs,
    # where os.mkdir would crash if the directory already exists.
    os.makedirs("noarch_rpm", exist_ok=True)
    for url in d_list:
        if url:  # defensive: skip any entry that resolved to no URL
            wget.download(url, out="noarch_rpm")
    print("Done!")
