#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import pandas as pd
import json
from urllib import request
from urllib import error
import os
import wget
import os
from DBOP import DBOP

import requests

from sshtunnel import SSHTunnelForwarder

import shutil

# Module-level database handle. NOTE: this rebinds the name "DBOP" from the
# imported class to its singleton instance, shadowing the class for the rest
# of the module.
DBOP = DBOP()

def get_proxy():
    """Fetch one proxy record from the proxy-pool service on 192.168.2.239.

    The pool service listens on 127.0.0.1:5010 *on the remote host*, so it is
    reached through an SSH tunnel and queried via the tunnel's local endpoint.

    Returns:
        dict: JSON response from the pool, e.g. {"proxy": "ip:port", ...}.
    """
    # NOTE(review): SSH credentials are hard-coded; move to config/env vars.
    with SSHTunnelForwarder(
        ('192.168.2.239', 22),
        ssh_password='sdp1234',
        ssh_username='sdp',
        remote_bind_address=('127.0.0.1', 5010)) as server:
        # Bug fix: the original queried http://127.0.0.1:5010 directly, which
        # ignores the tunnel entirely (SSHTunnelForwarder binds a *random*
        # local port by default). Use the tunnel's actual local bind port.
        result = requests.get(
            "http://127.0.0.1:{}/get/".format(server.local_bind_port)).json()

    return result

def delete_proxy(proxy):
    """Remove *proxy* from the remote proxy pool.

    Args:
        proxy (str): proxy address ("ip:port") previously returned by
            get_proxy().
    """
    with SSHTunnelForwarder(
        ('192.168.2.239', 22),
        ssh_password='sdp1234',
        ssh_username='sdp',
        remote_bind_address=('127.0.0.1', 5010)) as server:
        # Bug fix: the original contacted 192.168.2.239:5010 directly, which
        # bypasses (and defeats the purpose of) the SSH tunnel — the pool is
        # bound to the remote loopback only. Route through the tunnel's
        # local bind port instead.
        requests.get(
            "http://127.0.0.1:{}/delete/?proxy={}".format(
                server.local_bind_port, proxy))

def crawl_pkg_list(batch):
    """Fetch one batch of PyPI package names from libraries.io, cached to CSV.

    Results are persisted in pkg_list_<batch>.csv; on a cache hit the names
    are read back from disk instead of hitting the API.

    Args:
        batch (int): batch index; pages batch*20 .. batch*20+9 are fetched,
            100 packages per page (1000 names per batch).

    Returns:
        list: package names for this batch.
    """
    cache_file = "pkg_list_%s.csv" % batch
    if os.path.exists(cache_file):
        cached = pd.read_csv(cache_file)
        return list(cached['name'])

    names = []
    # NOTE(review): batches are spaced 20 pages apart but only 10 pages are
    # fetched, so pages batch*20+10 .. batch*20+19 are never visited --
    # confirm whether the stride should actually be 10.
    for offset in range(10):
        page = batch * 20 + offset
        url = 'https://libraries.io/api/search?order=desc&page=%s&platforms=PyPI&sort=dependents_count&per_page=100&api_key=fd58a5df21d2e87f9c99e10adfba1f1f' % page
        payload = requests.get(url).json()
        names.extend(entry['name'] for entry in payload)
        print("succeed to fetch page: ", page)

    pd.DataFrame({'name': names}).to_csv(cache_file, index=False)
    return names

def dowload_pkg_versions(package_name):
    """Download every release artifact of *package_name* from PyPI.

    Fetches the package's JSON metadata through a pooled proxy, downloads
    each release file into pypi_pkgs/<package_name>/<version>/, and records
    progress in the `pkgs` DB table and summary.txt.

    Args:
        package_name (str): PyPI project name.

    Returns:
        None on completion or HTTP failure; 0 on a non-200 response
        (kept as-is for backward compatibility with existing callers).
    """
    proxy = get_proxy().get("proxy")

    # Fix: ('.exe') is a plain string, not a tuple. str.endswith accepted it
    # anyway, but make the "tuple of suffixes" intent explicit.
    invalid_suffixes = ('.exe',)
    data_dir = 'pypi_pkgs'
    url = "https://pypi.org/pypi/%s/json" % package_name

    # Route urllib through the pooled proxy with a browser User-Agent.
    proxy_support = request.ProxyHandler({"http": "http://{}".format(proxy)})
    opener = request.build_opener(proxy_support)
    opener.addheaders = [('User-Agent','Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36')]
    request.install_opener(opener)
    try:
        response = request.urlopen(url)
    except error.HTTPError:
        _log_summary("failed to fetch {}".format(package_name))
        return

    if response.getcode() != 200:
        print("failed to fetch {}".format(package_name))
        _log_summary("failed to fetch {}".format(package_name))
        return 0

    data = json.loads(response.read().decode("utf-8"))
    # Fix: redundant nested os.path.join; makedirs also creates data_dir if
    # missing (the original os.mkdir would raise FileNotFoundError then).
    working_dir = os.path.join(data_dir, package_name)
    os.makedirs(working_dir, exist_ok=True)

    for v in data["releases"]:
        urls = [ele['url'] for ele in data["releases"][v]]
        new_dir = os.path.join(working_dir, v)
        name = package_name + "@" + v
        # NOTE(security): SQL built by string interpolation from network data;
        # switch to parameterized queries if DBOP supports them.
        sql = "select count(*) from pkgs where name = '%s'" % name
        result = DBOP.selectDb(sql)
        if result[0][0] == 0:
            if os.path.exists(new_dir):
                # Discard a previous partial download and start fresh.
                shutil.rmtree(new_dir)
                print("succeed to remove %s" % name)
            os.mkdir(new_dir)
            for file_url in urls:
                if file_url.endswith(invalid_suffixes):
                    continue
                try:
                    wget.download(file_url, os.path.join(new_dir))
                except error.HTTPError:
                    continue
            sql = "insert into pkgs(name, seen) values('%s', %d)" % (name, 1)
            DBOP.insertDB(sql)
            print("Succeeded to fetch {}".format(name))

    print("done")
    print("Succeeded to fetch {}@allversion".format(package_name))
    sql = "insert into pkgs(name, seen) values('%s@allversion', %d)" % (package_name, 1)
    DBOP.insertDB(sql)
    _log_summary("Succeeded to fetch {}".format(package_name))


def _log_summary(message):
    """Append a separator plus *message* to summary.txt, closing the handle.

    Fix: the original opened summary.txt in three places without a context
    manager, leaking the handle on any intervening exception.
    """
    with open("summary.txt", "a") as record:
        record.write('-------------------------------------------------------\n')
        record.write(message)

if __name__ == "__main__":
    # Walk 20 batches of packages; skip any package already marked complete
    # (a "<name>@allversion" row present in the pkgs table).
    for i in range(20):
        pkg_list = crawl_pkg_list(i)
        print("succeed to get pkg_list")
        for pkg in pkg_list:
            # NOTE(security): string-interpolated SQL from crawled names;
            # parameterize if DBOP supports it.
            sql = "select count(*) from pkgs where name = '%s@allversion'" % pkg
            result = DBOP.selectDb(sql)
            if result[0][0] == 0:
                dowload_pkg_versions(pkg)
        # Fix: context manager closes the log handle even if a write raises.
        with open("summary.txt", "a") as record:
            record.write('-------------------------------------------------------\n')
            record.write("Succeeded to fetch the NO.%s batch of packages" % i)