import os
import re
import shutil
import subprocess

import requests
from bs4 import BeautifulSoup

class Ubuntu_sources_download:
    """Download the current stable source packages of Ubuntu packages from
    Launchpad, optionally routing every request through a rotating HTTP
    proxy obtained from a proxy-provider API (e.g. kuaidaili)."""

    # Launchpad base URL; the package name is appended to build the page URL.
    pre_url = "https://launchpad.net/ubuntu/+source/"

    def __init__(self, proxies_api_url='', username='', password=''):
        """Configure the downloader.

        Args:
            proxies_api_url: API endpoint that returns one "ip:port" proxy
                as plain text per request; empty string disables proxying.
            username: proxy account user name.
            password: proxy account password.
        """
        self.api_url = proxies_api_url
        self.username = username
        self.password = password
        self.proxies = dict()
        if self.api_url != '':
            self.creat_proxies()

    def creat_proxies(self):
        """Request a fresh proxy from the API and rebuild ``self.proxies``.

        (Name kept as-is for backward compatibility with existing callers.)

        Raises:
            Exception: if no proxy API URL was configured.
        """
        if self.api_url == '':
            raise Exception("api_url is NULL")
        # The provider returns a single "ip:port" as plain text; strip the
        # trailing newline/whitespace the endpoint may append, otherwise the
        # proxy URL built below is malformed.
        proxy_ip = requests.get(self.api_url).text.strip()

        print(f"申请新代理{proxy_ip}")
        auth = {"user": self.username, "pwd": self.password, "proxy": proxy_ip}
        proxy_url = "http://%(user)s:%(pwd)s@%(proxy)s/" % auth
        # Same authenticated endpoint serves both schemes.
        self.proxies = {"http": proxy_url, "https": proxy_url}
        print(self.proxies)

    def fetch_page(self, url, text=False):
        """GET a URL, through the current proxy when one is configured.

        Args:
            url: target URL.
            text: when True return ``response.text``; otherwise the
                ``requests.Response`` object (callers that stream the body
                need the object).

        Returns:
            str | requests.Response | None: ``None`` when the request fails
            for a reason other than a dead proxy (dead proxies are replaced
            and the request retried).
        """
        try:
            if len(self.proxies) > 0:
                response = requests.get(url, proxies=self.proxies)
            else:
                response = requests.get(url)
            response.raise_for_status()  # raise on HTTP 4xx/5xx
            if text:
                return response.text
            else:
                return response
        except requests.exceptions.RequestException as e:
            if re.search(r'Cannot connect to proxy', str(e)):
                # The proxy died: obtain a new one and retry.
                # BUG FIX: propagate `text` so a retried call returns the
                # same type the original caller asked for.
                self.creat_proxies()
                return self.fetch_page(url, text=text)

            print(f"请求错误: {e}")
            return None

    def find_stable_version(self, html):
        """Parse a package overview page for the current stable release URL.

        Args:
            html: HTML text of ``pre_url + package``.

        Returns:
            str: absolute Launchpad URL of the stable release page.
        """
        soup = BeautifulSoup(html, 'html.parser')
        # Find the text node announcing the stable release; the <tr> right
        # after its enclosing row carries the link to the release page.
        stables = soup.find_all(string=lambda text: "current stable release" in text.strip())
        p = stables[0].parent.parent
        stable_version_parent = p.find_next_sibling('tr')
        href = stable_version_parent.find('a', attrs={'class': None, 'href': True})['href']
        # NOTE(review): Launchpad hrefs start with "/", so this join produces
        # "//"; it still resolves, but verify before tightening.
        stable_version_href = "https://launchpad.net/" + href
        return stable_version_href

    def find_sources_url(self, html):
        """Extract the downloadable source-file URLs from a release page.

        Args:
            html: HTML text of the stable release page.

        Returns:
            list[str]: hrefs of the listed source files.
        """
        urls = []
        soup = BeautifulSoup(html, 'html.parser')
        # The second <tbody> on the release page lists the source files.
        table = soup.find_all('tbody')[1]
        # BUG FIX: find_all_next('tr') walks every <tr> appearing later in
        # the whole document; restrict the search to this table's own rows.
        for tr in table.find_all('tr'):
            td = tr.find('td')
            if td is None:
                continue  # header/spacer row without a data cell
            a = td.find('a')
            print(a)
            # BUG FIX: guard against cells without a link (was a TypeError).
            if a is not None and a.has_attr('href'):
                urls.append(a['href'])
        return urls

    def wget_to_dir(self, urls, dir_path):
        """Stream-download every URL into ``dir_path``.

        Raises:
            Exception: re-raised after printing, so the caller can remove
            the partially filled directory.
        """
        try:
            for url in urls:
                file_name = url.split('/')[-1]
                response = self.fetch_page(url)
                # BUG FIX: fetch_page returns None on failure; fail loudly
                # instead of crashing on response.iter_content.
                if response is None:
                    raise Exception(f"failed to fetch {url}")
                with open(os.path.join(dir_path, file_name), 'wb') as f:
                    # 8 KiB chunks keep memory flat for large tarballs.
                    for chunk in response.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
                print(f"Download:{file_name}")

        except Exception as e:
            print(e)
            raise  # bare raise preserves the original traceback

    def download_source_from_ubuntu(self, source_list):
        """Download the stable source package of every name in source_list.

        Duplicates are dropped (order preserved). Packages whose directory
        already exists next to this script are skipped; a failed download
        removes its partially created directory.

        Args:
            source_list: iterable of Ubuntu source-package names.
        """
        # dict.fromkeys de-duplicates while keeping first-seen order.
        source_list = list(dict.fromkeys(source_list))
        print(source_list)
        # Downloads live next to this script, so check existence there too.
        # BUG FIX: the original listed the CWD but created/downloaded under
        # the script directory, breaking whenever CWD != script dir.
        script_dir = os.path.dirname(os.path.abspath(__file__))
        exist_dir = os.listdir(script_dir)

        for source in source_list:
            if source not in exist_dir:
                new_dir = os.path.join(script_dir, source)
                os.mkdir(new_dir)

                try:
                    url = self.pre_url + source
                    html = self.fetch_page(url, text=True)

                    stable_url = self.find_stable_version(html)

                    html = self.fetch_page(stable_url, text=True)

                    source_urls = self.find_sources_url(html)
                    print(source_urls)
                    self.wget_to_dir(source_urls, new_dir)
                    print(f"download Source {source}")
                    print("-" * 50)
                    print()
                except Exception as e:
                    print(e)
                    # Portable replacement for `rm -rf`; ignore_errors keeps
                    # cleanup best-effort like the original subprocess call.
                    shutil.rmtree(new_dir, ignore_errors=True)
                    print(f"delete {new_dir}")

if __name__ == "__main__":
    # Packages whose latest stable sources should be fetched.
    packages = ["lumino", "hello", "node-html-loader"]
    # SECURITY NOTE(review): this URL embeds a secret_id/signature — move it
    # (and the proxy credentials below) to environment variables instead of
    # committing them to source control.
    api_url = 'https://dps.kdlapi.com/api/getdps/?secret_id=oh9jmuclwz5l26719fjn&signature=tj1&format=text&sep=1&dedup=1'
    # Proxy-enabled variant:
    #Download = Ubuntu_sources_download(proxies_api_url=api_url, username='d221797890', password='e8gtych')

    # Direct (no proxy) download.
    downloader = Ubuntu_sources_download()
    downloader.download_source_from_ubuntu(packages)







    











