import os
import urllib
import urllib.parse
import urllib.request

import requests
from bs4 import BeautifulSoup

_root_url = "http://app.mi.com"
_failed_log_path = "E:/failed(小米).txt"
_download_root = "E:/xiaomi"
# Footer/navigation link titles that appear after the real app list on each
# page; hitting one of these means we have scanned past the apps.
_stop_titles = ("应用分发", "开发者服务", "联系我们")


def _log_failure(message):
    """Append one failure record to the shared failure log and close it."""
    # explicit encoding so Chinese app names don't depend on the locale codec
    with open(_failed_log_path, "a+", encoding="utf-8") as failed:
        failed.write(message + "\n")


page_num = 1
while page_num < 20:  # crawl pages 1..19, same range as before
    print("开始爬取第" + str(page_num) + "页")
    wbdata = requests.get("http://app.mi.com/topList?page=" + str(page_num)).text
    soup = BeautifulSoup(wbdata, "html.parser")
    links = soup.find_all("h5")
    for heading in links:
        link = heading.contents[0]
        foldername = link.string
        # Past the app list — stop scanning this page.
        if foldername in _stop_titles:
            break
        print(foldername)
        # Detail page URL; its query string carries the package name
        # (e.g. .../details?id=com.example.app).
        detail_link = urllib.parse.urljoin(_root_url, str(link["href"]))
        package_name = detail_link.split("=")[1]
        try:
            download_page = requests.get(detail_link).text
        except Exception:
            continue  # network error on this app: skip it, keep crawling
        soup1 = BeautifulSoup(download_page, "html.parser")
        try:
            download_link = soup1.find(class_="download")["href"]
        except Exception:
            # BUG FIX: the original logged `download_link` here, but that name
            # is undefined when the lookup fails (NameError inside the
            # handler, or a stale value from the previous app). Log the
            # detail page URL instead.
            _log_failure(str(foldername) + ":download_link not found: " + detail_link)
            continue
        # urljoin on two known strings cannot realistically fail; the
        # original wrapped it in a try whose handler referenced the very
        # variable that would be undefined, so that guard is dropped.
        download_url = urllib.parse.urljoin(_root_url, str(download_link))
        print(download_url)
        app_dir = os.path.join(_download_root, str(foldername))
        try:
            if os.path.exists(app_dir):
                # BUG FIX: the original used `break`, abandoning the rest of
                # the page when one app was already downloaded; skip just
                # this app instead.
                continue
            os.mkdir(app_dir)
        except Exception:
            _log_failure(str(foldername))
            continue
        localpath = os.path.join(app_dir, package_name + ".exe")
        try:
            urllib.request.urlretrieve(download_url, localpath)
        except Exception:
            _log_failure(str(foldername) + ":" + "download_url:" + download_url)
            continue
        # Metadata scraped from the detail page: left/right label-value
        # pairs plus the comment/intro spans.
        download_content1 = soup1.find_all(style="float: left")
        download_content2 = soup1.find_all(style="float:right;")
        download_content3 = soup1.find_all("span", class_="app-intro-comment")
        full_path = os.path.join(app_dir, str(foldername) + ".txt")
        # `with` guarantees the file is closed even if a write fails; the
        # original leaked the handle on several early-continue paths.
        with open(full_path, "a+", encoding="utf-8") as info_file:
            if download_content3:
                # The original loop kept only the LAST comment span;
                # preserve that, but guard against an empty result
                # (previously a NameError).
                content3 = download_content3[-1]
                print(content3.string)
                info_file.write(content3.string + "\n")
            paragraphs = soup1.find_all("p")
            if paragraphs:
                print(paragraphs[0].text)
                info_file.write(paragraphs[0].text + "\n")
            for content1, content2 in zip(download_content1, download_content2):
                try:
                    print(content1.string + " : " + content2.string)
                    info_file.write(content1.string + " : " + content2.string + "\n")
                except Exception:
                    # a span with no .string makes the concatenation fail;
                    # skip that pair, as before
                    continue
        print("\n")
    page_num = page_num + 1