import pymysql
import re
import datetime
import requests
from multiprocessing.dummy import Pool as ThreadPool

class XLY(object):
    """Scrape PDF download links from pages stored in MySQL and save the PDFs.

    Workflow: ``get_urls`` pulls unprocessed page links from the ``gly``
    table, then ``download`` fetches each page, extracts the PDF link and
    writes the file to a fixed local directory.
    """

    def __init__(self):
        # MySQL connection settings (local dev instance).
        self.host = '127.0.0.1'
        self.db = 'app_mark'
        self.user = 'root'
        self.passwd = '123456'
        self.charset = 'utf8mb4'
        # Desktop-browser UA so the site serves the normal HTML page.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36'
        }
        # Wall-clock start, read by the __main__ block to report elapsed time.
        self.start = datetime.datetime.now()

    def get_urls(self):
        """Return all unprocessed links as a tuple of 1-tuples, or None on error.

        Each row is a ``(link,)`` tuple as returned by the default cursor;
        ``download`` unpacks it. Returns ``None`` (not an empty tuple) when
        the query itself fails, so callers must truth-test the result.
        """
        con = pymysql.connect(host=self.host, db=self.db, user=self.user,
                              passwd=self.passwd, charset=self.charset)
        sql = 'select link from gly where tag = "0" and sitename = "食品伙伴网"'
        try:
            # Cursor as context manager guarantees it is closed even on error.
            with con.cursor() as cur:
                cur.execute(sql)
                results = cur.fetchall()
        except Exception as e:
            # Plain SELECT: nothing to roll back, just report and signal failure.
            print('error~', e)
            results = None
        finally:
            con.close()
        return results

    def download(self, url):
        """Fetch one page row, extract its PDF link and save the PDF to disk.

        :param url: a ``(link,)`` row as produced by :meth:`get_urls`.
        Errors are reported per-URL and swallowed so one bad page does not
        kill the whole thread-pool run.
        """
        url = url[0]  # unpack the 1-tuple DB row
        # timeout keeps a stalled server from hanging a worker thread forever
        response = requests.get(url, headers=self.headers, timeout=30)
        response.encoding = response.apparent_encoding
        html = response.text
        down_url = re.findall('<a class="telecom" href="(.*?)">', html, re.S)
        try:
            down_url = down_url[0]  # IndexError here (no match) is reported below
            # stream + chunked iteration avoids buffering the whole PDF and
            # the byte-at-a-time default of iter_content().
            r = requests.get(down_url, headers=self.headers, timeout=60, stream=True)
            # File name is the auth token at the end of the download URL.
            file_name = 'D:/1_work/python采集/PDF/' + down_url.split('auth=')[-1] + '.pdf'
            with open(file_name, 'wb') as pdf:
                for content in r.iter_content(chunk_size=8192):
                    pdf.write(content)
        except Exception as e:
            print('error_url:{}; exception: {}'.format(url, e))
        else:
            # Log the resolved PDF URL only on success (the original also
            # printed an empty list when the regex found nothing).
            print(down_url)


if __name__ == '__main__':
    xly = XLY()
    urls = xly.get_urls()
    if urls:
        # I/O-bound work: fan the downloads out over 20 threads.
        pool = ThreadPool(20)
        try:
            pool.map(xly.download, urls)
        finally:
            # Ensure the pool is shut down even if map() raises.
            pool.close()
            pool.join()
    end = datetime.datetime.now()
    print('耗时: {}'.format(end - xly.start))