# coding:utf-8
# Botu (博图) list-page downloader
import json
import os
import threading

import facade
from bs4 import BeautifulSoup
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils
from xjlibrary.our_file_dir import BaseDir

# Resolve working paths relative to this script's location.
curpath = BaseDir.get_file_dir_absolute(__file__)
# Presumably walks two directory levels up to the project root — TODO confirm
# the sign convention of get_upper_dir's second argument.
TopPath = BaseDir.get_upper_dir(curpath, -2)
# Downloaded raw HTML is appended under <root>/download/gzlg_botu/download/html.
dirPath = BaseDir.get_new_path(TopPath, "download", "gzlg_botu", "download", "html")
BaseDir.create_dir(dirPath)
# MySQL connection settings live in db.ini next to this script (section "db").
configfile = BaseDir.get_new_path(curpath, "db.ini")


class Downhtml(object):
    """Download book-detail pages from cnbooksearch.com.

    Reads pending rawids (stat = 0) from the ``article`` table, fetches each
    book page through the internal proxy, extracts the cover-image URL,
    appends the raw HTML as one JSON line per book to a dated ``.big_html``
    file, and records the outcome back into ``article.stat``:

        1   downloaded and stored OK
        -1  site replied "operation error" (bad/removed bookid)
        -2  HTTP request failed
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        # All traffic is routed through the internal forward proxy.
        self.proxies = {
            "http": "192.168.30.176:8031",
            "https": "192.168.30.176:8031",
        }

        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.108 Safari/537.36',
            # NOTE(review): hard-coded session cookie — will expire; confirm
            # whether the target site actually requires it.
            'Cookie': 'ASP.NET_SessionId=5wck1vuidcqjp5at5habah45',
            # 'Host':'222.198.130.68',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
        }

    def select(self):
        """Fetch every pending row (stat = 0) and download its book page."""
        sql = "select rawid from article where stat = 0"
        rows = self.mysqlutils.SelectFromDB(sql)
        for row in rows:
            bookid = row[0].strip()
            url = "http://www.cnbooksearch.com/BookRead.aspx?bookid=" + str(bookid)
            self.down(url, bookid)

    def down(self, url, bookid):
        """Download one book page, persist it, and update the row's stat.

        url    -- full BookRead.aspx URL for the book
        bookid -- rawid key of the row in the article table
        """
        # One output file per day/process/thread so concurrent runs never
        # interleave writes within a file.
        file_path = os.path.join(dirPath, '%s_%d_%d.big_html' % (
            MDateTimeUtils.get_today_date_strings(), os.getpid(), threading.get_ident()))
        BoolResult, errString, r = facade.BaseRequest(url,
                                                      proxies=self.proxies,
                                                      mark="bookname",
                                                      headers=self.headers,
                                                      timeout=(30, 60))
        if not BoolResult:
            # Request failed entirely: mark for retry triage.
            sql = "update article set stat = -2 where rawid = '%s' " % (bookid)
            self.mysqlutils.ExeSqlToDB(sql)
            return
        if r.text.find("operation error") > -1:
            # Site-level error page: this bookid is bad, don't retry.
            sql = "update article set stat = -1 where rawid = '%s' " % (bookid)
            self.mysqlutils.ExeSqlToDB(sql)
            return
        # Fix: soup.find()/div.find() return None when the page lacks the
        # expected markup; the original code crashed with AttributeError
        # here, aborting the whole run and leaving the row stuck at stat=0.
        soup = BeautifulSoup(r.text, 'lxml')
        div = soup.find('div', id='photo')
        img = div.find('img') if div is not None else None
        coverurl = img.get('src') if img is not None else None
        if coverurl:
            # NOTE(review): SQL is still assembled by interpolation because
            # the MysqlUtiles wrapper exposes no parameter binding; double up
            # single quotes so a quote in the scraped src attribute cannot
            # break (or inject into) the statement.
            sql = "update article set coverurl='{}' where rawid='{}'".format(
                coverurl.replace("'", "''"), bookid)
            self.mysqlutils.ExeSqlToDB(sql)
        else:
            print(str(bookid) + ' no cover image found')
        record = {
            "rawid": bookid,
            "downdate": MDateTimeUtils.get_today_date_strings(),
            "html": r.text,
        }
        # One JSON object per line, appended to the day's dump file.
        jsonstrings = json.dumps(record, ensure_ascii=False)
        BaseDir.single_add_file(file_path, jsonstrings + "\n")
        print(str(bookid) + '下载成功')
        sql = "update article set stat = 1 where rawid = '%s' " % (bookid)
        self.mysqlutils.ExeSqlToDB(sql)


if __name__ == "__main__":
    down = Downhtml()
    down.select()
