import json
import random
import traceback
import requests
from time import sleep
from Redis import RedisClient
from lxml import etree
import os
from datetime import datetime
from config import Dir as devDataDir
from config import Node
import re
from urllib3 import encode_multipart_formdata
from remoteDB import ArticleDB


class IXueShu(object):
    """Downloads thesis PDFs from ixueshu.com and tracks progress in the article DB.

    Work loop (both ``readInfo`` variants): pull one pending article from the
    DB, fetch a download URL (web endpoint with cookies, or the mobile-app
    API with a device token), save the PDF under a per-hour directory, and
    write ``Node:<local path>`` back to the DB.  Any failure flags the
    article via ``UpdateArticleStatus`` and the loop continues after a short
    sleep.
    """

    # Shared, class-level clients: Redis holds the rotating ADSL proxy and
    # the scraped login cookies; ArticleDB is the remote work queue.
    r = RedisClient()
    db = ArticleDB()

    # Desktop user agents rotated per request to reduce blocking.
    USER_AGENT_LIST = [
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
        "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15",
    ]

    def _build_proxies(self):
        """Return a requests ``proxies`` dict for the current ADSL proxy, or {} if none."""
        adsl = self.r.ADSL()
        if not adsl:
            return {}
        # NOTE(review): proxy credentials are hard-coded; consider moving to config.
        proxy = 'http://xiaohengheng:950218@' + adsl
        return {'http': proxy, 'https': proxy}

    def _build_doc_path(self, title, university):
        """Create the dated/hourly output directory and return the target PDF path."""
        now = datetime.now()
        date = now.strftime('%Y%m%d')
        hours = now.strftime('%Y%m%d%H')
        path = devDataDir + date + '/' + hours + '/'
        # makedirs(exist_ok=True) creates both levels and is race-safe,
        # unlike the previous exists()+mkdir() pattern.
        os.makedirs(path, exist_ok=True)
        return path + title + '#' + university + '#' + '.pdf'

    def _save_file(self, download_url, doc_dir, proxies, headers):
        """Download ``download_url`` to ``doc_dir``; return True on HTTP 2xx.

        The download goes through the same proxy/headers as the lookup
        request and carries a timeout so a stalled server cannot hang the
        loop forever.
        """
        file_resp = requests.get(download_url, stream=True, proxies=proxies,
                                 headers=headers, timeout=30)
        if 200 <= file_resp.status_code < 300:
            with open(doc_dir, 'wb') as f:
                f.write(file_resp.content)
            return True
        print(file_resp.status_code)
        return False

    def readInfo(self):
        """Endless loop: download PDFs via the web endpoint using stored cookies."""
        while True:
            article = self.db.GetOneArticle()
            # Row layout: title, university, years, ixueshu article id, DB row id.
            title, university, years, article_id, row_id = article[:5]
            print(title)
            try:
                headers = {'User-Agent': random.choice(self.USER_AGENT_LIST)}
                proxies = self._build_proxies()
                doc_dir = self._build_doc_path(title, university)
                # Cookies are stored in Redis as a JSON list of {name, value} dicts.
                cookie_dict = {c['name']: c['value']
                               for c in json.loads(self.r.get_cookies())}
                response = requests.get(
                    "https://www.ixueshu.com/file/" + str(article_id) + ".html",
                    cookies=cookie_dict, proxies=proxies, headers=headers,
                    timeout=10)
                if 200 <= response.status_code < 300:
                    downloadUrl = json.loads(response.content)['message']
                    if self._save_file(downloadUrl, doc_dir, proxies, headers):
                        self.db.UpdateArticleLocalUrl(Node + ':' + doc_dir, row_id)
                else:
                    self.db.UpdateArticleStatus(title, university, years)
                    print(response.status_code)
            except Exception:
                # Best-effort loop: log the error, flag the article, move on.
                traceback.print_exc()
                self.db.UpdateArticleStatus(title, university, years)
            sleep(3)

    def readInfoFromApp(self):
        """Endless loop: download PDFs via the mobile-app API (token auth, no cookies)."""
        while True:
            article = self.db.GetOneArticle()
            title, university, years, article_id, row_id = article[:5]
            print(title)
            try:
                # Headers mimic the Android app's okhttp client.
                headers = {
                    'token': '7ee6e3234e6e4964845edbabd517ff278010',
                    'platform': 'android',
                    'device-no': '3cf3ac139984b811537034e26765aec95',
                    'User-Agent': 'okhttp/3.12.6',
                    'Connection': 'Keep-Alive',
                    'Accept-Encoding': 'gzip',
                }
                proxies = self._build_proxies()
                doc_dir = self._build_doc_path(title, university)
                response = requests.get(
                    "https://www.ixueshu.com/app/api/document/download?id=" + str(article_id),
                    proxies=proxies, headers=headers, timeout=10)
                if 200 <= response.status_code < 300:
                    res = json.loads(response.content)
                    if res['error'] == 'success':
                        if self._save_file(res['body'], doc_dir, proxies, headers):
                            self.db.UpdateArticleLocalUrl(Node + ':' + doc_dir, row_id)
                    else:
                        self.db.UpdateArticleStatus(title, university, years)
                        print(response.status_code)
            except Exception:
                traceback.print_exc()
                self.db.UpdateArticleStatus(title, university, years)
            sleep(3)

    def post_files(self, url=None, header=None, data=None, filename="", filepath=""):
        """Upload ``filepath`` as multipart form data to the samereport admin API.

        Fix: the original definition lacked ``self``, so calling it as an
        instance method bound the instance to ``url``.  ``url`` is kept (now
        optional) for call compatibility but is unused — the endpoint is
        hard-coded below, as before.
        """
        if data is None:
            data = {}
        if header is None:
            header = {}
        # Context manager closes the handle; the original leaked it.
        with open(filepath, 'rb') as fh:
            data['file'] = (filename, fh.read())
        body, content_type = encode_multipart_formdata(data)
        header['Content-Type'] = content_type
        resp = requests.post('http://api.samereport.com/v1/admin/cnki/upload',
                             headers=header, data=body)
        print(resp.content)


if __name__ == '__main__':
    # Script entry point: run the app-API download loop forever.
    IXueShu().readInfoFromApp()
