import sys

import requests
import os
from bs4 import BeautifulSoup
import re

curPath = os.path.dirname(os.path.abspath(__file__))  # directory of this script (currently unused below)
# Walk two directory levels up from the current working directory to locate
# the project root.  NOTE(review): splitting on "\\" makes this Windows-only —
# confirm the script never runs on POSIX, or switch to os.path/pathlib.
TopPath = os.path.abspath(os.getcwd())
pathlist = TopPath.split("\\")[:-2]
TopPath = "\\".join(pathlist)
# Destination folders for the scraped pages.
AMSPath = os.path.join(TopPath, "download", "ebmbook_jlsti")
FILE_DOWNLOAD_PATH = os.path.join(AMSPath, 'download', "detail")
# Browser User-Agent string sent with every HTTP request.
UserAgent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36'


def StatisticsCount(html):
    """Parse a search-result page and return its total record count.

    Finds the 'productlist' <div> and extracts N from the 'A total of N'
    summary span inside it.

    Parameters:
        html: raw HTML of a search-result page.

    Returns:
        The record count as an int, or None when the page does not contain
        the expected markup (missing div/span or no matching summary text).
    """
    soup = BeautifulSoup(html, 'lxml')

    div = soup.find('div', class_='productlist')
    if div is None:
        return None

    span = div.find('span', class_='float-left')
    # Guard both the missing-span and empty-span cases; the original code
    # raised AttributeError/TypeError here instead of returning None.
    if span is None or span.string is None:
        return None

    match = re.search(r'A total of (\d+)', span.string)
    if match is None:
        return None

    record = int(match.group(1))
    print(record)
    return record


def SavePage(html, fileName):
    """Write an HTML page into FILE_DOWNLOAD_PATH as 'fc<fileName>.htm'.

    Parameters:
        html: page content to write (text).
        fileName: suffix used to build the output file name.
    """
    # exist_ok=True replaces the racy exists()-then-makedirs() pattern.
    os.makedirs(FILE_DOWNLOAD_PATH, exist_ok=True)
    filePath = os.path.join(FILE_DOWNLOAD_PATH, 'fc' + fileName + '.htm')
    with open(filePath, 'w', encoding='utf8') as f:
        f.write(html)
    print('成功写入文件:', fileName)


def DownPage(fc, sesson):
    """Download every result page for category *fc* and save it to disk.

    Fetches the first page with a GET to learn the total record count, then
    — when the results span more than one page — POSTs the ASP.NET pager
    form once per page and saves each response via SavePage.

    Parameters:
        fc: category id (string) used in the ?fc= query parameter.
        sesson: an authenticated requests.Session.
    """
    url = "http://221.8.56.50:85/search.aspx?fc={}".format(fc)
    r = sesson.get(url, timeout=20)
    html = r.text
    soup = BeautifulSoup(html, 'lxml')
    div = soup.find('div', class_='productlist')
    if div is None:
        return

    span = div.find('span', class_='float-left')
    match = re.search(r'A total of (\d+)', span.string or '') if span else None
    if match is None:
        # Page layout changed or the summary line is absent — the original
        # code crashed with AttributeError here; skip this category instead.
        print('no record count found for fc', fc)
        return

    record = int(match.group(1))
    # 20 results per page.  Ceiling division avoids requesting an extra
    # empty page when record is an exact multiple of 20 (the original
    # record // 20 + 1 did exactly that).
    nPage = max(1, -(-record // 20))
    if nPage == 1:
        SavePage(html, fc)
        return

    for i in range(1, nPage + 1):
        # Standard ASP.NET postback payload for the AspNetPager control;
        # __VIEWSTATE/__EVENTVALIDATION are copied from the first page.
        pageData = {
            '__EVENTTARGET': 'AspNetPager',
            '__EVENTARGUMENT': str(i),
            '__VIEWSTATE': '',
            '__VIEWSTATEGENERATOR': 'BBBC20B8',
            '__EVENTVALIDATION': '',
            'top$searchtext': 'Search for eBooks and authors',
            'txtkey': '',
            'drtanda': 'ALL',
        }
        FillPostData(html, pageData)
        r = sesson.post(url, data=pageData, timeout=60)
        if r.status_code == 200:
            SavePage(r.text, fc + '_' + str(i))
        else:
            print('*' * 50)
            print('status_code:', r.status_code)
            print('*' * 50)


def FillPostData(html, postData):
    """Copy the ASP.NET hidden-field values from *html* into *postData*.

    Reads the __VIEWSTATE and __EVENTVALIDATION <input> elements out of the
    page and stores their 'value' attributes under the same keys in
    *postData* (mutated in place).
    """
    soup = BeautifulSoup(html, 'lxml')
    for fieldId in ('__VIEWSTATE', '__EVENTVALIDATION'):
        postData[fieldId] = soup.find('input', id=fieldId).get('value')


def main():
    """Log in to the site, then download every category's result pages."""
    # ASP.NET login form payload; the hidden __VIEWSTATE/__EVENTVALIDATION
    # fields are filled in from the login page before posting.
    postData = {
        '__EVENTTARGET': '',
        '__EVENTARGUMENT': '',
        '__VIEWSTATE': '',
        '__VIEWSTATEGENERATOR': '9C6951B2',
        '__EVENTVALIDATION': '',
        'top$searchtext': 'Search for eBooks and authors',
        'txtuname': 'kjbg160101170',
        'txtupass': 'kjbg160101170',
        'ImageButton1.x': '75',
        'ImageButton1.y': '20'
    }

    sn = requests.Session()
    # BUG FIX: the original assigned sn.UserAgent, which requests ignores —
    # the User-Agent must be set in the session's headers dict to be sent.
    sn.headers['User-Agent'] = UserAgent

    # Fetch the login page to harvest its hidden fields, then post the form.
    url = r"http://221.8.56.50:85/ulogin.aspx?ReturnUrl=%2findex.aspx"
    r = sn.get(url, timeout=30)
    FillPostData(r.text, postData)

    r = sn.post(url, data=postData, timeout=20)

    # Verify we can reach the index page, then walk the category menu.
    url = r"http://221.8.56.50:85/"
    r = sn.get(url, timeout=20)
    print(r.status_code)
    soup = BeautifulSoup(r.text, 'lxml')
    div = soup.find('div', class_='menuxiala')
    if div:
        # Categories are numbered 1..20 in the site's drop-down menu.
        for i in range(1, 21):
            DownPage(str(i), sn)


# Run the scraper only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
