import requests
import urllib.parse
import re
from requests.adapters import HTTPAdapter
import math
import pymysql
# Browser-like request headers attached to every HTTP request so the traffic
# resembles a normal Chrome 88 / Windows 10 session (Referer points back at
# the site itself).
headers = {
		'Connection': 'keep-alive',
		'Cache-Control': 'max-age=0',
		'Upgrade-Insecure-Requests': '1',
		'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.190 Safari/537.36',
		'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
		'Referer': 'http://club.dearedu.com/',
		'Accept-Encoding': 'gzip, deflate',
		'Accept-Language': 'zh-CN,zh;q=0.9,ja;q=0.8'
}
# Form payload for the login POST in login().  'userid' and 'pwd' are left
# blank — fill in real credentials before running the script.
body = {
		'fmdo':'login',
		'dopost':'login',
		'userid':'',
		'pwd':'',
		'login_type':'1'
}
# Session cookies captured by login() and shared by every request in this file.
cookies = {}

def login():
	"""Log in to club.dearedu.com and store the session cookies.

	POSTs the module-level ``body`` credentials to the login endpoint and
	saves the cookies from the response into the module-level ``cookies``
	dict so that later requests are authenticated.
	"""
	global cookies
	try:
		# allow_redirects=False: the Set-Cookie of interest is on this
		# response itself, not on the page it redirects to.
		response = requests.post(
			url="http://club.dearedu.com/member/index_do_new.php",
			headers=headers,
			data=body,
			allow_redirects=False,
			cookies=cookies,
		)
	except requests.exceptions.ConnectionError:
		# Consistent with the other helpers in this file: report and return
		# instead of crashing the whole run.
		print("连接超时")
		return
	cookies = requests.utils.dict_from_cookiejar(response.cookies)
	print(cookies)
def exit():
	"""Log out of the site and clear the stored session cookies.

	Logout API: http://club.dearedu.com/member/index_do.php?fmdo=login&dopost=exit

	NOTE(review): this function shadows the builtin ``exit``; the name is
	kept because main() calls it, but a rename (e.g. ``logout``) would be
	cleaner.
	"""
	global cookies
	try:
		# Context manager closes the Session's connection pool (the original
		# leaked it).
		with requests.Session() as req:
			req.mount('http://', HTTPAdapter(max_retries=100))  # retry up to 100 times
			req.mount('https://', HTTPAdapter(max_retries=100))
			req.get(
				"http://club.dearedu.com/member/index_do.php?fmdo=login&dopost=exit",
				headers=headers,
				cookies=cookies,
			)
	except requests.exceptions.ConnectionError:
		print("连接超时")
	# Drop the cookies whether or not the logout request succeeded.
	cookies = {}
def getPaperSub(url, page, req, index):
	"""Fetch one page of the paper listing and parse it into a task dict.

	Args:
		url: listing URL with its query string already present (page/rows
			parameters are appended here).
		page: 1-based page number to fetch.
		req: requests.Session (with retry adapters mounted) used for the GET.
		index: starting integer key for the returned dict.

	Returns:
		dict mapping consecutive indices (from ``index``) to
		{'aid': int, 'name': str, 'time': str}; empty dict when the request
		or parsing fails.
	"""
	url = url + '&page=%d&rows=10' % (page)
	try:
		response = req.get(url, headers=headers, cookies=cookies)
	except requests.exceptions.ConnectionError:
		print("连接超时")
		# The original fell through and crashed (NameError on unbound
		# ``response``); return an empty result so the caller can continue.
		return {}

	print(url)
	task = {}
	item_pat = re.compile('<li class="contentList">[\s\S]*?</li>')
	title_pat = re.compile('<p class="thisTitle">[\s\S]*?</p>')
	for item in item_pat.findall(response.text):
		title_m = title_pat.search(item)
		time_m = re.search(r'[0-9-]+(?=</span>)', item)
		if title_m is None or time_m is None:
			continue  # malformed entry — skip instead of crashing on .group()
		tit = title_m.group()
		aid_m = re.search(r'(?<=(aid=)).*(\S)', tit)
		link_m = re.search(r'<a[\s\S]*?a>', tit)
		if aid_m is None or link_m is None:
			continue
		name_m = re.search(r'(?<=>).+', link_m.group())
		if name_m is None:
			continue
		aid = aid_m.group()
		name = name_m.group()
		print('aid:%s,name:%s' % (aid, name))
		task[index] = {'aid': int(aid), 'name': name, 'time': time_m.group()}
		index += 1
	return task
def getPaper(url, page=1):
	"""Fetch up to ``page`` pages of results from ``url`` and merge them.

	Args:
		url: listing URL (query string already present).
		page: maximum number of pages to fetch (capped by the real total).

	Returns:
		dict mapping a running integer index to the entries produced by
		getPaperSub(); empty when the first request fails or the result
		count cannot be parsed.
	"""
	task = {}
	index = 0
	req = requests.Session()
	req.mount('http://', HTTPAdapter(max_retries=100))  # retry up to 100 times
	req.mount('https://', HTTPAdapter(max_retries=100))
	try:
		try:
			response = req.get(url, headers=headers, cookies=cookies)
		except requests.exceptions.ConnectionError:
			print("连接超时")
			# The original crashed here (NameError on unbound ``response``);
			# bail out with an empty task dict instead.
			return task

		# The total result count sits in a hidden <input>; 10 results/page.
		total_m = re.search(r'(?<=(<input type="hidden" value="))[0-9]+', response.text)
		if total_m is None:
			return task
		pageall = math.ceil(int(total_m.group()) / 10)
		print(pageall)
		for i in range(1, min(pageall, page) + 1):
			sub = getPaperSub(url, i, req, index)
			task.update(sub)
			index += len(sub)  # original used len(tuple(sub)); len() suffices
	finally:
		# Close the Session the original leaked.
		req.close()
	return task
def downLoad(task):
	"""Download every entry produced by getPaper().

	Each value holds 'time', 'name' and 'aid'; the saved file name is the
	date concatenated with the title.
	"""
	for entry in task.values():
		downLoadSub(entry['time'] + entry['name'], entry['aid'])
def downLoadSub(filename, aid):
	"""Download one attachment identified by ``aid`` and save it to disk.

	Download API: http://club.dearedu.com/member/down_gb_iweike.php?zid=<aid>&price=0

	Args:
		filename: base file name (date + title); the extension is appended
			from the server's Content-Disposition or redirect Location.
		aid: integer article/attachment id.
	"""
	download_url = 'http://club.dearedu.com/member/down_gb_iweike.php?zid=%d&price=0' % (aid)
	# Context manager closes the Session (the original leaked it).
	with requests.Session() as req:
		req.mount('http://', HTTPAdapter(max_retries=100))  # retry up to 100 times
		req.mount('https://', HTTPAdapter(max_retries=100))
		try:
			response = req.get(download_url, headers=headers, stream=True, cookies=cookies)
		except requests.exceptions.ConnectionError:
			print("连接超时")
			# The original fell through and crashed (NameError on unbound
			# ``response``); give up on this file instead.
			return
		if 'Content-Disposition' in response.headers:
			# Server announced the file name directly; reuse its extension.
			filename = filename.strip() + '.' + re.search(r'(?<=(\.)).*', response.headers['Content-Disposition']).group()
		else:
			# No Content-Disposition: follow the redirect manually to learn
			# the real extension, then re-request the file with redirects on.
			response = req.get(response.headers['Location'], headers=headers, stream=True, cookies=cookies, allow_redirects=False)
			filename = filename.strip() + re.search(r'\.[a-zA-Z]+$', response.headers['Location']).group()
			response = req.get(download_url, headers=headers, stream=True, cookies=cookies, allow_redirects=True)
		print("下载:" + filename)
		# with-block fixes the original's leaked (and possibly unflushed)
		# file handle.
		with open(filename, "wb") as f:
			for chunk in response.iter_content(chunk_size=512):
				if chunk:
					f.write(chunk)
def main():
	"""Entry point: log in, scrape the listing, download everything, log out."""
	requests.packages.urllib3.disable_warnings()
	listing_url = input("Please enter url")
	page_count = int(input("Number of pages"))
	login()
	papers = getPaper(listing_url, page_count)
	print(papers)
	downLoad(papers)
	# Module-level exit() (logout + cookie reset), not the builtin.
	exit()
	
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
	main()