import requests
import json
import csv
import time
from pyquery import PyQuery as jq
import pandas as pd
import logging
from termcolor import colored
import re
import trio
import asks
# Root logging config: prefix every record with a timestamp.
logging.basicConfig(
    format='[%(asctime)s]%(message)s', level=logging.INFO)
# NOTE(review): logger name 'zhihu' does not match this maoyan scraper —
# looks copied from another project; confirm before relying on it for filtering.
Loger = logging.getLogger('zhihu')
# Tell asks to run on the trio event loop (must happen before any request).
asks.init('trio')

# Colour-coded convenience wrappers around the module logger; every helper
# emits at INFO level and, like Logger.info itself, returns None.
def success(txt):
	"""Log *txt* in green (successful lookup)."""
	return Loger.info(f"{colored(txt, 'green')}")
def error(txt):
	"""Log *txt* in red (failure)."""
	return Loger.info(f"{colored(txt, 'red')}")
def info(txt):
	"""Log *txt* in blue (neutral information)."""
	return Loger.info(f"{colored(txt, 'blue')}")
def warning(txt):
	"""Log *txt* in yellow (title found no rating)."""
	return Loger.info(f"{colored(txt, 'yellow')}")

# Shared HTTP session (max 10 concurrent connections) carrying the mobile
# maoyan headers; a plain dict literal replaces the json.loads round-trip.
session = asks.Session(
	headers={
		"Accept": "application/json, text/javascript, */*; q=0.01",
		"Accept-Encoding": "gzip, deflate, br",
		"Accept-Language": "zh-CN,zh;q=0.9",
		"Connection": "keep-alive",
		"Host": "m.maoyan.com",
		"Referer": "https://m.maoyan.com/search?searchtype=movie&$from=canary",
		"User-Agent": "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Mobile Safari/537.36",
		"X-Requested-With": "XMLHttpRequest",
	},
	connections=10,
)

# Column headers for the exported Excel sheet (positional index -> title:
# show name, rating, number of raters).
datacol = {0:'剧名',1:'评分',2:'评分人数'}
# NOTE(review): keylist appears unused anywhere in this file — confirm and remove.
keylist = []
# Accumulated result rows: [title, rating, rating-count]; written out in the
# __main__ finally block.
alllist = []
# Mobile search endpoint; the title keyword is appended to the kw= parameter.
searchurl = "https://m.maoyan.com/ajax/search?cityId=50&stype=-1&kw="
async def getPingfen(id,key):
	"""Fetch the maoyan mobile movie page for *id* and record its rating.

	Appends a row [key, rating, rating-count] to the module-level ``alllist``
	and logs it in green.  (``id`` shadows the builtin, but the parameter name
	is kept for interface compatibility.)
	"""
	pfurl = f"https://m.maoyan.com/movie/{id}?_v_=yes&channelId=4&cityId=50&$from=canary"
	# Use the shared session so the configured maoyan headers (Host, UA, ...)
	# are actually sent; the original called asks.get and bypassed them.
	resp = await session.get(pfurl)
	alldiv = jq(resp.text)('#app > div > div.movie-page > section.movie-header > div > div.movie-container.clearfix > div.movie-content > div.movie-score > div')
	rating = alldiv('.rating').text()
	# Raw string fixes the invalid "\d" escape; guard against an empty match
	# so a page without a visible score count no longer raises IndexError.
	matches = re.findall(r"\d+", alldiv('div.score-num').text())
	num = matches[0] if matches else "暂无"
	datas = [key,rating,num]
	success(f'[{key}] {rating}: {num}')
	alllist.append(datas)
async def search(key):
	"""Search maoyan for *key*; on an exact title match fetch its rating.

	When no movie list comes back, records a placeholder row ("暂无") in
	``alllist`` and logs the title in yellow.
	"""
	# Use the shared session so the configured maoyan headers are sent
	# (the original asks.get bypassed them).
	resp = await session.get(searchurl+key)
	myjson = json.loads(resp.text)
	# .get + truthiness also covers the case where the 'movies' key exists
	# but is null/empty, which previously crashed on ['list'].
	movies = myjson.get('movies')
	if movies:
		for i in movies['list']:
			if i['nm'] == key:
				await getPingfen(i['id'],i['nm'])
	else:
		datas = [key,"暂无","暂无"]
		alllist.append(datas)
		warning(key)
async def main():
	"""Read show titles from 剧单.csv (second column) and look each up concurrently.

	BUG FIX: the original did ``nursery.start_soon(search, limit, line[1])``,
	passing two arguments to the one-argument ``search`` (TypeError), and the
	CapacityLimiter was created but never actually applied.  A local wrapper
	fixes both without changing ``search``'s interface.
	"""
	limit = trio.CapacityLimiter(1000)

	async def _bounded_search(key):
		# Cap the number of simultaneously running lookups.
		async with limit:
			await search(key)

	with open('剧单.csv',encoding='utf8') as f:
		async with trio.open_nursery() as nursery:
			for line in csv.reader(f):
				nursery.start_soon(_bounded_search, line[1])


if __name__ == '__main__':
	started_at = time.time()
	try:
		# Drive the whole crawl under trio until every task finishes.
		trio.run(main)
	except KeyboardInterrupt:
		# Ctrl+C: exit quietly; results collected so far are still flushed below.
		pass
	except Exception as e:
		# Any other failure: log it in red and fall through to the flush.
		Loger.error(colored(e, 'red'))
	finally:
		Loger.info(colored(f"消耗时间: {int(time.time()-started_at)}秒"))
		# Persist raw rows as JSON ...
		with open('maoyan.json', 'w', encoding='utf-8') as f:
			f.write(json.dumps(alllist,ensure_ascii=False))
		# ... and as an Excel sheet with the Chinese column headers.
		frame = pd.DataFrame(alllist)
		frame.rename(columns=datacol,inplace=True)
		frame.to_excel('maoyan_pf.xlsx')