import requests
import json
import csv
import time
from pyquery import PyQuery as jq
import pandas
import logging
from termcolor import colored
import re
import trio
import asks
from contextlib import contextmanager
from pprint import pprint

# Log format: "[timestamp]message" at INFO level for the whole script.
logging.basicConfig(format="[%(asctime)s]%(message)s", level=logging.INFO)
Loger = logging.getLogger("zhihu")  # NOTE(review): logger is named "zhihu" but this script scrapes Maoyan — confirm intent
asks.init("trio")  # bind asks' HTTP client to the trio event loop

# Running tally of processed titles; rendered into every log line via makeStatus().
status = {'success':0,'failed':0}



@contextmanager
def checkTimes(level=3):
    """Context manager that logs the wall-clock time spent inside the block.

    Args:
        level: number of decimal places for the reported seconds (default 3).

    The elapsed time is logged in a ``finally`` so a failing step still
    reports how long it ran (the original skipped the log on exceptions).
    """
    timeStart = time.time()
    try:
        yield
    finally:
        info(f"cost times: {round(time.time()-timeStart,level)}s")

def addsucess():
    """Bump the global success counter (misspelled name kept for callers)."""
    status["success"] = status["success"] + 1

def addfailed():
    """Bump the global failure counter."""
    status["failed"] = status["failed"] + 1

def makeStatus():
    """Render the current tally as a colored ' ✅:N 🚫:M] ' prefix string."""
    ok = colored(status['success'], 'green')
    bad = colored(status['failed'], 'red')
    return f" ✅:{ok} 🚫:{bad}] "


def success(txt):
    """Log txt in green, prefixed with the current success/failure tally."""
    return Loger.info(makeStatus() + colored(txt, "green"))


def error(txt):
    """Log txt in red, prefixed with the current success/failure tally."""
    return Loger.info(makeStatus() + colored(txt, "red"))


def info(txt):
    """Log txt in blue, prefixed with the current success/failure tally."""
    return Loger.info(makeStatus() + colored(txt, "blue"))


def warning(txt):
    """Log txt in yellow, prefixed with the current success/failure tally."""
    return Loger.info(makeStatus() + colored(txt, "yellow"))


def create_xlsx(datas, columns, filename="res.xlsx"):
    """Write the collected rows to an Excel file and log the elapsed time.

    Args:
        datas: list of rows (each a list) to dump into a DataFrame.
        columns: mapping of column index -> header label for DataFrame.rename.
        filename: output path (default "res.xlsx").
    """
    with checkTimes():
        xlsx = pandas.DataFrame(datas)
        xlsx.rename(columns=columns, inplace=True)
        # strings_to_urls=False keeps xlsxwriter from turning URL-looking text
        # into hyperlinks. NOTE(review): `options=` is the legacy pandas API;
        # newer pandas expects engine_kwargs — confirm the pinned version.
        writer = pandas.ExcelWriter(filename, options={"strings_to_urls": False})
        xlsx.to_excel(writer, "data")
        writer.save()
        # Fixed: message previously hard-coded "(unknown)" instead of the filename.
        success(f"Created {filename}")


def create_json(datas, filename="res.json"):
    """Dump the collected rows to a pretty-printed UTF-8 JSON file.

    Args:
        datas: any JSON-serializable object (here, a list of rows).
        filename: output path (default "res.json").
    """
    with checkTimes():
        with open(filename, "w", encoding="utf8") as f:
            f.write(json.dumps(datas, ensure_ascii=False, indent=4))
        # Fixed: message previously hard-coded "(unknown)" instead of the filename.
        success(f"Saved {filename}")


# Shared async HTTP session with mobile-browser headers so m.maoyan.com serves
# its mobile endpoints; pool of up to 10 concurrent connections.
session = asks.Session(headers={
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Encoding": "gzip, deflate, br",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Connection": "keep-alive",
        "Host": "m.maoyan.com",
        "Referer": "https://m.maoyan.com/search?searchtype=movie&$from=canary",
        "User-Agent": "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Mobile Safari/537.36",
        "X-Requested-With": "XMLHttpRequest"},connections=10)

datacol = {0: "剧名", 1: "评分", 2: "评分人数"}  # xlsx headers: title / rating / rating count
keylist = []  # NOTE(review): appears unused in this file — candidate for removal
alllist = []  # accumulated result rows: [title, rating, rating-count]
searchurl = "https://m.maoyan.com/ajax/search?cityId=50&stype=-1&kw="  # search keyword is appended


async def get_ping_fen(id, key):
    """Fetch the rating page for movie `id` and record its score.

    Appends [key, rating, rating-count] to the module-level ``alllist`` and
    updates the success/failure counters. Errors are logged, never raised.

    Args:
        id: Maoyan movie id (interpolated into the detail-page URL).
        key: the movie title, used for logging and the result row.
    """
    try:
        pfurl = f"https://m.maoyan.com/movie/{id}?_v_=yes&channelId=4&cityId=50&$from=canary"
        warning(f"获取评分中...{pfurl}")
        resp = await session.get(pfurl)
        alldiv = jq(resp.text)(
            "#app > div > div.movie-page > section.movie-header > div > div.movie-container.clearfix > div.movie-content > div.movie-score > div"
        )
        rating = alldiv(".rating").text()
        num = alldiv("div.score-num").text()
        if num:
            # Raw string fixes the invalid "\d" escape; extract the first digit run.
            num = re.findall(r"\d+", num)[0]
        else:
            num = '暂无'
        datas = [key, rating, num]
        addsucess()
        success(f"[{key}] {rating}: {num}")
        alllist.append(datas)
    except Exception as e:
        # Fixed: these two lines mixed tabs and spaces (TabError in Python 3).
        addfailed()
        error(f"获取评分错误: {e}")


async def search(limit, key):
    """Search Maoyan for `key`; scrape the rating of an exact title match.

    Titles with no search results are recorded as unavailable. Concurrency is
    throttled by the shared `limit` capacity limiter. Errors are logged, never
    raised.
    """
    async with limit:
        try:
            warning(f"搜索中...{key}")
            response = await session.get(searchurl + key)
            payload = json.loads(response.text)
            if "movies" not in payload:
                # No results: count as failed and record a placeholder row.
                addfailed()
                alllist.append([key, "暂无", "暂无"])
                warning(key)
                return
            for movie in payload["movies"]["list"]:
                if movie["nm"] == key:
                    await get_ping_fen(movie["id"], movie["nm"])
        except Exception as e:
            addfailed()
            error(f"搜索错误: {e}")


async def main():
    """Read titles from column 1 of 剧单.csv and search them all concurrently."""
    # Cap the number of concurrent searches so a huge input can't exhaust memory.
    concurrency = trio.CapacityLimiter(1000)
    with open("剧单.csv", encoding="utf8") as csvfile:
        async with trio.open_nursery() as nursery:
            for row in csv.reader(csvfile):
                nursery.start_soon(search, concurrency, row[1])


if __name__ == "__main__":
    with checkTimes():
        try:
            trio.run(main)  # run main() on the trio loop until it finishes
        except KeyboardInterrupt:  # Ctrl+C: exit quietly, no message
            pass
        except Exception as e:  # any other error: log it in red
            error(f'main错误: {e}')
        finally:
            # Always persist whatever was collected, even after error/interrupt.
            create_json(alllist,"maoyan.json")
            create_xlsx(alllist,datacol,"maoyan_pf.xlsx")
