#!/usr/bin/env python
# -*- coding:UTF-8 -*-
import mysql.connector
import requests
from bs4 import BeautifulSoup
from concurrent.futures import ThreadPoolExecutor


class TicketOfficeCrawler:
    """Crawl the live box-office ranking from dianying.nuomi.com.

    Workflow: ``initDataBase()`` creates a `film` table with 25 empty
    ranking rows; ``updateInfo()`` fetches the ranking JSON, updates the
    rows, then downloads every movie's poster concurrently.
    """

    def __init__(self, mysqlconfig, posterFolder):
        """
        :param mysqlconfig: dict of keyword arguments passed straight to
            ``mysql.connector.connect(**mysqlconfig)``.
        :param posterFolder: directory where poster ``<rank>.jpg`` files
            are written; stored with exactly one trailing slash.
        """
        self.mysqlconfig = mysqlconfig
        # Normalize to exactly one trailing "/". The original used bare
        # rstrip(), which strips only whitespace, so "dir/" became "dir//".
        self.posterFolder = posterFolder.rstrip().rstrip("/") + "/"

    def initDataBase(self):
        """Create the `film` table and pre-insert rows for ranks 1..25.

        ``updateInfo()`` later fills these rows via UPDATE, so they must
        exist first.  `rank` is a reserved word in MySQL 8.0+, hence the
        backquotes everywhere it appears.
        """
        conn = mysql.connector.connect(**self.mysqlconfig)
        try:
            cursor = conn.cursor()
            create_sql = (
                "CREATE TABLE `film` ("
                "`rank`         INT(5)      NOT NULL PRIMARY KEY,"
                "movieName      VARCHAR(80) NULL,"
                "showDays       INT(8)      NULL,"
                "generalOffice  VARCHAR(40) NULL,"
                "trueTimeOffice VARCHAR(40) NULL,"
                "officePercent  FLOAT(8)    NULL,"
                "peoplePerField INT(8)      NULL,"
                "averagePiece   INT(8)      NULL,"
                "posterSrc      VARCHAR(30) NULL"
                ");"
            )
            cursor.execute(create_sql)
            conn.commit()
            init_sql = "INSERT INTO `film` (`rank`) VALUES (%s);"
            # executemany batches the 25 placeholder rows in one call.
            cursor.executemany(init_sql, [(i,) for i in range(1, 26)])
            conn.commit()
            cursor.close()
        finally:
            # Always release the connection, even if a statement fails.
            conn.close()

    def __download_thread(self, movieId, filename):
        """Worker run inside the thread pool: fetch the detail page for
        ``movieId`` and save its poster as ``<posterFolder><filename>.jpg``.
        """
        url = "http://dianying.nuomi.com/movie/detail?movieId="+str(movieId)
        headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,"
                      "image/webp,image/apng,*/*;q=0.8",
            "Cache-Control": "no-cache",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                          "AppleWebKit/537.36 (KHTML, like Gecko) "
                          "Chrome/70.0.3538.77 Safari/537.36"
        }
        # timeout added: without it a stalled server hangs a pool thread
        # forever (the pool has no way to cancel a blocked request).
        text = requests.get(url, headers=headers, timeout=10).text
        print(movieId, filename)
        bs = BeautifulSoup(text, "lxml")
        poster_src = bs.find(class_="img")["src"]
        print(poster_src)
        poster = requests.get(poster_src, timeout=10).content
        # with-statement guarantees the handle is closed even on error;
        # "wb" truncates/creates, which is all the original "wb+" used.
        with open(self.posterFolder + str(filename) + ".jpg", "wb") as poster_file:
            poster_file.write(poster)

    def updateInfo(self):
        """Fetch the current ranking JSON, update the `film` rows, then
        download all posters through a thread pool."""
        url = "http://dianying.nuomi.com/movie/boxrefresh"
        headers = {
            "accept": "application/json, text/javascript, */*; q=0.01",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "cache-control": "no-cache",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                          "AppleWebKit/537.36(KHTML, like Gecko) "
                          "Chrome/70.0.3538.77 Safari/537.36"
        }
        # Renamed from `json` to avoid shadowing the stdlib module name;
        # timeout keeps the refresh call from blocking indefinitely.
        payload = requests.get(url, headers=headers, timeout=10).json()
        detail_list = payload["real"]["data"]["detail"]
        movie_list = []
        for index, val in enumerate(detail_list):
            attribute = val["attribute"]
            # Numeric attribute keys ("1", "2", ...) are the API's field
            # ids — presumably stable; verify against the live response.
            movie = {
                "rank": index + 1,
                "movieId": val["movieId"],
                "movieName": val["movieName"],
                "showDays": attribute["1"]["attrValue"],
                "generalOffice": attribute["2"]["attrValue"],
                "trueTimeOffice": attribute["3"]["attrValue"],
                "officePercent": attribute["4"]["attrValue"],
                "peoplePerField": attribute["10"]["attrValue"],
                "averagePiece": attribute["12"]["attrValue"],
                "posterSrc": self.posterFolder + str(index + 1) + ".jpg",
            }
            print(movie)
            movie_list.append(movie)

        # Write the fresh figures into the pre-created ranking rows.
        conn = mysql.connector.connect(**self.mysqlconfig)
        try:
            cursor = conn.cursor()
            # `rank` backquoted: reserved word in MySQL 8.0+.
            update_sql = ("UPDATE `film` SET movieName = %s,showDays = %s,"
                          "generalOffice = %s,trueTimeOffice = %s,"
                          "officePercent = %s,peoplePerField = %s,"
                          "averagePiece = %s,posterSrc = %s "
                          "WHERE `rank` = %s;")
            for item in movie_list:
                val = (item["movieName"], item["showDays"],
                       item["generalOffice"], item["trueTimeOffice"],
                       item["officePercent"], item["peoplePerField"],
                       item["averagePiece"], item["posterSrc"], item["rank"])
                print(val)
                cursor.execute(update_sql, val)
            conn.commit()
            cursor.close()
        finally:
            conn.close()

        # Download posters concurrently.  The with-block waits for every
        # submitted download and shuts the pool down; the original pool
        # was never joined, so failures in workers went unnoticed.
        with ThreadPoolExecutor() as pool:
            for item in movie_list:
                pool.submit(self.__download_thread, item["movieId"],
                            item["rank"])


if __name__ == '__main__':
    # Bug fix: the original instantiated `ticketOfficeCrawler` (lowercase
    # "t"), a name that does not exist, raising NameError at startup.
    # NOTE(review): "1"/"1" look like placeholder config values — the
    # mysqlconfig argument must be a dict for connect(**cfg) to work.
    crawler = TicketOfficeCrawler("1", "1")
    crawler.updateInfo()
