# -*- coding: utf-8 -*-
#@Time:2020/5/13 10:09
#@File: mian.py
import urllib.request
import requests
import re
import sqlite3
import time
import pymongo
from selenium import webdriver
# zanpiancms_player
from bs4 import BeautifulSoup
import sqlite3
# Pre-compiled regexes that pull individual fields out of the raw HTML of
# www.jisuysw.com listing / detail pages (matched against str(soup_tag)).
findImg = re.compile(r'style=".*//(.*?)\)')  # cover-image URL inside an inline style attribute
findUrl = re.compile(r'href="/(.*?)"')  # relative link target (leading slash dropped by the group)
findName = re.compile(r'title="(.*?)"')  # video title from the title attribute
findActor = re.compile(r'<div class="subtitle text-muted text-overflow hidden-xs">(.*?)</div>')  # cast line on the card
findUpdate = re.compile(r'<span class="note text-bg-r">(.*?)</span>')  # update-status badge (unused in visible code)
findScore = re.compile(r'<span class="score">(.*?)</span>')  # rating
findTypes = re.compile(r'<li class="col-md-12 text"><span class="hidden-xs">类型：</span>(.*?)</li>')  # whole genre <li>
findType = re.compile(r'target="_blank">(.*?)</a>')  # individual genre anchors inside the findTypes match
findLang = re.compile(r'语言/字幕：</span>(.*?)</li>')  # language / subtitle field
findDate = re.compile(r'首播时间：</span>(.*?)</li>')  # first-air date field
findPlay = re.compile(r'src="(.*?)"')  # generic src attribute (unused in visible code)
findCollections = re.compile(r'第(.*?)集')  # episode markers "第N集"; count gives episode total
findUrls = re.compile(r'(.*?)[0-9].*')  # URL prefix up to its first digit (episode-URL base)

# --- module-level shared state ---------------------------------------------
urlArr = []  # unused in the visible code; kept for compatibility
arr = []     # unused at module level (playUrl builds its own local list)
nums = 1     # running record id, incremented per scraped card in detail()
num = 1      # unused in the visible code; kept for compatibility

# A single Firefox instance with image loading disabled (2 = block images)
# to speed up page loads; reGet() drives it to read the player iframe src.
options = webdriver.FirefoxProfile()
options.set_preference('permissions.default.image', 2)
b = webdriver.Firefox(options)
# BUG FIX: the original did `driver = webdriver.Firefox()` here, launching a
# second browser window that is never used anywhere in this file.  Keep the
# name for backward compatibility, but alias it to the one real instance.
driver = b

def main():
    """Entry point: crawl the whole 欧美 (western) listing category."""
    return oumei()


def oumei(pages=177):
    """Walk every listing page of category id 19 and scrape each one.

    Args:
        pages: number of listing pages to crawl (default 177, the site's
               page count when this scraper was written; parameterized so
               the crawl range can be adjusted without editing the code).
    """
    baseurl = 'https://www.jisuysw.com/index.php?s=home-vod-type-id-19-mcid--area--year--letter--order-hits-picm-1-p-'
    # Page numbers are 1-based in the site's URL scheme.
    for page in range(1, pages + 1):
        detail(baseurl + str(page))



def detail(url):
    """Fetch one listing page and scrape every video card on it.

    For each card (class 'col-md-2') a record dict with name / url /
    actor / score / ids is built and handed to vedioDetail(), which
    enriches it and eventually persists it.  Errors are best-effort
    swallowed so one bad page does not abort the crawl.
    """
    head = {
        'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36'
    }
    global nums
    try:
        time.sleep(1)  # throttle: be polite to the server
        req = urllib.request.Request(url, headers=head)
        with urllib.request.urlopen(req) as repose:
            data = repose.read().decode()
        soup = BeautifulSoup(data, 'html.parser')

        for item in soup.find_all(class_='col-md-2'):
            html = str(item)
            # BUG FIX: the original reused ONE dict for every card.  Since
            # saveData() passes the dict to insert_one(), which injects an
            # '_id' key, the second insert of the same dict would raise a
            # duplicate-key error, and stale fields leaked between items.
            # A fresh dict per card fixes both problems.
            obj = {
                'name': re.findall(findName, html)[0],
                'url': 'https://www.jisuysw.com/' + re.findall(findUrl, html)[0],
                'actor': re.findall(findActor, html)[0],
                'score': re.findall(findScore, html)[0],
                'ids': nums,
            }
            nums += 1
            vedioDetail(obj)
    except Exception:
        # Original used a bare except and tagged a discarded dict with
        # 'error1'; keep the best-effort behavior but let KeyboardInterrupt
        # and SystemExit propagate.
        pass




def vedioDetail(obj):
    """Fetch a video's detail page and enrich *obj* in place.

    Adds: href (absolute play-page URL), type (list of genre strings),
    date, lang, img (each the raw re.findall result list), then forwards
    the record to playUrl().  On any failure obj['status'] is set to
    'error2' and the record is not forwarded.
    """
    head = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36'
    }
    time.sleep(1)  # throttle between requests
    try:
        req = urllib.request.Request(obj['url'], headers=head)
        with urllib.request.urlopen(req) as repose:
            data = repose.read().decode()
        soup = BeautifulSoup(data, 'html.parser')
        vedioUrl = soup.find_all(class_='video-pic')
        details = soup.find_all(class_='info')

        obj['href'] = 'https://www.jisuysw.com/' + re.findall(findUrl, str(vedioUrl))[0]
        # findTypes isolates the genre <li>; findType then pulls each anchor.
        types = re.findall(findTypes, str(details))
        obj['type'] = re.findall(findType, types[0])
        obj['date'] = re.findall(findDate, str(details))
        obj['lang'] = re.findall(findLang, str(details))
        obj['img'] = re.findall(findImg, str(vedioUrl))
        playUrl(obj)
    except Exception:
        # Narrowed from a bare except so Ctrl-C still stops the crawl;
        # a failed page is marked and silently dropped, as before.
        obj['status'] = 'error2'
def playUrl(obj):
    """Resolve every episode's real player URL for one record.

    Counts the "第N集" markers inside the #con_playlist_2 element to get
    the episode total, rebuilds each episode URL as <prefix>/2-<n>.html
    (prefix = obj['href'] up to its first digit), resolves each via
    reGet(), stores the list in obj['url_arr'], then persists the record
    with saveData().  Any failure tags obj['status'] = 'error3'.
    """
    head = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36'
    }
    try:
        time.sleep(2)  # throttle: play pages are heavier
        req = urllib.request.Request(obj['href'], headers=head)
        with urllib.request.urlopen(req) as repose:
            data = repose.read().decode()

        # URL prefix up to the first digit; episode pages hang off it.
        urs = re.findall(findUrls, obj['href'])
        soup = BeautifulSoup(data, 'html.parser')
        collections = soup.find_all(id='con_playlist_2')
        episode_total = len(re.findall(findCollections, str(collections)))

        resolved = []
        for ep in range(1, episode_total + 1):
            newUrl = urs[0] + '/2-' + str(ep) + '.html'
            resolved.append(reGet(newUrl))
            print(newUrl)
        obj['url_arr'] = resolved
        saveData(obj)
    except Exception:
        # Narrowed from a bare except; failed records are tagged and kept
        # out of the database, matching the original best-effort design.
        obj['status'] = 'error3'

def reGet(url):
    """Load one play page in the shared Firefox instance and return the
    zanpiancms player iframe's src URL, or None on any failure.

    The iframe is injected by JavaScript, which is why selenium (global
    driver ``b``) is used here instead of urllib.
    """
    print(url)
    try:
        b.get(url)
        time.sleep(2)  # give the page's JS time to inject the player iframe
        match = re.search(r'class="zanpiancms-play-iframe" src="(.*?)"',
                          b.page_source)
        # Original did findall(...)[0] and relied on IndexError + a bare
        # except; test the match explicitly and narrow the except so
        # KeyboardInterrupt is not swallowed.
        return match.group(1) if match else None
    except Exception:
        return None

#
# conn = sqlite3.connect('movies')
# cursor = conn.cursor()


def saveData(obj):
    """Insert one scraped record into MongoDB (db 'oumei', coll 'movies').

    Best-effort: any failure (no mongod running, bad document, ...) is
    swallowed, matching the caller's error-tolerant design.
    """
    try:
        conn = pymongo.MongoClient(host='localhost', port=27017)
        try:
            print('连接成功')
            conn.oumei.movies.insert_one(obj)
        finally:
            # BUG FIX: the original never closed the client, leaking one
            # connection pool per saved record.
            conn.close()
        # BUG FIX: the original then re-read and printed the ENTIRE
        # collection after every single insert (leftover debug code,
        # O(collection size) per save) — removed.
    except Exception:
        return




if __name__ == "__main__":
    # Run the full crawl when executed as a script (no-op on import).
    main()
    # playUrl()
    # initData('oumei')
    # saveData()