# -*- coding: utf-8 -*-
"""
__title__ = ''
__author__ = 'Yang'
__mtime__ = '2018/12/5'
# code is far away from bugs with the god animal protecting
    I love animals. They taste delicious.
              ┏┓      ┏┓
            ┏┛┻━━━┛┻┓
            ┃      ☃      ┃
            ┃  ┳┛  ┗┳  ┃
            ┃      ┻      ┃
            ┗━┓      ┏━┛
                ┃      ┗━━━┓
                ┃  神兽保佑    ┣┓
                ┃　永无BUG！   ┏┛
                ┗┓┓┏━┳┓┏┛
                  ┃┫┫  ┃┫┫
                  ┗┻┛  ┗┻┛
"""
"""
数据库结构
    create table doupan_riju_pinlun(
        id int primary key auto_increment,
        name varchar(50),
        url varchar(50),
        pf varchar(10),
        dy varchar(50),
        zy varchar(50),
        lx varchar(50),
        zpgj varchar(50),
        nf varchar(10));
"""

from bs4 import BeautifulSoup
import requests
import pymysql
import time

# HTTP request headers sent with every page fetch; a desktop-browser
# User-Agent plus a Referer makes the scraper look like a normal visit
# and reduces the chance of being blocked.
headers = {'Accept': '*/*',
           'Accept-Language': 'en-US,en;q=0.8',
           'Cache-Control': 'max-age=0',
           'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36',
           'Connection': 'keep-alive',
           'Referer': 'http://www.baidu.com/'
           }
# Douban list-page URL template; {0} is the 0-based offset of the first
# entry on the page (the list paginates 25 entries per page).
url = 'https://www.douban.com/doulist/3907668/?start={0}&sort=seq&playable=0&sub_type='


def get_html(url):
    """Fetch *url* and return the page parsed as a BeautifulSoup tree.

    Bug fix: the original called ``requests.get(url, headers)``, which
    passes the headers dict as the second *positional* argument — that is
    ``params`` (appended as a query string) — so the custom headers were
    never actually sent.  They must go through the ``headers=`` keyword.
    """
    page = requests.get(url, headers=headers)
    # Use the encoding requests sniffs from the body, not the header default,
    # so the Chinese text decodes correctly.
    page.encoding = page.apparent_encoding
    soup = BeautifulSoup(page.text, features='lxml')
    return soup


def parser_list(abstract, s):
    """Return the text after label *s* in the first line of *abstract*
    containing it, or None when no line carries the label.

    Bug fix: the original used ``i.strip(s)``, which strips the label's
    *characters* from both ends of the line rather than removing the label
    itself — e.g. ``'主演:主某'.strip('主演:')`` corrupts the value to
    ``'某'``.  ``str.partition`` cuts exactly at the first occurrence of
    the label instead.
    """
    for line in abstract:
        if s in line:
            return line.partition(s)[2]
    return None


def parser_html(soup, cursor, db):
    """Parse one Douban doulist page and insert every entry into MySQL.

    Args:
        soup: BeautifulSoup tree of a list page (from get_html).
        cursor: open pymysql cursor used for the inserts.
        db: the pymysql connection; it is committed AND closed before
            returning, so the caller must open a fresh connection per page.

    Fixes vs. the original:
    - Parameterized SQL instead of str.format: no injection risk and no
      need for the fragile ``'`` → ``’`` quote-replace hack on the title.
    - Iterates the three result lists with zip() instead of
      ``range(len(titles))`` indexing.
    - Catches Exception instead of a bare ``except:``.
    """
    titles = soup.select('div.title  > a')
    ratings = soup.select('div.rating')
    abstracts = soup.select('div.abstract')
    print(len(titles), len(ratings), len(abstracts))
    # 9 columns: auto-increment id (null) + the 8 scraped fields.
    sql = ("insert into doupan_riju_pinlun "
           "values(null,%s,%s,%s,%s,%s,%s,%s,%s)")
    for title_tag, rating_tag, abstract_tag in zip(titles, ratings, abstracts):
        title = title_tag.text.replace(' ', '').replace('\n', '')
        href = title_tag.get('href')
        # Rating block renders as blank lines around the score; index 2 is
        # the numeric score after stripping spaces — TODO confirm against
        # the live page layout.
        rating_num = rating_tag.text.replace(' ', '').split('\n')[2]
        # One "标签:值" entry per line after collapsing blank lines.
        abstract = abstract_tag.text.replace(' ', '').replace('\n\n', '\n').split('\n')
        params = (title, href, rating_num,
                  parser_list(abstract, '导演:'),
                  parser_list(abstract, '主演:'),
                  parser_list(abstract, '类型:'),
                  parser_list(abstract, '制片国家/地区:'),
                  parser_list(abstract, '年份:'))
        try:
            cursor.execute(sql, params)
        except Exception:
            # Best-effort: report the failing row and keep going.
            print(sql)
            print(abstract)
    db.commit()
    db.close()


if __name__ == '__main__':
    # Walk the list page by page: offsets 0, 25, ..., 175 (25 entries each).
    # parser_html() commits and closes the connection, so a fresh one is
    # opened for every page.
    for start in range(0, 200, 25):
        connection = pymysql.connect(host='localhost', port=3306, user='root', passwd='123', db='pc')
        cur = connection.cursor()
        page_url = url.format(start)
        print(start, page_url)
        parser_html(get_html(page_url), cur, connection)
        # Be polite to the server between page fetches.
        time.sleep(10)
