#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2018-10-05 23:30:54
# Project: movie_douban_com
import pymongo
from pyspider.libs.base_handler import *


class Handler(BaseHandler):
    """Daily crawler for movie.douban.com's "explore" page.

    The page is rendered with a JS-capable fetcher (PhantomJS); parsed
    movie entries are persisted to a local MongoDB instance.
    """

    crawl_config = {
        'itag': 'v0.0.1'
    }

    @every(minutes=24 * 60)
    def on_start(self):
        """Scheduled entry point (once per day).

        Fetches the explore page with JS rendering; the injected script
        clicks the '.more' button after 1s so extra items are loaded
        before parsing.
        """
        self.crawl('http://movie.douban.com/explore',
                   fetch_type='js', js_script="""
               function() {
                   setTimeout("$('.more').click()", 1000);
               }""", callback=self.phantomjs_parser)

    def phantomjs_parser(self, response):
        """Extract one record per `a.item` card on the rendered page.

        Returns a list of dicts with keys 'title', 'rate' and 'url'
        (possibly empty when no cards are present).
        """
        return [{
            # The <p> holds both plain text nodes (the title) and a
            # <strong> child (the rating); keep only the string children.
            "title": "".join(
                s for s in x('p').contents() if isinstance(s, str)
            ).strip(),
            "rate": x('p strong').text(),
            "url": x.attr.href,
        } for x in response.doc('a.item').items()]

    def on_result(self, result):
        """Persist parsed results into MongoDB.

        `result` is whatever the parser callback returned — here a list
        of dicts, but pyspider may also deliver a single dict. Falsy
        results (None, empty list) are skipped.
        """
        if not result:
            return

        client = pymongo.MongoClient(host='127.0.0.1', port=27017)
        try:
            coll = client['spider_db']['movie_douban_com']
            # Collection.insert() was deprecated in pymongo 3.0 and
            # removed in pymongo 4; use the explicit API instead.
            if isinstance(result, list):
                coll.insert_many(result)
            else:
                coll.insert_one(result)
        finally:
            # Release the connection pool instead of leaking a client
            # per result.
            client.close()
