#! /usr/bin/env python
# -*- coding: utf8 -*-
#
#@author www
#@date 2014-06-21
#
#crawl http://manre.me/
#
#

from scrapy.selector import Selector
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.contrib.spiders import CrawlSpider, Rule
from gaodeha_crawl.common.db_utils import DbUtils 
from gaodeha_crawl.common.constant import *
from gaodeha_crawl.items import PostItem
from scrapy import log
import time
import datetime
import re
import random

class ManreSpider(CrawlSpider):
    """Crawl posts from http://manre.me/ and yield them as PostItem records.

    Listing pages (/page/N.html) are followed; article pages
    (/archives/....html) are parsed by :meth:`parse_item`. On startup the
    configured categories are registered in ``tb_category`` if missing.
    """

    name = 'manre'
    allowed_domains = ['manre.me']
    start_urls = ['http://manre.me/page/1']
    # Upper bound for brief extraction; currently unused by parse_item_brief.
    max_brief_index = 256
    # Fallback key used when an article URL cannot be mapped to a listing page.
    default_category_url = "http://manre.me/page/1"
    # Per-listing-page category config: DB category id/name and a pool of
    # pen names to attribute posts to.
    category_urls = {
                "http://manre.me/page/1" : {
                        "category_id": "108200",
                        "category_name": "慢热生活",
                        "author": ["曼曼","小蘑菇", "盖地虎"],
                    },
            }
    rules = (
             Rule(SgmlLinkExtractor(allow='/page/\d+\.html', allow_domains=['manre.me']), follow=True),
             Rule(SgmlLinkExtractor(allow='/archives/[\d/\.]*\.html', allow_domains=['manre.me']), callback='parse_item'),
            )


    def __init__(self, *args, **kwargs):
        # Forward extra arguments so Scrapy can pass spider kwargs through.
        CrawlSpider.__init__(self, *args, **kwargs)
        self.db = DbUtils()
        sql = "select category_id, category_name from tb_category where category_show_status=1 and category_id=%s"
        tsql = "insert into tb_category (category_id, category_name) values (%s, %s)"
        for category_value in self.category_urls.values():
            category = self.db.fetchOneDict(sql, (category_value["category_id"],))
            if category:
                continue
            # Category is not registered (or hidden) in the DB yet: insert it
            # so crawled posts can reference a valid category_id.
            self.db.executeSql(tsql, (category_value["category_id"], category_value["category_name"]))


    def _category_for(self, url):
        """Return the category config dict for *url*.

        Falls back to the default listing page's config, which matches the
        previous behavior of always using the single configured category.
        """
        return self.category_urls.get(url, self.category_urls[self.default_category_url])


    def parse_item(self, response):
        """Parse one article page into a PostItem, or None if no title found."""
        log.msg("crawl parse url:" + response.url, level=log.DEBUG)
        selector = Selector(response)
        titles = selector.xpath('//h1[@class="title"]/text()').extract()
        if not titles:
            # Page has no recognizable title (layout change or non-article
            # page) — skip it instead of raising IndexError.
            log.msg("no title found, skip url:" + response.url, level=log.WARNING)
            return None
        now = int(time.time())  # single timestamp so insert/update times agree
        item = PostItem()
        item['post_title'] = titles[0]
        item['post_content'] = self.parse_item_content(selector)
        item['post_type'] = POST_TYPE_MIX
        item['post_author'] = self.parse_item_author(response.url)
        item['post_status'] = POST_STATUS_CHECK
        item['post_brief'] = self.parse_item_brief(selector)
        item['comment_count'] = 0
        item['praise_count'] = 0
        item['hate_count'] = 0
        item['collect_count'] = 0
        item['share_count'] = 0
        item['post_category_id'] = self.parse_item_category_id(response.url)
        item['post_category_name'] = self.parse_item_category_name(response.url)
        item['post_origin_url'] = response.url
        item['post_origin_sitename'] = '慢热网'
        item['post_uuid'] = response.url
        item['post_insert_time'] = now
        item['post_show_time'] = self.parse_item_time(selector)
        item['post_update_time'] = now
        item['post_font_images'] = ''
        item['post_vedio_url'] = ''
        item['post_tags'] = ''
        item['top_order'] = 100
        return item

    def parse_item_content(self, selector):
        """Extract the article body as paragraph/image nodes wrapped in <p> tags.

        NOTE: "enrty" is the site's actual (misspelled) CSS class name — do
        not "fix" it. Ad containers (class "ad_single") are excluded.
        """
        nodes = selector.xpath('//div[@class="enrty"]/p/text()|//div[@class="enrty"]/div[@class!="ad_single"]/img|//div[@class="enrty"]/center/img|//div[@class="enrty"]/p//img').extract()
        # join() instead of repeated concatenation — linear, not quadratic.
        return ''.join('<p>' + node.strip() + '</p>' for node in nodes)

    def parse_item_brief(self, selector):
        """Brief extraction not implemented for this site; always empty."""
        return ""


    def parse_item_time(self, selector):
        """Publication time is not parsed from the page; use crawl time."""
        return int(time.time())


    def parse_item_author(self, url):
        """Pick a random pen name from the category's author pool."""
        return random.choice(self._category_for(url)["author"])

    def parse_item_category_id(self, url):
        """DB category id for the post's category."""
        return self._category_for(url)["category_id"]

    def parse_item_category_name(self, url):
        """Human-readable category name for the post's category."""
        return self._category_for(url)["category_name"]



