# -*- coding: utf-8 -*-
import MySQLdb
import scrapy
import json
import datetime
import time
from urllib import parse
from scrapy import Request

from horsveision.items import CsrcItem


class FenghuangSpider(scrapy.Spider):
    """Spider for finance.ifeng.com: scrapes today's news articles that are
    not yet present in the ``spider_content`` table and yields them as
    :class:`CsrcItem` instances."""

    name = 'fenghuang'
    allowed_domains = ['finance.ifeng.com']
    start_urls = ['http://finance.ifeng.com/shanklist/1-66-']
    # NOTE(review): credentials are hard-coded in source; move them into
    # Scrapy settings (e.g. via from_crawler) before deploying publicly.
    db = MySQLdb.connect('localhost', 'root', '698a80e4212ba38f', 'toujiao', charset='utf8', use_unicode=True)

    @staticmethod
    def _extract_all_data(response):
        """Parse and return the ``allData`` JSON object embedded in the page.

        Both the listing and detail pages embed their payload in the 5th
        <script> tag as ``var allData = {...};var adData = ...``; strip the
        surrounding JavaScript and decode the JSON in between.
        """
        raw = response.xpath("//script[5]/text()").extract()[0].replace("var allData =", "")
        raw = raw[0:raw.rfind("var adData = ", 1)]
        raw = raw[0:raw.rfind(";", 1)]
        return json.loads(raw.strip("\n "))

    def parse(self, response):
        """Yield detail-page requests for today's not-yet-stored articles.

        :param response: listing-page response from ``start_urls``.
        """
        content_result = self._extract_all_data(response)
        new_list = content_result['newsstream']
        # Midnight of today as a unix timestamp, for same-day comparison.
        now_date = datetime.datetime.now().strftime("%Y-%m-%d")
        now_time = time.mktime(time.strptime(now_date, "%Y-%m-%d"))

        for value in new_list:
            news_date = value['newsTime'].split(" ")[0]
            news_time = time.mktime(time.strptime(news_date, "%Y-%m-%d"))
            if int(now_time) != int(news_time):
                continue  # only articles published today
            with self.db.cursor() as cursor:
                # Parameterized query: the title comes from scraped
                # (untrusted) content — never interpolate it into SQL.
                cursor.execute(
                    "SELECT title FROM spider_content WHERE title=%s",
                    (value['title'],),
                )
                if cursor.fetchone() is None:  # skip already-stored titles
                    url = value['url']
                    cover_img = value["thumbnails"]['image'][0]['url']
                    yield Request(url=parse.urljoin(response.url, url),
                                  callback=self.parse_detail,
                                  dont_filter=True,
                                  meta={"cover_img": parse.urljoin(response.url, cover_img)})

    def parse_detail(self, response):
        """Parse one article detail page into a :class:`CsrcItem`.

        Reads the cover image URL passed through ``meta`` by :meth:`parse`.
        """
        cover_img = response.meta.get("cover_img", "")
        content_result = self._extract_all_data(response)
        doc = content_result['docData']

        fenghuang_item = CsrcItem()
        fenghuang_item['title'] = doc['title']
        # newsTime is "YYYY-mm-dd HH:MM:SS"; stored as a unix timestamp.
        fenghuang_item['send_time'] = int(time.mktime(time.strptime(doc['newsTime'], '%Y-%m-%d %H:%M:%S')))
        fenghuang_item['source'] = "凤凰财经"
        fenghuang_item['content'] = doc['contentData']['contentList'][0]['data']
        fenghuang_item['cover_img'] = cover_img

        yield fenghuang_item
