# -*- coding: utf-8 -*-
import scrapy
from scrapy.http import Request
from scrapy.http import FormRequest
from qiniu import Auth, put_file, etag, urlsafe_base64_encode
import sys
import json
import os
import codecs
import MySQLdb
import urllib
import urllib2
import requests
import time
import datetime
# from PIL import Image
import cStringIO

class XinbangSpider(scrapy.Spider):
    """Scrapy spider that scrapes WeChat official-account articles via newrank.cn.

    Flow:
      1. ``parse`` loads per-account form payloads from the ``scrapy_gzh_info``
         MySQL table and POSTs each to newrank's ``getAccountArticle`` endpoint.
      2. ``get_wecht_info_by_wechat_id`` diffs the returned article list against
         ``status.json`` (last seen title per account) and queues only new articles.
      3. ``get_article_detail`` extracts title/editor/body/share fields from each
         WeChat article page and inserts them into ``scrapy_article_test``.

    ``fetchResource`` is a helper that mirrors third-party images into Qiniu
    cloud storage (currently only used by commented-out code).

    NOTE(review): this file is Python 2 only (print statements, ``except E,e``,
    ``reload(sys)``/``setdefaultencoding``, ``urllib2``, ``cStringIO``).
    """
    name = "xinbang"
    # Old remote MySQL credentials, kept commented out for reference.
    # mysql_host = "rm-wz9326c6mt06wsrigo.mysql.rds.aliyuncs.com"
    # # mysql_host = "rm-wz9326c6mt06wsrig.mysql.rds.aliyuncs.com"
    # mysql_user = "root_zjx"
    # mysql_passwd = "zjx285831!@#"
    # NOTE(review): live DB credentials hardcoded in source — should be moved
    # to Scrapy settings or environment variables.
    mysql_host = "localhost"
    mysql_user = "root_zjx"
    mysql_passwd = "zjx285831!"

    # Qiniu cloud-storage credentials used by fetchResource().
    # NOTE(review): attribute name 'ccess_key' is missing the leading 'a';
    # it is referenced consistently as self.ccess_key, so renaming would
    # need a coordinated change.
    ccess_key = 'DC5XOomwrxFJg-8hht7QpC-l7RXK95KcwBXA7XXD'
    secret_key = 'i0XgI-BJo7Wdr6XZZXGWy7SIIazLQOGYWJtwQNGj'
    entry = 'wechatscrapy'

    # allowed_domains = ["xinbang"]
    start_urls = ["http://www.newrank.cn/"]
    # JSON file mapping account name -> title of the most recently scraped article.
    statusFileName = 'status.json'
    # Title of the last article scraped for the account currently being processed.
    last_spider_title = ''
    # Newrank form payloads distinguishing each WeChat official account
    # (sample payloads kept commented out below; live data comes from MySQL).
    # form_date =[{'flag': 'false',#有娃随身听
    #                 'uuid':'349370B6DFD9BC695E2E9067561EC437',
    #                 'nonce':'d389db920',
    #                 'xyz':'312f12a82c0a8193a4c8429e0eb78555'},
    #             {'flag':'true',#小道消息
    #                 'uuid':'C1080392D32744FD45596A58F74C7D84',
    #                 'nonce':'7dbe5cf89',
    #                 'xyz':'7c3a0d18f06eae67fc62f9a237ecc6ac'},
    #             {'flag':'true',
    #                 'uuid':'6FE5E942DE90F9304B2E89B5CA2830B6',
    #                 'nonce':'ec2390188',
    #                 'xyz':'87398837f57b0cd15ca9602735b7b2a8'}]
    # Populated from the scrapy_gzh_info table inside parse().
    form_date = []
    # NOTE(review): appears unused anywhere in this file.
    db_mysql = ''

    # Executed once at class-definition time: Python 2 hack forcing the
    # process-wide default string encoding to UTF-8.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    def parse(self, response):
        """Load per-account form payloads from MySQL and POST each to
        newrank's getAccountArticle endpoint.

        Returns a list of FormRequest objects whose callback is
        get_wecht_info_by_wechat_id.
        """
        # "crawling" progress marker (runtime string, kept as-is).
        print "抓取中"
        db = MySQLdb.connect(host=self.mysql_host, user=self.mysql_user,passwd=self.mysql_passwd, charset="utf8")
        cursor = db.cursor()
        cursor.execute("use `%s`;" % "smart_home")
        select_sql = "select * from scrapy_gzh_info"
        try:
            cursor.execute(select_sql)
            result = cursor.fetchall()
            # Columns 2..5 are assumed to be flag/uuid/nonce/xyz — TODO confirm
            # against the scrapy_gzh_info schema.
            for row in result:
                self.form_date.append({'flag': row[2],
                                'uuid': row[3],
                                'nonce': row[4],
                                'xyz': row[5]})
        except:
            # Rollback in case there is any error
            # NOTE(review): bare except swallows the error and closes the
            # connection here, then db.close() runs again below — MySQLdb
            # normally raises on a second close. TODO confirm/fix.
            db.close()
        db.close()

        requests = []
        for formdata in self.form_date:
            print formdata
            # Hard-coded session cookie string passed via meta.
            # NOTE(review): the meta key is 'cookiejar ' with a trailing
            # space, so Scrapy's cookiejar support will not see it — verify
            # whether this is intentional.
            cookie = {'cookiejar ': 'tt_token=true; ticket=gQEc8DwAAAAAAAAAAS5odHRwOi8vd2VpeGluLnFxLmNvbS9xLzAya3BmdTBfa0ljbTMxTUw3QU5wMW4AAgQf_eNZAwQQDgAA; token=5AE2DE0E4A3D9739AE8773A08D8B522A; UM_distinctid=15f228487c449f-0a0a994d6187d4-178123e-100200-15f228487c542b; __root_domain_v=.newrank.cn; _qddamta_2852150610=3-0; tt_token=true; ticket=gQEc8DwAAAAAAAAAAS5odHRwOi8vd2VpeGluLnFxLmNvbS9xLzAya3BmdTBfa0ljbTMxTUw3QU5wMW4AAgQf_eNZAwQQDgAA; token=5AE2DE0E4A3D9739AE8773A08D8B522A; _qddaz=QD.4tvpml.3famv4.j8tfbn6f; _qdda=3-1.1; _qddab=3-g65l1t.j8tfbn8p; CNZZDATA1253878005=432930020-1508110936-%7C1508110936; Hm_lvt_a19fd7224d30e3c8a6558dcb38c4beed=1508112632; Hm_lpvt_a19fd7224d30e3c8a6558dcb38c4beed=1508114045'}
            url = "http://www.newrank.cn/xdnphb/detail/getAccountArticle"
            request = FormRequest(url=url, callback=self.get_wecht_info_by_wechat_id,formdata=formdata,meta=cookie)
            # request = Request(url=url, callback=self.get_wecht_info_by_wechat_id,formdata=formdata,cookie=cookie)
            requests.append(request)
        return requests

    def get_wecht_info_by_wechat_id(self,response):
        """Parse newrank's JSON article list for one account, persist the
        newest title into status.json, and queue a Request per article that
        is newer than the previously recorded title.

        Returns a list of Requests whose callback is get_article_detail.
        """
        print response.body
        jsonBody = json.loads(response.body.encode('utf-8'))
        print jsonBody
        # value.lastestArticle: list of articles, newest first (each has at
        # least 'account', 'title', 'url') — inferred from usage below.
        articles = jsonBody['value']['lastestArticle']
        account = jsonBody['value']['lastestArticle'][0]['account']
        if os.path.exists(os.getcwd()+'/'+self.statusFileName):
            # Status file exists: read last-seen title, then rewrite the file
            # in place with the newest title for this account.
            with codecs.open(self.statusFileName, 'rb+',"utf-8") as statusFile:
                    data = json.load(statusFile)
                    if account in data:
                        self.last_spider_title = data[account]
                    data[account] = articles[0]['title']
                    statusFile.seek(0)
                    statusFile.truncate()
                    statusFile.write(json.dumps(data))
                    statusFile.close()
        else:
            # First run: create the status file with this account's newest title.
            with codecs.open(self.statusFileName, 'wb',"utf-8") as statusFile:
                data = {account:articles[0]['title']}
                statusFile.write(json.dumps(data))
                statusFile.close()


        # print articles[0]['title'].encode("utf-8")
        requests = []
        # NOTE(review): 'aw+' is not a standard open() mode; Python 2 treats
        # it effectively as append — TODO confirm and normalize to 'a+'.
        with open('test.txt','aw+') as f:
            for article in articles:
                article_url = article['url']
                # Stop at the first article already scraped last run
                # (list is newest-first).
                if article['title'] == self.last_spider_title:
                    break
                    #f.close()
                    #sys.exit(0)
                f.write(article_url+"\n")
                request = Request(url = article_url,callback=self.get_article_detail)
                requests.append(request)
        # Redundant: the with-block has already closed f (close() on a
        # closed file is a no-op).
        f.close()
        return requests

    def get_article_detail(self,response):
        """Scrape one WeChat article page and insert it into MySQL
        (table scrapy_article_test), including share image/summary fields.
        """
        # Whether a WeChat share image has been selected (currently only
        # read by commented-out code below).
        is_set_wx_fx_pic = False

        article_url = response.url
        title = response.xpath('//h2[@id="activity-name"]//text()').extract_first()
        if title is None:
            title = ''
        else :
            title = title.strip()
        article_infos = response.xpath('//em[@class="rich_media_meta rich_media_meta_text"]//text()').extract()
        if len(article_infos) >=2:
            redactor = article_infos[1] # editor / compiler
        else:
            redactor = ''
        # NOTE(review): raises IndexError when article_infos is empty —
        # the len>=2 guard above only protects redactor. TODO confirm.
        release_time = article_infos[0]

        wechat_account = response.xpath('//a[@class="rich_media_meta rich_media_meta_link rich_media_meta_nickname"]//text()').extract_first()
        ''## article body content
        detail_selector = response.xpath('//div[@id="js_content"]')
        detail_info = "".join(response.xpath('//div[@id="js_content"]/node()').extract())

        # Pick a share image by aspect ratio.
        # NOTE(review): XPath 1.0 does not chain comparisons like Python;
        # '1.5>@data-ratio>0.6' evaluates as '(1.5>@data-ratio)>0.6'
        # (boolean compared to 0.6) — verify this selects what is intended.
        wx_fx_pic = detail_selector.xpath('.//img[1.5>@data-ratio>0.6]/@data-src').extract_first()
        # wx_fx_pic_selector = wx_fx_pic_selector.xpath('.//img[@data-ratio<1.5]')
        # wx_fx_pic = wx_fx_pic_selector.xpath('.//img[@data-w>200]/@data-src').extract_first()
        # print 'wx_fx_pic=' + wx_fx_pic

        # Share summary: first 30 characters of the article's plain text.
        wx_fx_info = "".join(response.xpath('//div[@id="js_content"]//text()').extract())
        if wx_fx_info:
            wx_fx_info = wx_fx_info.strip()[:30]
        else:
            wx_fx_info = ''
        # Image handling.
        # TODO: share image / share summary processing via Qiniu
        # (mirroring logic kept commented out below).
        imgs = detail_selector.xpath('.//img/@data-src').extract()

        # for img_url in imgs:
        #     qiniu_info = self.fetchResource(img_url)
        #     # print qiniu_info
        #     if qiniu_info and qiniu_info.has_key("key"):#替换图片
        #         img_url_qiniu = "http://oq2qjgikj.bkt.clouddn.com/"+qiniu_info['key']
        #         # print img_url_qiniu
        #         detail_info = detail_info.replace(img_url,img_url_qiniu)
        #         #分享图片处理
        #
        #         if wx_fx_pic == img_url :
        #             wx_fx_pic = img_url_qiniu
        #         # image_file = cStringIO.StringIO(urllib2.urlopen(img_url_qiniu).read())
        #         # try:
        #         #     img = Image.open(image_file)
        #         #     #    img.show()
        #         #     (x,y) = img.size
        #         #
        #         #     if is_set_wx_fx_pic == False and x >200 and 0.8<= x/y and x/y <=1.5:
        #         #         wx_fx_pic =  img_url_qiniu
        #         #         print 'wx_fx_pic =' + wx_fx_pic
        #         # except Exception,e:
        #         #     print e.message
        detail_info = MySQLdb.escape_string(detail_info.encode('utf-8'))
        create_time = time.strftime("%Y-%m-%d %H:%M:%S")

        # Insert the record.
        db = MySQLdb.connect(host=self.mysql_host, user=self.mysql_user,passwd=self.mysql_passwd, charset="utf8")
        cursor = db.cursor()
        cursor.execute("use `%s`;" % "smart_home")
        # Target table and values.
        # NOTE(review): SQL built with %s string formatting — only
        # detail_info is escaped above; title/redactor/etc. with quotes will
        # break the statement (and this is injectable). Should use
        # cursor.execute(sql, params) with a parameterized query.
        sql = "insert into `scrapy_article_test`(title,compiler,release_time,content,wechat_account,create_time,wx_fx_pic,wx_fx_info,article_url)VALUES('%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (title.strip(),redactor, release_time, detail_info.strip(),wechat_account,create_time, wx_fx_pic,wx_fx_info,article_url)
        print 'wx_fx_pic=' +wx_fx_pic
        print 'wx_fx_info=' +wx_fx_info
        with open('test1.txt','aw+') as f:
            f.write(sql+'\n')
            f.close()
        try:
            n = cursor.execute(sql)
            print n
            # NOTE(review): these two updates touch 'scrapy_article', not the
            # 'scrapy_article_test' table inserted into above — verify which
            # table is the real target.
            cursor.execute("update scrapy_article set content=REPLACE(content,'data-src','src');")
            sql_gzh_id = "update scrapy_article set wechat_gzh_id=15 where wechat_account='%s'" % wechat_account
            cursor.execute(sql_gzh_id)
            db.commit()
        except Exception,e:
            # Rollback in case there is any error
            print Exception,":",e
            print "mysql save failed %s" % title.strip()
            db.rollback()
        db.close()


    # Fetch a third-party resource into Qiniu cloud storage.
    def fetchResource(self, other_url):
        """Ask Qiniu's fetch API to mirror *other_url* into the configured
        bucket ('wechatscrapy').

        Returns the parsed JSON response dict when it contains a 'key'
        (the stored object's key), otherwise False.
        """
        ccess_key = self.ccess_key
        secret_key = self.secret_key

        # url = 'http://iovip.qbox.me' # East China region
        # South China (z2) region endpoint.
        url = 'http://iovip-z2.qbox.me'
        body = '/fetch/<EncodedURL>/to/<EncodedEntryURI>'
        content_type = 'application/x-www-form-urlencoded'

        # NOTE(review): Qiniu's fetch API expects EncodedEntryURI as
        # base64("bucket:key"); here only the bucket name is encoded —
        # presumably Qiniu then derives the key itself. TODO confirm.
        entry = self.entry
        encoded_entry_url = urlsafe_base64_encode(entry)
        other_url = other_url

        encoded_url = urlsafe_base64_encode(other_url)
        body = body.replace('<EncodedURL>',encoded_url).replace('<EncodedEntryURI>',encoded_entry_url)
        url = url+body

        # Sign the request with Qiniu's QBox token scheme.
        auth = Auth(ccess_key,secret_key)
        authorization =auth.token_of_request(url=url,content_type=content_type)
        authorization = 'QBox ' + authorization

        headerdata =  {'Host':'iovip-z2.qbox.me','Content-Type':content_type,'Authorization':authorization}
        # print(url)
        # print(body)
        # print(authorization)
        r = requests.post(url=url,headers=headerdata)
        # json_str = json.dumps(r.text)
        s1 = json.loads(r.text)
        # print json_str
        # db = MySQLdb.connect(host="localhost", user="alisql",passwd="alisql@277285590", charset="utf8",unix_socket="/soft/mysql/mysql.sock")
        if s1.has_key("key"):
            return s1
        return False