#!/usr/bin/python
#-*-coding:utf-8-*-

import codecs
import json
import logging
from datetime import datetime
from hashlib import md5

import MySQLdb
import MySQLdb.cursors
from scrapy import signals
from twisted.enterprise import adbapi

class SaveToMysql(object):
    """Scrapy item pipeline that upserts crawled items into MySQL.

    Database work runs on a Twisted adbapi connection pool so writes do
    not block the crawler's reactor.  Rows in the ``cnblogsinfo`` table
    are keyed by the MD5 of the item's link URL, so re-crawling a page
    updates the existing row instead of inserting a duplicate.
    """

    def __init__(self, dbpool):
        """Store the adbapi pool built by :meth:`from_crawler`.

        :param dbpool: a ``twisted.enterprise.adbapi.ConnectionPool``.
        """
        # NOTE(review): the original __init__ contained dead copy-pasted
        # MongoDB setup referencing undefined names (MongoClient, self.style,
        # traceback); it has been removed.
        self.dbpool = dbpool

    @classmethod
    def from_crawler(cls, crawler):
        """Alternate constructor: build the MySQL pool from crawler settings.

        Reads the ``MYSQL_MASTER_*`` keys from ``crawler.settings``.
        """
        # Bug fix: the original referenced a bare, undefined `settings` name.
        settings = crawler.settings
        dbargs = dict(
            host=settings['MYSQL_MASTER_HOST'],
            port=settings['MYSQL_MASTER_PORT'],
            db=settings['MYSQL_MASTER_DB'],
            user=settings['MYSQL_MASTER_USER'],
            passwd=settings['MYSQL_MASTER_PASSWORD'],
            charset='utf8',
            cursorclass=MySQLdb.cursors.DictCursor,
            use_unicode=True,
        )
        dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
        return cls(dbpool)

    def process_item(self, item, spider):
        """Schedule an upsert for *item* on the connection pool.

        :returns: a Deferred that fires with *item* once the database
            interaction finishes (or its failure has been logged), so the
            item continues through the pipeline either way.
        """
        # Bug fix: the original had ~20 unreachable lines after `return d`
        # (leftover MongoDB insert code referencing undefined self.db/log);
        # they have been deleted.
        d = self.dbpool.runInteraction(self._do_upinsert, item, spider)
        d.addErrback(self._handle_error, item, spider)
        # Pass the item on regardless of success or failure.
        d.addBoth(lambda _: item)
        return d

    def _do_upinsert(self, conn, item, spider):
        """Insert or update a single row in ``cnblogsinfo``.

        Runs in an adbapi pool thread; *conn* is a DB-API cursor supplied
        by ``runInteraction``.  The row is keyed on the MD5 of the item's
        link so the same page is never stored twice.
        """
        linkmd5id = self._get_linkmd5id(item)
        now = datetime.utcnow().replace(microsecond=0).isoformat(' ')
        conn.execute("""
                select 1 from cnblogsinfo where linkmd5id = %s
        """, (linkmd5id, ))
        ret = conn.fetchone()

        if ret:
            conn.execute("""
                update cnblogsinfo set title = %s, description = %s, link = %s, listUrl = %s, updated = %s where linkmd5id = %s
            """, (item['title'], item['desc'], item['link'], item['listUrl'], now, linkmd5id))
        else:
            conn.execute("""
                insert into cnblogsinfo(linkmd5id, title, description, link, listUrl, updated) 
                values(%s, %s, %s, %s, %s, %s)
            """, (linkmd5id, item['title'], item['desc'], item['link'], item['listUrl'], now))

    def _get_linkmd5id(self, item):
        """Return the MD5 hex digest of the item's link (dedup key)."""
        # Bug fix: hashlib requires bytes on Python 3; encode unicode links.
        # Still backward-compatible with Python 2 byte strings.
        link = item['link']
        if not isinstance(link, bytes):
            link = link.encode('utf-8')
        return md5(link).hexdigest()

    def _handle_error(self, failure, item, spider):
        """Log a database failure; the item still flows on via addBoth.

        Bug fix: the parameter was misspelled ``failue`` while the body
        used ``failure`` (NameError), and ``log`` was never defined.
        """
        logging.getLogger(__name__).error(failure)