# -*- coding: utf-8 -*-
import json
import codecs
import time
#import xlsxwriter
import MySQLdb
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html


class JianshuPipeline(object):
      """Scrapy item pipeline for Jianshu user items.

      Each item is written twice: as one JSON object per line to a local
      JSON-lines file, and as a row in the MySQL table ``jianshu_uesr``.
      """

      def __init__(self):
            # Raw string avoids fragile backslash handling in the Windows
            # path; the resulting path is identical to the original.
            self.file = codecs.open(
                  r"E:\jiaocheng\F\Project\jianshu\data\jianshu_info_test.json",
                  'wb', encoding='utf-8')
            self.conn = MySQLdb.connect(host='localhost', user='root',
                                        passwd='root', db='jianshu',
                                        port=3306, charset='utf8')
            self.cur = self.conn.cursor()

      def process_item(self, item, spider):
            # Append the item as one JSON object per line; ensure_ascii=False
            # keeps Chinese text readable in the output file.
            line = json.dumps(dict(item), ensure_ascii=False) + '\n'
            self.file.write(line)
            try:
                  value = [item['name'], item['users_url'], item['attention'],
                           item['fans'], item['article'], item['words_num'],
                           item['gain_like'], item['intro']]
                  # Parameterized insert (driver-side escaping via %s).
                  # NOTE(review): table name 'jianshu_uesr' looks like a typo
                  # for 'jianshu_user' -- confirm against the actual schema
                  # before renaming.
                  self.cur.execute(
                        'insert into jianshu_uesr(name,users_url,attention,fans,'
                        'article,words_num,gain_like,intro) '
                        'values(%s,%s,%s,%s,%s,%s,%s,%s)', value)
                  self.conn.commit()
                  print("WRITE******mysql*****SUCCESS")
            except MySQLdb.Error as e:
                  # Roll back the failed transaction so the connection stays
                  # usable for subsequent items.
                  self.conn.rollback()
                  print("Mysql Error %d: %s" % (e.args[0], e.args[1]))
            return item

      def close_spider(self, spider):
            # Release the file handle and database resources when the
            # spider finishes.
            self.file.close()
            self.cur.close()
            self.conn.close()

