# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json,requests,os,pymysql
from scrapy.exceptions import DropItem

class writeJson_images(object):
	"""Pipeline that appends each item to dangdang.json and downloads its cover image.

	Items are written as one JSON object per line (comma-terminated), and the
	cover image referenced by ``item['bookImg']`` is saved under ./书籍封面图片/.
	"""

	IMG_DIR = './书籍封面图片'

	def __init__(self):
		# Binary mode so we can write explicitly UTF-8 encoded bytes.
		self.file = open('dangdang.json', 'wb')

	def process_item(self, item, spider):
		"""Serialize the item to the JSON file and fetch its cover image."""
		content = (json.dumps(dict(item), ensure_ascii=False) + ',\n').encode('utf8')
		self.file.write(content)
		self._getImg(item['bookImg'], item['bookName'])
		return item

	def close_spider(self, spider):
		"""Close the JSON output file when the spider finishes."""
		self.file.close()

	def _getImg(self, imagesUrl, bookName):
		"""Download one cover image; a failed download must not drop the item.

		The image was only supplementary data, so network/HTTP errors are
		logged via the spider-less print and swallowed instead of propagating.
		"""
		suffix = '.' + imagesUrl.split('.')[-1]
		# Strip path separators so a malicious/odd book name cannot escape IMG_DIR.
		safeName = bookName.replace('/', '_').replace('\\', '_')
		# exist_ok avoids the check-then-create race of exists()+mkdir().
		os.makedirs(self.IMG_DIR, exist_ok=True)
		try:
			# Timeout so a stalled server cannot hang the whole crawl;
			# raise_for_status so an error page is not saved as an image.
			res = requests.get(imagesUrl, timeout=10)
			res.raise_for_status()
		except requests.RequestException as e:
			print('image download failed for %s: %s' % (imagesUrl, e))
			return
		with open(os.path.join(self.IMG_DIR, safeName + suffix), 'wb') as f:
			f.write(res.content)

class save_database(object):
	"""Pipeline that inserts each scraped book into the MySQL table dangdangbook.

	Connection settings are pulled from the crawler settings via from_crawler;
	the connection is opened in open_spider and closed in close_spider.
	"""

	def __init__(self, host, user, password, database, port):
		self.host = host
		self.user = user
		self.password = password
		self.database = database
		self.port = port

	def open_spider(self, spider):
		"""Open the MySQL connection and cursor.

		pymysql 1.0+ removed positional connect() arguments, so every
		parameter is passed by keyword.
		"""
		self.db = pymysql.connect(
			host=self.host,
			user=self.user,
			password=self.password,
			database=self.database,
			charset="utf8",
			port=self.port,
		)
		self.cursor = self.db.cursor()

	@classmethod
	def from_crawler(cls, crawler):
		"""Alternate constructor reading MYSQL_* values from Scrapy settings."""
		return cls(
			host=crawler.settings.get("MYSQL_HOST"),
			user=crawler.settings.get("MYSQL_USER"),
			password=crawler.settings.get("MYSQL_PASS"),
			database=crawler.settings.get("MYSQL_DATABASE"),
			port=crawler.settings.get("MYSQL_PORT"),
		)

	def process_item(self, item, spider):
		"""Insert one book row; drop the item if the insert fails.

		Uses a parameterized query so values containing quotes (or hostile
		input) cannot break or inject into the SQL statement.
		"""
		bookdata = dict(item)
		sql = (
			'insert into dangdangbook '
			'(bookname,booktitle,bookimg,bookurl,bookprice,bookwritor) '
			'values (%s,%s,%s,%s,%s,%s)'
		)
		params = (
			bookdata['bookName'],
			bookdata['bookTitle'],
			bookdata['bookImg'],
			bookdata['bookUrl'],
			bookdata['bookPrice'],
			bookdata['bookWriter'],
		)
		try:
			self.cursor.execute(sql, params)
			self.db.commit()
			return item
		except Exception as e:
			# Roll back so the connection is not left in a dirty transaction.
			self.db.rollback()
			raise DropItem("pysql insert false :::: %s" % item) from e

	def close_spider(self, spider):
		"""Close the database connection when the spider finishes."""
		self.db.close()
