# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from .helper import SQL
from .items import CarhomeItem, CarParamentsItem


class CarhomePipeline(object):
	"""Persist scraped car items into MySQL, skipping records already stored.

	Handles two item types:
	- CarhomeItem: basic car model data (name, model, prices, ...)
	- CarParamentsItem: per-model technical parameters (intake, horsepower, ...)
	"""

	def __init__(self, settings):
		# Database helper configured from the crawler's MYSQL setting.
		self.sql = SQL(settings.get('MYSQL'))

	@classmethod
	def from_crawler(cls, crawler):
		"""Scrapy factory hook: build the pipeline from crawler settings."""
		return cls(crawler.settings)

	def process_item(self, item, spider):
		"""Insert the item's data unless a matching record already exists.

		Returns the item unchanged so later pipelines can keep processing it.
		"""
		if isinstance(item, CarhomeItem):
			# select_car's first field is 1 when this model_id is already
			# stored — presumably an EXISTS-style query; confirm in helper.SQL.
			ret = self.sql.select_car(item.get('model_id'))
			if ret[0] == 1:
				print("已经存在!")
			else:
				# Pass fields directly; previous version bound them to locals
				# named `type` and `id`, shadowing the builtins.
				self.sql.insert_car_data(
					item.get('model_id'),
					item.get('name'),
					item.get('model'),
					item.get('type'),
					item.get('rate'),
					item.get('indictprice'),
					item.get('allprice'),
					item.get('secondprice'),
				)
		elif isinstance(item, CarParamentsItem):
			# Dedupe parameter rows by the model's URL.
			ret = self.sql.select_parm(item.get('model_url'))
			if ret[0] == 1:
				print('已经访问过了')
			else:
				self.sql.insert_car_parm_data(
					item.get('model_id'),
					item.get('model_url'),
					item.get('intake'),
					item.get('horsepower'),
					item.get('cylinder'),
					item.get('driver'),
					item.get('gear_box'),
					item.get('gas'),
					item.get('size'),
				)
		return item

	def close_spider(self, spider):
		"""Release the database connection when the spider finishes."""
		self.sql.close()
