# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
from mySpider import settings
import os
from urllib import request

from urllib import error, parse

class BmwPipeline:
    """Download every URL in item['image_urls'] into <project root>/images/<category>/."""

    def __init__(self):
        # Project root = parent of the directory that contains this file;
        # images are stored alongside the package, not the working directory.
        root_path = os.path.dirname(os.path.dirname(__file__))
        self.path = os.path.join(root_path, 'images')
        # makedirs(exist_ok=True) replaces the check-then-create pair,
        # avoiding the race between os.path.exists and os.mkdir.
        os.makedirs(self.path, exist_ok=True)

    def process_item(self, item, spider):
        """Download each image in the item and store it under a per-category folder.

        Returns the item so downstream pipelines keep receiving it
        (the original returned None, which would feed None to later pipelines).
        """
        # NOTE(review): category is hard-coded; presumably it should come from
        # the item -- confirm against the spider that produces these items.
        catagory = "test"
        urls = item['image_urls']

        catagory_path = os.path.join(self.path, catagory)
        os.makedirs(catagory_path, exist_ok=True)

        print("process_item catagory_path " + catagory_path)

        print("process_item urls " + str(urls))
        for url in urls:
            # Use the last '/'-separated segment of the URL as the file name.
            image_name = url.split('/')[-1]
            print("process_item image_name " + image_name)
            # Percent-encode non-ASCII characters while preserving the
            # URL-structure characters : / ? & =
            encoded_url = parse.quote(url, safe=':/?&=')
            print("process_item encoded_url " + encoded_url)
            request.urlretrieve(encoded_url, os.path.join(catagory_path, image_name))
        # Scrapy item pipelines must return the item (or raise DropItem).
        return item

class MyspiderPipeline:
    """Persist scraped teacher items into the MySQL table `teacher_info` via pymysql."""

    def __init__(self):
        # Connection parameters are read from the project settings module.
        self.mysql_host = settings.mysql_host
        self.mysql_user = settings.mysql_user
        self.mysql_password = settings.mysql_password
        self.mysql_db = settings.mysql_db
        self.mysql_port = settings.mysql_port
        self.mysql_db_charset = settings.mysql_db_charset
        self.connect()

    def connect(self):
        """Open the MySQL connection and the cursor used by process_item."""
        print("MyspiderPipeline connect ")
        self.conn = pymysql.connect(
            host=self.mysql_host,
            port=self.mysql_port,
            user=self.mysql_user,
            password=self.mysql_password,
            db=self.mysql_db,
            charset=self.mysql_db_charset,
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert one item (title, name, info) into teacher_info and return it.

        Uses a parameterized query: the original interpolated the values with
        %-string formatting, which breaks on embedded quotes and is an SQL
        injection vector.
        """
        print("MyspiderPipeline process_item item is : ", item)
        sql = 'INSERT INTO teacher_info (title, name, info) VALUES (%s, %s, %s)'
        print('MyspiderPipeline process_item sql is %s' % sql)
        try:
            # pymysql escapes the parameters itself when passed separately.
            self.cursor.execute(sql, (item['title'], item['name'], item['info']))
            self.conn.commit()
        except pymysql.MySQLError:
            # Roll back the failed transaction so subsequent items can still
            # be inserted on the same connection, then surface the error.
            self.conn.rollback()
            raise
        return item

    def close_spider(self, spider):
        """Release DB resources in reverse acquisition order: cursor, then connection."""
        print("MyspiderPipeline close_spider ")
        # The original closed the connection before the cursor; closing a
        # cursor on an already-closed connection can raise.
        self.cursor.close()
        self.conn.close()
