# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import json
import os
from urllib import request
import scrapy
import datetime
import time
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from scrapy.exceptions import DropItem
# from scrapy.pipelines.files import FilesPipeline
#from DBUtils.PooledDB import PooledDB
#from magnet_link.utils.utils import Utils
# import pymysql
import re
class myfilesPipeline:
    """Scrapy item pipeline that saves each scraped product to disk.

    For every item it creates a per-product directory under
    ``<project>/images``, writes a ``1.txt`` text summary, and downloads
    the product's main and detail images into sub-directories.
    """

    def __init__(self):
        # Root directory holding one sub-directory per scraped product.
        self.images_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'images')
        self.create_dir(self.images_path)

    def create_dir(self, dir_path):
        """Create *dir_path* (including missing parents) if it does not exist.

        ``os.makedirs(..., exist_ok=True)`` is race-safe and creates missing
        parent directories, unlike the bare ``os.mkdir`` it replaces.
        """
        os.makedirs(dir_path, exist_ok=True)

    def process_item(self, item, spider):
        """Persist one item and return it unchanged (Scrapy pipeline hook).

        Assumes ``item['file_urls']`` is a dict with keys ``path``,
        ``mainPath``, ``detailPath`` (directory names) and ``main`` /
        ``detail`` (lists of image URLs) — TODO confirm against the spider.
        """
        urls = item['file_urls']

        # Per-product directory plus sub-directories for main/detail images.
        path = os.path.join(self.images_path, urls['path'])
        self.create_dir(path)
        main_path = os.path.join(path, urls['mainPath'])
        detail_path = os.path.join(path, urls['detailPath'])
        self.create_dir(main_path)
        self.create_dir(detail_path)

        # Write the textual summary. The original left the file handle open
        # when a write raised and relied on the platform default encoding,
        # which corrupts/raises on the Chinese labels below (e.g. on Windows).
        with open(os.path.join(path, '1.txt'), 'a', encoding='utf-8') as summary:
            for info in item['detail']:
                summary.write("%s:%s\n" % (info['subname'], info['context']))
            summary.write("零售价:%s\n" % (item['price']))
            summary.write("原价:%s\n" % (item['org_price']))

        # Download images, numbering files from 1 to match the original layout.
        # NOTE(review): urllib.request.urlretrieve is a legacy interface; it is
        # kept to avoid introducing new dependencies into this file.
        for index, image_url in enumerate(urls['main'], start=1):
            request.urlretrieve(image_url, os.path.join(main_path, '%s.png' % index))

        for index, image_url in enumerate(urls['detail'], start=1):
            request.urlretrieve(image_url, os.path.join(detail_path, '%s.png' % index))

        return item


