# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from __future__ import print_function

import codecs
import json
import os
import urllib2

import requests
import scrapy

from lagou2 import settings

class Lagou2Pipeline(object):
    """Item pipeline that downloads the image referenced by ``item['imgurl']``.

    Images are saved under ``<settings.IMAGES_STORE>/<spider.name>/<basename>``
    and the saved filename is recorded on the item as ``item['img_name']``.
    Items without an ``'imgurl'`` field pass through unchanged.
    """

    def process_item(self, item, spider):
        """Download the item's image (if any) and return the item.

        Re-entrant: an image whose file already exists on disk is skipped.
        A failed download is logged and leaves no partial file behind, so
        the item is returned without ``'img_name'`` and the image can be
        retried on a later crawl.
        """
        if 'imgurl' not in item:
            return item

        dir_path = '%s/%s' % (settings.IMAGES_STORE, spider.name)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        image_url = item['imgurl']
        # Use the last URL path segment as the local filename.
        img_name = image_url.split('/')[-1]
        file_path = '%s/%s' % (dir_path, img_name)

        if os.path.exists(file_path):
            print('can not download file:' + file_path)
            return item

        print('download image!')
        try:
            # Stream the download and open the target file only after the
            # request succeeds.  The original code opened the file first and
            # then fetched the URL, so a failed download left an empty file
            # behind that permanently blocked re-downloading this image via
            # the exists() check above.
            response = requests.get(image_url, stream=True, timeout=30)
            response.raise_for_status()
            with open(file_path, 'wb') as handle:
                for block in response.iter_content(1024):
                    handle.write(block)
        except Exception as exc:
            # Best-effort pipeline: log, remove any partial file, and keep
            # the item moving through the remaining pipelines.
            print('failed to download %s: %s' % (image_url, exc))
            if os.path.exists(file_path):
                os.remove(file_path)
            return item

        item['img_name'] = img_name
        return item
