# -*- coding: utf-8 -*-
import json, os
import logging
import requests

class EpaperPipeline:
    """Scrapy item pipeline that persists each article as a JSON file under
    <cwd>/<dirname>/<date>/ and downloads the article's images alongside it.
    """

    # Browser-like User-Agent so image hosts don't reject the download requests.
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
                        'Chrome/63.0.3239.132 Safari/537.36'}

    def process_item(self, item, spider):
        """Write *item* to <cwd>/<dirname>/<date>/<article_id>.json, then fetch
        its images into the same directory.

        Parameters: item — dict-like with 'dirname', 'date', 'article_id' and
        'images' (a mapping whose keys are image URLs); spider — unused, part
        of the Scrapy pipeline contract.
        Returns the item unchanged so later pipelines can process it.
        """
        filepath = os.path.join(os.getcwd(), item['dirname'], item['date'])
        # exist_ok avoids the exists()/makedirs() TOCTOU race of the original.
        os.makedirs(filepath, exist_ok=True)
        filename = os.path.join(filepath, item['article_id'] + '.json')
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        # with-block guarantees the handle is closed even if write() raises;
        # the original stored an open handle on self and leaked it on error.
        with open(filename, 'w', encoding="utf-8") as f:
            f.write(line)
        self.download_image(item['images'], filepath)
        return item

    def download_image(self, images, filepath):
        """Download every image URL (the keys of *images*) into *filepath*,
        naming each file after the last path segment of its URL. Failures are
        logged and skipped so one bad URL does not drop the whole item.
        """
        for url in images:
            filename = os.path.join(filepath, url.split('/')[-1])
            try:
                # timeout keeps a hung server from stalling the pipeline;
                # stream + iter_content avoids buffering the image in memory
                # (the original requested stream=True but then read r.content).
                r = requests.get(url, headers=self.header, stream=True,
                                 timeout=30)
                if r.status_code == 200:
                    with open(filename, 'wb') as fp:
                        for chunk in r.iter_content(chunk_size=8192):
                            fp.write(chunk)
            except Exception as e:
                logging.exception(e)