#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2020-04-22 06:01:24
# Project: scrawlByCategoryOne
import sys
import os
import json
import re
from pyspider.libs.base_handler import *

# Matches an image URL ending in .jpg/.png/.jpeg (case-insensitive) and captures:
#   URLSCHEMA - the host part, FILEPATH - the directory part, FILENAME - the file name.
# Raw string is required: '\S' inside a plain literal is an invalid escape
# sequence (DeprecationWarning, SyntaxWarning on recent Python versions).
IMG_PATTERN = re.compile(r'https?://(?P<URLSCHEMA>[^/]+)/(?P<FILEPATH>\S+)/(?P<FILENAME>[^/]+\.(jpg|png|jpeg))$', re.I)
DIR_PATH = '/opt/pyspider/data'       # pyspider data root on the host/container
IMG_ROOT_PATH = DIR_PATH + "/images"  # downloaded images live beneath here


def extract_image_prop(url):
    """Split an image *url* into ``(filename, filepath)`` using IMG_PATTERN.

    Returns ``('', '')`` when the URL does not look like a .jpg/.png/.jpeg
    image URL.
    """
    # Run the regex once instead of twice (the original called .search()
    # both for the truth test and again for the group dict).
    match = IMG_PATTERN.search(url)
    if match:
        groups = match.groupdict()
        return groups["FILENAME"], groups["FILEPATH"]
    return '', ''


class Handler(BaseHandler):
    """pyspider handler: fetches the baixingliangfan category list daily and
    downloads each category's image into a per-category directory."""

    crawl_config = {
    }

    def __init__(self):
        # Deal owns the on-disk image directory tree under IMG_ROOT_PATH.
        self.deal = Deal()
        self.headers = {"Content-Type": "application/x-www-form-urlencoded",
                        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, "
                                      "like Gecko) Chrome/81.0.4044.122 Safari/537.36",
                        "Accept": "*/*", "Accept-Encoding": "gzip, deflate, br", "Connection": "keep-alive"}

    @every(minutes=24 * 60)
    def on_start(self):
        """Scheduled entry point (daily): POST the category-list endpoint."""
        self.crawl(url="https://wxmini.baixingliangfan.cn/baixing/wxmini/getCategory",
                   data={'comments': 'null'},
                   callback=self.index_page,
                   headers=self.headers,
                   method="POST")

    # @config(age=30 * 60)
    def index_page(self, response):
        """Parse the category-list JSON and schedule one image download per
        category.

        Assumes the body looks like ``{"data": [{"image": url,
        "bxMallSubDto": ...}, ...]}`` -- TODO confirm against the live API.
        """
        dict_data = response.json
        print(dict_data)
        for oneCategory in dict_data['data']:
            # Drop the unused sub-category payload before forwarding the
            # record; a default of None keeps a missing key from raising
            # KeyError (the original bare .pop() would).
            oneCategory.pop('bxMallSubDto', None)
            print("oneCategory:", oneCategory)
            img_url = oneCategory['image']
            file_name, dir_path = extract_image_prop(img_url)
            dir_path = self.deal.mkDir(dir_path)
            self.crawl(url=img_url, callback=self.save_img,
                       save={'dir_path': dir_path, 'file_name': file_name, 'oneCategory': oneCategory})

    # Save the downloaded image to disk.
    @catch_status_code_error
    def save_img(self, response):
        """Write the image bytes under the category directory prepared by
        index_page.

        Non-200 responses reach this callback because of
        @catch_status_code_error; they are reported as ``{"error": status}``.
        Returns a summary dict of what was written otherwise.
        """
        if response.status_code != 200:
            return {"error": response.status_code}
        content = response.content
        dir_path = response.save['dir_path']
        file_name = response.save['file_name']
        oneCategory = response.save['oneCategory']
        # os.path.join instead of manual '/' concatenation.
        file_path = os.path.join(dir_path, file_name)
        print('dir_path:', dir_path)
        print('file_name', file_name)
        self.deal.saveImg(content, file_path)
        return {'file_name': file_name,
                'file_path': file_path,
                'oneCategory': oneCategory
                }


class Deal:
    """Filesystem helper: owns the image root directory and provides
    directory-creation and file-saving primitives for the handler."""

    def __init__(self):
        # Normalise the root so mkDir can simply concatenate relative paths.
        self.path = IMG_ROOT_PATH
        if not self.path.endswith('/'):
            self.path = self.path + '/'
        if not os.path.exists(self.path):
            os.makedirs(self.path)

    def mkDir(self, path):
        """Create (if missing) and return the absolute directory for the
        relative *path* under the image root."""
        dir_path = self.path + path.strip()
        # The original if/else returned dir_path in both branches.
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        return dir_path

    def saveImg(self, content, path):
        """Write binary image *content* to *path*."""
        # 'with' guarantees the handle is closed even if the write fails.
        with open(path, 'wb') as f:
            f.write(content)

    def saveBrief(self, content, dir_path, name):
        """Write text *content* to <dir_path>/<name>.txt, UTF-8 encoded.

        Bug fix: the original opened the file in text mode ("w+") and then
        wrote bytes (content.encode('utf-8')), which raises TypeError on
        Python 3, and it never closed the handle. Binary mode matches the
        encoded bytes and works on both Python 2 and 3.
        """
        file_name = dir_path + "/" + name + ".txt"
        with open(file_name, "wb") as f:
            f.write(content.encode('utf-8'))

    def getExtension(self, url):
        """Return the substring after the last '.' in *url*."""
        return url.split('.')[-1]
