#!/usr/bin/python
# -*- coding: UTF-8 -*-

import getopt
import json
import os
import sys
from multiprocessing import Pool

from baidu_image_spider import BaiDu_Image_Spider

# Python-2-only encoding hack: force the default string encoding to UTF-8 so
# the Chinese log strings don't trigger implicit ascii decode errors.
# reload(sys) is needed to re-expose setdefaultencoding (site.py deletes it).
# Guarded so the script no longer crashes with NameError on Python 3, where
# str is already Unicode and this hack is unnecessary.
if sys.version_info[0] == 2:
    reload(sys)
    sys.setdefaultencoding('utf-8')


def download_img(fcn, word, path, img_num, img_num_need, img_file_names, **kwargs):
    """Run one spider job: download images for a single search word.

    input:
        fcn: spider class (or factory) for the image source
        word: search keyword
        path: directory the images are saved into
        img_num: number of images already downloaded
        img_num_need: target number of images
        img_file_names: set of file names that already exist
    output:
        None
    """
    print(word)
    spider = fcn(word, path, img_num, img_num_need, img_file_names, **kwargs)
    finished = spider.image_spider()
    if finished:
        print('%s 完成下载！' % word)
    else:
        remaining = spider.img_num_need - spider.img_num
        print('%s 还有%d张图片未完成下载...' % (word, remaining))
    return None


def get_args(node, path):
    """Recursively walk the JSON category tree and collect spider job args.

    Each child node contributes one argument tuple per configured source,
    appended to the module-level ``args`` list. Search words for nested
    nodes are joined with the parent folder's word via '%2B' (URL-encoded +).

    input:
        node: current JSON node (dict, may contain a 'nodes' list)
        path: filesystem path corresponding to this node
    output:
        None
    """
    for child in node.get('nodes') or []:
        # Combine the parent keyword with the child's, except at the root.
        if path == path_root:
            word = child['text']
        else:
            word = os.path.split(path)[1].split('_')[0] + '%2B' + child['text']
        child_path = os.path.join(path, child['text'] + '_' + child['id'])
        current_num = child['currentPicNum']
        needed_num = child['needPicNum']
        existing_names = set(child['currentPicNames'])
        for source in child['sources']:
            args.append((fcn_dict[source], word, child_path,
                         current_num, needed_num, existing_names))
        get_args(child, child_path)
    return None


if __name__ == "__main__":
    # Parse command-line arguments: the single argument is the data root
    # directory containing data.json. Fail with a usage message instead of a
    # bare IndexError when the argument is missing.
    if len(sys.argv) < 2:
        sys.exit('usage: %s <path_root>' % sys.argv[0])
    path_root = sys.argv[1]
    json_file = os.path.join(path_root, 'data.json')
    with open(json_file, 'r') as load_f:
        json_data = json.load(load_f)

    # Map source names (as they appear in the JSON) to spider classes, then
    # flatten the JSON tree into one argument tuple per (node, source) job.
    fcn_dict = {
        'baidu': BaiDu_Image_Spider
    }
    args = list()
    get_args(json_data, path_root)

    # Run the download jobs in a worker pool. Keep the AsyncResult handles so
    # worker exceptions are surfaced instead of being silently discarded
    # (apply_async swallows errors unless .get() is called).
    pool = Pool()
    results = [pool.apply_async(download_img, arg) for arg in args]
    pool.close()
    pool.join()
    for result in results:
        try:
            result.get()
        except Exception as exc:  # best-effort report; keep other jobs' output
            print('下载任务出错: %s' % exc)
