# -*- coding: utf-8 -*-
from browser import Driver
from Tmall import Tmall
from config import Temp_Path, Worker_Size
from tools.spider_utils import get_task, save_data


def worker(flag, tmall, task_id, item_list):
    """Fetch product info for every item of one task.

    Args:
        flag: truthy -> fetch from the network (``get_info_url``);
              falsy  -> read from the local cache (``get_info_file``).
        tmall: Tmall scraper instance exposing the two fetch methods.
        task_id: task identifier; combined with "关键字" to build the
            lookup key passed to the fetcher.
        item_list: iterable of items; each is passed through ``str()``.

    Returns:
        list: the truthy results only — items whose fetch returned a
        falsy value (failed/empty) are dropped.
    """
    # Hoist the loop-invariant branch: choose the fetch method once
    # instead of re-testing `flag` on every iteration.
    fetch = tmall.get_info_url if flag else tmall.get_info_file
    key = task_id + "关键字"
    return [r
            for r in (fetch(Temp_Path, key, str(item)) for item in item_list)
            if r]


def main():
    """Entry point: run every pending scrape task and persist results.

    Builds a browser-backed Tmall scraper, pulls up to ``Worker_Size``
    tasks, fetches each task's items from the network, and saves any
    non-empty result set to MongoDB via ``save_data``.
    """
    web_driver = None
    try:
        web_driver = Driver()
        t_mall = Tmall(web_driver)
        tasks = get_task(Worker_Size)
        for task in tasks:
            # True -> fetch from the network (not the local cache).
            res = worker(True, t_mall, task['id'], task['items'])
            if res:
                # 保存数据 -> mongo
                save_data(task['id'], res)
    except Exception as e:
        # Top-level boundary: report and swallow so the process exits
        # cleanly instead of dumping a traceback.
        print(e)
        print("爬虫出现异常")
    finally:
        # Release the browser even on failure — it was previously leaked.
        # NOTE(review): Driver's API is not visible here; assumes a
        # Selenium-style `quit()` — confirm against browser.Driver.
        if web_driver is not None:
            quit_fn = getattr(web_driver, "quit", None)
            if callable(quit_fn):
                quit_fn()
