import time
from multiprocessing import Pool
import datetime
import tmall

# Proxy server endpoint (Abuyun HTTP tunnel service)
proxyHost = "http-pro.abuyun.com"
proxyPort = "9010"

# Proxy tunnel authentication credentials
# NOTE(review): credentials are hardcoded — consider loading from env/config.
proxyUser = "H70B99V9Z752485P"
proxyPass = "67CA67814D3A9E21"

# PhantomJS-style command-line arguments describing the authenticated proxy.
service_args = [
    "--proxy-type=http",
    "--proxy={}:{}".format(proxyHost, proxyPort),
    "--proxy-auth={}:{}".format(proxyUser, proxyPass),
]


def single_crawler(task):
    """Crawl one shop entry in a worker process.

    Args:
        task: mapping with keys 'logo' and 'url' (presumably one item
            produced by tmall.search_by_keyword — confirm against caller).
    """
    # Parameter renamed from `dict`, which shadowed the builtin type.
    # The only caller in this file passes it positionally (apply_async).
    print('开始处理', task['logo'], task['url'])
    tmall.search_by_url(task['logo'], task['url'])


if __name__ == '__main__':
    # Build the work queue (renamed from `list`/`dict`, which shadowed
    # the builtins).
    tasks = tmall.search_by_keyword()
    pool = Pool(processes=4)
    # Keep the AsyncResult handles: the original discarded them, so any
    # exception raised inside a worker was silently lost.
    results = [pool.apply_async(single_crawler, (task,)) for task in tasks]
    pool.close()
    pool.join()
    # Report worker failures without aborting — the run stays best-effort.
    for res in results:
        try:
            res.get()
        except Exception as exc:
            print('worker task failed:', exc)
    print('天猫手机爬虫任务完成')
