import argparse
import json
import time
import traceback

import redis
import requests
import re
from concurrent.futures import ThreadPoolExecutor, as_completed, TimeoutError
from log import Logger, BaseLogData

# Separate log files: successful job results vs. failures (see log.Logger).
access_log = Logger('access.log')
error_log = Logger('error.log')

# Job queue lives in Redis db 0 on the LAN host below; main() pops job
# payloads (JSON) from the "todo" list.
redis_0 = redis.StrictRedis(host="192.168.31.48", port=6379, db=0)
# Browser-like headers for the AliExpress seller endpoint, copied from a
# captured browser request.
# NOTE(review): the 'sec-ch-ua' value looks shell-escape-mangled
# ('^\\^Google') — confirm it matches the real browser header.
headers = {
    'authority': 'seller-acs.aliexpress.com',
    'pragma': 'no-cache',
    'cache-control': 'no-cache',
    'sec-ch-ua': '^\\^Google',
    'accept': 'application/json',
    'sec-ch-ua-mobile': '?0',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36',
    'content-type': 'application/x-www-form-urlencoded',
    'origin': 'https://gsp.aliexpress.com',
    'sec-fetch-site': 'same-site',
    'sec-fetch-mode': 'cors',
    'sec-fetch-dest': 'empty',
    'referer': 'https://gsp.aliexpress.com/',
    'accept-language': 'zh-CN,zh;q=0.9',
}


def spider(_data):
    """Fetch one job's URL and extract the stock count from the response.

    Parameters
    ----------
    _data : dict
        Job payload popped from the Redis queue; must contain
        'job_id' and 'url'.

    Returns
    -------
    Result
        On success, data={'stock': <int>}; on any failure, an error
        Result (error_code=1) carrying the traceback or a parse message.
        Never raises — all errors are folded into the Result.
    """
    job_id = _data['job_id']
    try:
        # A timeout is essential: requests.get() without one can block
        # forever, which would defeat the as_completed(timeout=...) logic
        # in main() — the worker thread would never finish.
        resp = requests.get(url=_data['url'], headers=headers, timeout=10)
        # Capture the first run of digits after totalAvailQuantity".
        # The original parse — re.findall(...)[0][1:-1] — chopped one
        # character off each end of the capture, which mangles the number
        # (e.g. ':123' -> '12') depending on exact response formatting.
        match = re.search(r'totalAvailQuantity"\D*(\d+)', resp.text)
        if match is None:
            return Result(job_id=job_id, error_code=1,
                          error_msg='totalAvailQuantity not found in response')
        return Result(data=dict(stock=int(match.group(1))), job_id=job_id)
    except Exception:
        return Result(job_id=job_id, error_code=1, error_msg=traceback.format_exc())


class Result:
    """Outcome of a single spider job, reported to the logs and to the
    queue-status HTTP service."""

    def __init__(self, job_id, error_msg='', error_code=0, data=None):
        self.error_msg = error_msg    # traceback text / message on failure
        self.error_code = error_code  # 0 = success, non-zero = failure
        self.data = data              # payload dict (e.g. {'stock': n}) or None
        self.job_id = job_id          # id of the queue job this result belongs to

    def json(self):
        """Log the result, push it to the status-update service, and return
        it as a plain dict.

        Returns
        -------
        dict
            Keys: error_msg, error_code, data, job_id.
        """
        data = dict(
            error_msg=self.error_msg,
            error_code=self.error_code,
            data=self.data,
            job_id=self.job_id
        )
        info = '运行结果'
        if self.error_code:
            error_log.logger.error(BaseLogData(info=info, data=data).message)
        else:
            access_log.logger.info(BaseLogData(info=info, data=data).message)
        try:
            # Best-effort status report: a slow or unreachable status service
            # must not hang or crash the worker loop in main() (json() runs
            # on the main thread there), so bound the call with a timeout and
            # log request failures instead of propagating them.
            requests.post(
                url='http://192.168.31.169:8085/RedisQueue/QueueStatusUpdate',
                data=dict(state=1, content=json.dumps(data)),
                timeout=10,
            )
        except requests.RequestException:
            error_log.logger.error(
                BaseLogData(info='状态上报失败', data=data).message)
        return data


def parse_arguments(argv=None):
    """Parse command-line options for the worker.

    Parameters
    ----------
    argv : list[str] | None
        Argument list to parse. The default of None makes argparse read
        sys.argv[1:], preserving the original command-line behavior;
        passing an explicit list makes the function testable.

    Returns
    -------
    argparse.Namespace
        With .max_workers (int, default 1 — thread-pool size) and
        .timeout (int, default 5 — per-batch deadline in seconds).
    """
    _parser = argparse.ArgumentParser(
        description='handle args from cmd.')
    _parser.add_argument('-m', '--max_workers', dest='max_workers',
                         type=int, default=1,
                         help='线程池长度, 默认1')
    _parser.add_argument('-t', '--timeout', dest='timeout', type=int,
                         default=5, help='超时时间, 默认5s')
    return _parser.parse_args(argv)


def main():
    """Worker loop: pop up to max_workers jobs from the Redis 'todo' list,
    run them concurrently via spider(), and report every result — including
    jobs that miss the per-batch deadline — through Result.json()."""
    parser = parse_arguments()
    max_workers = parser.max_workers
    timeout = parser.timeout
    while True:
        with ThreadPoolExecutor(max_workers=max_workers) as t:
            obj_list = []
            job_ids = []
            # Pull at most one job per worker thread for this batch.
            for _ in range(max_workers):
                job = redis_0.lpop('todo')
                if job:
                    job = json.loads(job)
                    job_ids.append(job['job_id'])
                    obj_list.append(t.submit(spider, job))
            if not obj_list:
                # Queue is empty: back off briefly instead of busy-polling
                # Redis in a tight loop.
                time.sleep(1)
                continue
            obj_dict = {}
            try:
                for future in as_completed(obj_list, timeout=timeout):
                    data = future.result()
                    obj_dict[data.job_id] = data.json()
            except TimeoutError:
                # Report every job that did not complete within the deadline.
                for job_id in job_ids:
                    if job_id not in obj_dict:
                        Result(job_id=job_id, error_msg="请求超时", error_code=1).json()


if __name__ == '__main__':
    # Run the worker loop when executed as a script.
    main()
