# Base class for spiders (爬虫的基类)
import concurrent.futures

import requests


class SpiderBase(object):
    """Base class for spiders.

    Subclasses implement :meth:`get_app_list` and :meth:`get_app_info`;
    this class supplies retry wrapping and serial / thread-pool execution
    around them. Failed calls are reported with the string sentinel
    ``'Error'`` rather than an exception.
    """

    def __init__(self):
        # URL of the data source; subclasses are expected to set this.
        self.source_url = ''
        # Number of attempts a wrapped call gets before the error sentinel
        # is returned (overwritten by run()'s max_try_times argument).
        self.max_try_times = 3

    def get_app_list(self, url=None, page=1):
        """Return ``(app_list, total_pages)`` for one listing page.

        Must be overridden by subclasses.
        """
        raise NotImplementedError()

    def get_app_info(self, url):
        """Return the info of a single app located at *url*.

        Must be overridden by subclasses.
        """
        raise NotImplementedError()

    def _run_warp(self, fun, /, *args, **kwargs):
        """Call *fun* with up to ``self.max_try_times`` retries.

        The reserved keyword ``_return_num`` (popped before *fun* is
        called) states how many values *fun* returns, so the failure
        sentinel matches its shape: ``'Error'`` for a single value,
        ``['Error'] * n`` otherwise.

        Only the exception types listed below trigger a retry; anything
        else propagates to the caller.
        """
        # pop with a default instead of the membership-test/pop dance
        _return_num = kwargs.pop('_return_num', 1)
        for _ in range(self.max_try_times):
            try:
                return fun(*args, **kwargs)
            except requests.RequestException:
                print('RequestException: 网络异常')
            except ValueError:
                print('ValueError: 数值错误')
            except OSError:
                print('OSError: 系统错误')
            except TypeError:
                print('TypeError: 类型错误')
            except AttributeError:
                print('AttributeError: 属性引用错误')
        # every attempt failed: return a sentinel shaped like fun's result
        if _return_num == 1:
            return 'Error'
        return ['Error'] * _return_num

    def _run(self, urls):
        """Fetch every URL serially; return ``(results, failed_urls)``.

        ``results`` keeps the order of *urls*; failed entries hold the
        string ``'Error'`` and their URL is also appended to
        ``failed_urls``.
        """
        app_info = []
        error_list = []
        total = len(urls)
        for num, url in enumerate(urls, start=1):
            result = self._run_warp(self.get_app_info, url)
            app_info.append(result)
            if result == 'Error':
                error_list.append(url)
                print('[{0}/{1}] **Error**: 错误，无法获取 {2}'.format(num, total, url))
            else:
                print('[{0}/{1}] Done: {2}'.format(num, total, url))
        return app_info, error_list

    def _run_concurrent(self, urls, max_workers=None):
        """Fetch every URL with a thread pool; return ``(results, failed_urls)``.

        ``results`` keeps the order of *urls* even though futures complete
        out of order. Futures are keyed by list index rather than by URL:
        keying by URL (via ``list.index``) would mis-assign results when
        *urls* contains duplicates, because ``index`` always finds the
        first occurrence.
        """
        app_info = list(urls)
        error_list = []
        total = len(urls)
        num = 0
        with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_index = {
                executor.submit(self._run_warp, self.get_app_info, url): i
                for i, url in enumerate(urls)
            }
            for future in concurrent.futures.as_completed(future_to_index):
                index = future_to_index[future]
                url = app_info[index]
                # fetch the result once instead of calling result() twice
                result = future.result()
                app_info[index] = result
                num += 1
                if result == 'Error':
                    error_list.append(url)
                    print('[{0}/{1}] **Error**: 错误，无法获取 {2}'.format(num, total, url))
                else:
                    print('[{0}/{1}] Done: {2}'.format(num, total, url))
        return app_info, error_list

    def run(self, urls, max_workers=None, max_try_times=5):
        """Fetch *urls* and return ``(results, failed_urls)``.

        Runs serially when ``max_workers == 1``, otherwise through a
        thread pool (``max_workers=None`` lets the pool pick its size).

        Raises:
            ValueError: if ``max_try_times <= 0`` or ``max_workers <= 0``.
        """
        if max_try_times <= 0:
            raise ValueError('max_try_times must be a positive integer')
        self.max_try_times = max_try_times

        if max_workers is None:
            return self._run_concurrent(urls)
        if max_workers <= 0:
            raise ValueError('max_workers must be a positive integer')
        if max_workers == 1:
            return self._run(urls)
        return self._run_concurrent(urls, max_workers)

    def run_get_app_list(self, url=None, start_page=1):
        """Collect app lists from *start_page* through the last page.

        The first page also reports the total page count. On a later page
        failure the partially collected list is returned; a failure on the
        very first page raises ``ValueError``.
        """
        app_list, pages = self._run_warp(self.get_app_list, url, start_page, _return_num=2)
        if app_list == 'Error':
            raise ValueError('failed to fetch the first page of the app list')
        print('[{0}/{1}] Done!'.format(start_page, pages))
        for page in range(start_page + 1, pages + 1):
            # _return_num=2 keeps the failure sentinel two-valued; only the
            # list half ([0]) is needed here.
            result = self._run_warp(self.get_app_list, url, page, _return_num=2)[0]
            if result == 'Error':
                print('[{0}/{1}] **Error**'.format(page, pages))
                return app_list
            print('[{0}/{1}] Done!'.format(page, pages))
            app_list += result
        return app_list
