import json
import logging
import os
import sys
import threading
import time
from datetime import date, datetime
from queue import Empty, Queue

import redis
import requests
from bs4 import BeautifulSoup

proxypool_url = 'http://127.0.0.1:5555/random'

def get_redis(db=0):
    """Build a pooled Redis client for the configured server.

    :param db: redis database index to select
    :return: redis.Redis backed by a ConnectionPool

    NOTE: responses are returned as *bytes*. Passing ``decode_responses=True``
    to ``redis.Redis`` is ignored when an explicit ``connection_pool`` is
    supplied (it only applies to the pool the client would create itself),
    so the previous kwarg was ineffective and has been dropped; callers
    (e.g. ``tran``) decode values themselves.
    """
    redis_config = {
        'ip': '127.0.0.1',
        'port': 6379,
    }
    pool_kwargs = {
        'host': redis_config['ip'],
        'port': redis_config['port'],
        'db': db,
    }
    # Optional auth: add a 'password' key to redis_config to enable it.
    if 'password' in redis_config:
        pool_kwargs['password'] = redis_config['password']
    pool = redis.ConnectionPool(**pool_kwargs)
    return redis.Redis(connection_pool=pool)

def log(data):
    """Append *data* to today's log file under ``<cwd>/Runtime/log/``.

    Each call is bracketed by start/end timestamp marker lines so separate
    writes can be told apart in the daily file.

    :param data: text to append (written verbatim between the markers)
    """
    log_dir_path = os.path.join(os.getcwd(), 'Runtime', 'log')
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(log_dir_path, exist_ok=True)

    filename = os.path.join(log_dir_path,
                            date.today().strftime("%Y-%m-%d") + '.txt')

    # Explicit encoding: don't depend on the platform default for log text.
    with open(filename, 'a', encoding='utf-8') as file_object:
        file_object.write('------' + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ' start------\n')
        file_object.write(data)
        file_object.write('------' + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ' end------\n')

def get_random_proxy():
    """
    get random proxy from proxypool
    :return: proxy string such as '8.8.8.8:8888'
    """
    # (connect, read) timeout: an unreachable proxy pool must not hang the
    # whole crawler forever; callers already treat failures as retryable.
    return requests.get(proxypool_url, timeout=(3.05, 5)).text.strip()

def crawl(url, proxy):
    """
    use proxy to crawl page
    :param url: page url
    :param proxy: proxy, such as 8.8.8.8:8888
    :return: html text of the response

    NOTE(review): only an 'http' proxy mapping is set, so https:// URLs
    bypass the proxy entirely — presumably intentional; confirm before
    adding an 'https' entry.
    """
    proxies = {'http': 'http://' + proxy}
    # (connect, read) timeout so a dead proxy cannot block the caller.
    return requests.get(url, proxies=proxies, timeout=(3.05, 5)).text

# Shared client for the proxy list stored in redis db 8 (list key: 'ip').
redis_obj = get_redis(8)

def tran(q, max_retries=10):
    """Translate *q* via the mobile Youdao endpoint, rotating proxies.

    Pops a proxy from the redis 'ip' list (falling back to the proxy pool
    service) and pushes it back only when it produced a usable answer, so
    dead proxies drop out of rotation naturally.

    :param q: text to translate; None or '' returns None immediately
    :param max_retries: attempts before giving up (replaces the previous
        unbounded recursion, which could exhaust the stack on persistent
        failures); default keeps callers' one-argument usage working
    :return: translated text, or None on empty input / exhausted retries
    """
    if q is None or q == '':
        return None
    params = {'inputtext': q, 'type': 'AUTO'}
    for _ in range(max_retries):
        proxy = redis_obj.lpop('ip')
        if proxy is None:
            proxy = get_random_proxy()
        elif isinstance(proxy, bytes):
            # redis returns bytes here (decode_responses is not in effect).
            proxy = proxy.decode('utf-8')
        try:
            data = requests.post('http://m.youdao.com/translate', params,
                                 proxies={'http': 'http://' + proxy},
                                 timeout=(3.05, 5))
        except Exception:
            continue  # bad/slow proxy: try the next one
        if data.status_code != 200:
            continue
        html_content = BeautifulSoup(data.content, "html.parser")
        items = html_content.select('#translateResult > li')
        result = items[0].text if items else ''
        if result:
            # Proxy worked: return it to the rotation.
            redis_obj.lpush('ip', proxy)
            return result
    return None

def task_loop(func, task_queue, callback):
    """Worker loop: drain *task_queue*, calling ``func(**params)`` per item.

    :param func: callable invoked with each queued dict as keyword arguments
    :param task_queue: queue.Queue holding parameter dicts
    :param callback: optional callable receiving each result
    """
    while True:
        # get_nowait + Empty avoids the check-then-get race of the previous
        # empty()-guarded loop when several workers share one queue, and we
        # catch exactly queue.Empty instead of printing every Exception.
        try:
            params = task_queue.get_nowait()
        except Empty:
            break
        rst = func(**params)
        if callback:
            callback(rst)

class MultiThreadTask:
    """Run *func* over a list of kwargs dicts with a pool of threads.

    Each entry of ``params_lst`` is a dict passed to ``func`` as keyword
    arguments by the ``task_loop`` workers; ``callback`` (if given) receives
    every result.
    """

    def __init__(self, func, thread_num, params_lst=None, callback=None):
        """
        :param func: callable applied to each task dict
        :param thread_num: requested worker count (capped at len(params_lst))
        :param params_lst: list of kwargs dicts; default None replaces the
            previous mutable-default ``[]``, which is shared across calls
        :param callback: optional per-result callable
        """
        params_lst = list(params_lst) if params_lst else []
        self.func = func
        self.thread_num = thread_num
        if not params_lst:
            # Kept as a logged error (not a raise) to preserve existing
            # behavior: start() then simply spawns zero workers.
            logging.error('params do not allow empty')

        # Never spawn more workers than there are tasks.
        if thread_num >= len(params_lst):
            self.thread_num = len(params_lst)

        self.params_lst = params_lst

        self.task_queue = Queue(len(self.params_lst))
        for task in self.params_lst:
            self.task_queue.put(task)

        self.callback = callback

    def start(self):
        """Spawn the workers, start them all, and block until all finish."""
        try:
            thread_lst = [
                threading.Thread(target=task_loop,
                                 args=(self.func, self.task_queue, self.callback))
                for _ in range(self.thread_num)
            ]

            for t in thread_lst:
                t.start()

            for t in thread_lst:
                t.join()

        except KeyboardInterrupt:
            print("Caught KeyboardInterrupt, terminate workers and exit")
            sys.exit(0)