# import libraries 

from bs4 import BeautifulSoup
import requests
import time
import datetime

import smtplib

import json
import random
from scrapy import Selector
import time
import threading
import queue

def task_one_page(asin, page, ip_port):
    """Fetch and print one page of Amazon reviews for *asin* through a proxy.

    Parameters
    ----------
    asin : str
        Amazon product identifier.
    page : int
        1-based review page number to request.
    ip_port : str
        Proxy address in ``"ip:port"`` form; used for both http and https.

    Returns
    -------
    bool
        True when the HTTP request came back with status 200, else False.
    """
    proxy = {
        'http': 'http://{}'.format(ip_port),
        'https': 'http://{}'.format(ip_port)
    }

    headers = {
        'authority': 'www.amazon.com',
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36",
    }

    post_data = {
        "sortBy": "recent",
        "reviewerType": "all_reviews",
        "formatType": "",
        "mediaType": "",
        "filterByStar": "",
        "filterByAge": "",
        "pageNumber": 10,
        "filterByLanguage": "",
        "filterByKeyword": "",
        "shouldAppend": "undefined",
        "deviceType": "desktop",
        "canShowIntHeader": "undefined",
        "pageSize": "10",
        "asin": "",
        "scope": "reviewsAjax0"
    }
    # Page-turning payload parameters.
    # BUG FIX: the original assignments ended with trailing commas, which
    # silently turned each value into a 1-element tuple in the POST body.
    post_data["pageNumber"] = page
    post_data["reftag"] = f"cm_cr_getr_d_paging_btm_next_{page}"
    post_data["scope"] = f"reviewsAjax{page}"
    post_data["asin"] = asin
    # Pagination endpoint for the requested page.
    spiderurl = f'https://www.amazon.com/hz/reviews-render/ajax/reviews/get/ref=cm_cr_arp_d_paging_btm_next_{page}'

    success_flag = False
    try:
        # timeout so a dead proxy cannot hang the worker forever
        res = requests.post(spiderurl, headers=headers, data=post_data, proxies=proxy, timeout=30)
        if res and res.status_code == 200:
            body = res.content.decode('utf-8')
            success_flag = True
            # The ajax response is a '&&&'-separated list of JSON-ish chunks;
            # the review HTML sits in the last '","'-separated field.
            contents = body.split('&&&')
            for content in contents:
                infos = content.split('","')
                info = infos[-1].replace('"]', '').replace('\\n', '').replace('\\', '')
                # Only chunks containing a review node are parsed.
                if 'data-hook="review"' in info:
                    sel = Selector(text=info)
                    data = {}
                    data['username'] = sel.xpath('//span[@class="a-profile-name"]/text()').extract_first()  # reviewer name
                    data['point'] = sel.xpath('//span[@class="a-icon-alt"]/text()').extract_first()  # star rating
                    data['date'] = sel.xpath('//span[@data-hook="review-date"]/text()').extract_first()  # date / location
                    data['review'] = sel.xpath('//span[@data-hook="review-title"]/span/text()').extract_first()  # review title
                    data['detail'] = sel.xpath('//span[@data-hook="review-body"]').extract_first()  # review body
                    # BUG FIX: the original relative xpath 'div[...]' could never
                    # match from the document root; use an absolute search.
                    image = sel.xpath('//div[@class="review-image-tile-section"]').extract_first()
                    data['image'] = image if image else "not image"  # attached image, if any
                    print(data)
    except Exception as exc:
        # Narrowing fully is impractical here (network + parsing), but at
        # least surface what went wrong instead of swallowing it silently.
        print('something wrong happen!', exc)

    return success_flag

def get_proxies_json():
    """Request a batch of proxies from the lunaproxy API.

    Returns
    -------
    list | None
        The list under the response's ``data`` key when the API reports
        success (``code == 0``); None when the API reports an error, the
        request fails, or the response is not valid JSON.
    """
    api = 'https://tq.lunaproxy.com/getflowip?neek=1219342&num=25&type=2&sep=1&regions=all&ip_si=2&level=1&sb='
    try:
        # timeout so a slow/unreachable API cannot hang the caller;
        # resp.json() replaces the manual json.loads(resp.text) round-trip
        resp = requests.get(api, timeout=30)
        resp_dict = resp.json()
    except (requests.RequestException, ValueError) as exc:
        # The original crashed on any network/parse failure; callers already
        # handle a None return, so report and degrade gracefully instead.
        print('proxy API request failed:', exc)
        return None
    # check the content of the response json
    if resp_dict.get('code') == 0:
        return resp_dict.get('data')
    return None

def get_one_proxy():
    """Pick one random proxy from the API, as an ``"ip:port"`` string.

    Returns None when no proxy list could be fetched.
    """
    candidates = get_proxies_json()
    if candidates is None:
        return None
    chosen = random.choice(candidates)
    return '{ip}:{port}'.format(ip=chosen['ip'], port=chosen['port'])


class ProxyManager:
    """Maintains a pool of proxy ``"ip:port"`` strings from the lunaproxy API.

    A proxy is in exactly one of three states:
      * vacant  -- queued in ``_ip_port_pool_vacant``, ready to hand out
      * using   -- checked out via :meth:`withdraw_vacant_ip_port`
      * blocked -- its bare IP was reported blocked via :meth:`update_ip_status`
    """

    def __init__(self, simultaneous_num) -> None:
        # API returns JSON like {"code": 0, "data": [{"ip": ..., "port": ...}]}
        self._api = 'https://tq.lunaproxy.com/getflowip?neek=1219342&num=25&type=2&sep=1&regions=all&ip_si=2&level=1&sb='
        self._blocked_ips = set()  # bare IPs reported as blocked
        # bounded queue of vacant "ip:port" strings (at most simultaneous_num)
        self._ip_port_pool_vacant = queue.Queue(simultaneous_num)
        self._ip_port_pool_using = set()  # "ip:port" strings currently checked out

    def refresh_pool(self):
        """Fetch fresh proxies from the API; enqueue IPs not seen before."""
        try:
            resp = requests.get(self._api, timeout=30)
            resp_dict = resp.json()
        except (requests.RequestException, ValueError):
            # best-effort refresh: a failed API call just leaves the pool as-is
            return
        if resp_dict.get('code') != 0:
            return
        proxy_ls = resp_dict.get('data') or []

        # Gather all known bare IPs.
        # BUG FIX: queue.Queue is not iterable -- snapshot its internal deque.
        vacant_ips = {x.split(':')[0] for x in list(self._ip_port_pool_vacant.queue)}
        using_ips = {x.split(':')[0] for x in self._ip_port_pool_using}
        known_ips = vacant_ips | using_ips | self._blocked_ips

        # Add each previously-unknown ip (with port) to the vacant pool.
        for d in proxy_ls:
            ip = d['ip']
            ip_port = '{ip}:{port}'.format(ip=ip, port=d['port'])
            if ip not in known_ips:
                try:
                    # BUG FIX: plain put() on a full bounded queue blocks forever
                    self._ip_port_pool_vacant.put_nowait(ip_port)
                except queue.Full:
                    break
            known_ips.add(ip)

    def withdraw_vacant_ip_port(self):
        """Check out one vacant proxy; return None when the pool is empty.

        BUG FIX: the original called ``len()`` on a queue.Queue (no __len__)
        and never recorded the checkout, so update_ip_status() always raised.
        """
        try:
            ip_port = self._ip_port_pool_vacant.get_nowait()
        except queue.Empty:
            return None
        self._ip_port_pool_using.add(ip_port)  # track so it can be returned later
        return ip_port

    def update_ip_status(self, ip_port, is_blocked):
        """Return a checked-out proxy: blocklist its IP or recycle it."""
        ip, _ = ip_port.split(':')
        # discard (not remove): tolerate a proxy that was never checked out
        self._ip_port_pool_using.discard(ip_port)
        if is_blocked:
            self._blocked_ips.add(ip)  # record the ip as being blocked
        else:
            self._ip_port_pool_vacant.put(ip_port)  # back into the vacant pool


class Task:
    """One unit of scraping work: a (product, page, star-filter) triple."""

    def __init__(self, asin, page, stars) -> None:
        # coordinates of the review page to scrape
        self.asin, self.page, self.stars = asin, page, stars


class TaskManger:
    """Drives scraping of all (stars, page) review pages for one product ASIN.

    NOTE(review): this class was visibly work-in-progress (unused locals,
    dead code, an unstarted worker thread, a stub worker). This revision
    fixes the concrete defects while keeping the producer/consumer shape
    and the public interface (``TaskManger(asin, num_threads)``; ``run()``).
    """

    def __init__(self, asin, num_threads) -> None:
        self._asin = asin
        # BUG FIX: ProxyManager requires its pool size; the original
        # called it with no arguments (TypeError at construction).
        self._proxy_manager = ProxyManager(num_threads)
        # bounded handoff queue between producer and consumer
        self._task_queue = queue.Queue(num_threads)

    def _producer(self):
        """Enqueue one Task per (stars 1-5, page 1-10), then a stop sentinel."""
        for stars in range(1, 6):
            for page in range(1, 11):
                self._task_queue.put(Task(self._asin, page, stars))
        self._task_queue.put(None)  # sentinel: tells the consumer to stop

    def _consumer(self):
        """Drain the queue, running one worker thread per task."""
        while True:
            tsk = self._task_queue.get()
            if tsk is None:
                self._task_queue.task_done()
                break
            ip_port = self._proxy_manager.withdraw_vacant_ip_port()
            thd = threading.Thread(
                target=self._task_one_page,
                args=(tsk.asin, tsk.stars, tsk.page, ip_port),
            )
            # BUG FIX: the original created the thread but never started it.
            thd.start()
            thd.join()
            # BUG FIX: without task_done(), _task_queue.join() in run()
            # would block forever.
            self._task_queue.task_done()

    def _task_one_page(self, asin, stars, page, ip_port):
        """Worker stub for one review page.

        BUG FIX: the original stub lacked ``self`` and any parameters, so
        invoking it as a thread target would have raised TypeError.
        TODO: delegate to the module-level task_one_page() once star
        filtering is wired into the request payload.
        """
        pass

    def run(self):
        """Fill the proxy pool, then produce and consume all page tasks."""
        self._proxy_manager.refresh_pool()

        # Start the consumer FIRST: the task queue is bounded, so producing
        # synchronously with no consumer running would deadlock once the
        # queue filled up (the original code did exactly that).
        consumer_thread = threading.Thread(target=self._consumer)
        consumer_thread.daemon = True  # don't outlive the main thread
        consumer_thread.start()

        self._producer()
        # Balanced by the task_done() calls in _consumer.
        self._task_queue.join()

        


if __name__ == '__main__':
    # Ad-hoc smoke test: fill a small bounded queue to capacity.
    # (Earlier manual experiments with task_one_page / ProxyManager were
    # left here commented out; removed for clarity.)
    demo_queue = queue.Queue(3)
    for item in range(3):
        demo_queue.put(item)