import os
import time
import urllib3
import requests
import random
from urllib import parse
from selenium import webdriver
from selenium.webdriver.chrome.options import Options


def chrome_brower(headless=False, proxy=False, disableIMG=True, disableJS=True, download_path=""):
    """Build and return a configured Chrome ``webdriver`` instance.

    Args:
        headless: run Chrome without a visible window.
        proxy: route traffic through a local proxy at 127.0.0.1:80.
        disableIMG: block image loading (speeds up scraping).
        disableJS: block JavaScript execution.
        download_path: if non-empty, downloads are saved here without a
            save-file dialog.

    Returns:
        A ready-to-use ``selenium.webdriver.Chrome`` driver.
    """
    # Platform-appropriate driver binary name (Windows needs the .exe suffix).
    if os.name == "nt":
        chromedriver = "chromedriver.exe"
    else:
        chromedriver = "chromedriver"
    # driver_path = os.path.join(os.getcwd(),"blog","spider", chromedriver)
    # BUG FIX: the full path (including ".exe") was hard-coded, leaving the
    # computed `chromedriver` name dead and breaking any non-Windows run.
    # Joining the project directory with the computed name yields the exact
    # same path on Windows while fixing the dead variable.
    driver_path = os.path.join("D:\\pycode\\JT_spider\\common", chromedriver)

    chrome_options = Options()
    chrome_options.add_argument('--no-sandbox')

    if headless:
        # --disable-gpu is needed on some Windows setups for headless mode.
        chrome_options.add_argument('--disable-gpu')
        chrome_options.add_argument('--headless')

    if proxy:
        chrome_options.add_argument('--proxy-server=127.0.0.1:80')

    # Chrome content-setting values: 0 = allow (default), 2 = block.
    prefs = {
        'profile.default_content_setting_values': {
            'images': 2 if disableIMG else 0,
            'javascript': 2 if disableJS else 0,
        }
    }
    if download_path:
        prefs['profile.default_content_settings.popups'] = 0  # suppress save dialog
        prefs['download.default_directory'] = download_path
    chrome_options.add_experimental_option('prefs', prefs)

    # 'options=' replaces the deprecated 'chrome_options=' keyword
    # (supported since Selenium 3.8; the only accepted form in Selenium 4).
    driver = webdriver.Chrome(options=chrome_options, executable_path=driver_path)

    return driver


def request_fun(url):
    """Download *url* with up to 3 attempts and return the response body.

    TLS verification is disabled on purpose (``verify=False``) so targets
    with self-signed certificates do not abort the download.

    Args:
        url: the resource to fetch.

    Returns:
        The raw response body as ``bytes`` on success, ``False`` otherwise.
    """
    user_agent = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML,' \
                 'like Gecko) Chrome/60.0.3112.90 Safari/537.36'
    headers = {'User-Agent': user_agent}
    # verify=False below would otherwise spam InsecureRequestWarning.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    max_attempts = 3
    for attempt in range(1, max_attempts + 1):
        try:
            # timeout guards against a hung connection blocking forever.
            response = requests.get(url, headers=headers, verify=False,
                                    timeout=30)
            if response.content:
                return response.content
        except Exception as e:
            # BUG FIX: the try/except used to wrap the whole retry loop, so a
            # single network error aborted all remaining attempts. Now an
            # exception counts as one failed attempt and we retry.
            print("request_fun:\n", e)

        print("download pic fail: %s" % url)
        if attempt < max_attempts:
            sleep_time = attempt * 5  # simple linear back-off: 5s, 10s
            # BUG FIX: the counter said "/2" although 3 attempts are made.
            print("sleep %s second, then retry....%s/%s....." %
                  (sleep_time, attempt, max_attempts))
            time.sleep(sleep_time)

    # BUG FIX: exhausted retries used to fall through returning implicit
    # None while the exception path returned False; now both return False.
    return False


def request_for_sales(product_rank, product_category):
    """Query amztracker.com for the estimated sales of an Amazon product.

    Args:
        product_rank: Amazon best-seller rank of the product.
        product_category: Amazon category name the rank belongs to.

    Returns:
        Estimated sales as an ``int``, or 0 when the request fails or the
        service returns an empty / non-numeric body.
    """
    url_base = "https://www.amztracker.com/unicorn.php?"
    data_dic = {
        "domain": "www.amazon.com",
        "category": product_category,
        "rank": product_rank,
        # Cache-buster mimicking a browser's timestamp query parameter.
        "_": random.randint(1500000000000, 1599999999999)  # 1544611362601,
    }
    url_plus = parse.urlencode(data_dic)
    try:
        # timeout guards against a hung connection blocking forever.
        resp = requests.get(url_base + url_plus, timeout=30).text
    except Exception as e:
        print("request_for_sales", e)
        resp = None

    if resp:
        try:
            return int(resp)
        except ValueError:
            # BUG FIX: a non-numeric body (e.g. an error page) used to raise
            # ValueError out of this function; treat it as "no sales data".
            pass
    print(product_rank, "sale is 0")
    return 0


if __name__ == "__main__":
    # Manual smoke test: download one sample image and report success.
    pic_url = "https://www.joann.com/dw/image/v2/AAMM_PRD/on/demandware.static/-/Sites-joann-product-catalog/default/dw71bec696/images/hi-res/17/17667684.jpg"
    result = request_fun(pic_url)
    print(bool(result))
