# -*- coding: utf-8 -*-
import requests  # 导入requests
from bs4 import BeautifulSoup  # 从bs4中导入BeautifulSoup
# from lxml.html import etree
import random  # 取随机数
import time
from db.mysql_conn import MysqlUtils, config

from config.conf import  proxiesDEF, user, Referer_list
from config.useragent import user_agent_list
# Shared DB helper, built once from the project-level connection config.
mysql = MysqlUtils(config)
# Upper bound for random User-Agent picks (deliberately skips the last 10 entries).
user_agent_len = len(user_agent_list) - 10
# Level-2 category accumulators (URLs / display names), filled by secondList().
second_list = []
second_namelist = []
# Level-3 category accumulators, filled by thirdList().
third_list = []
third_namelist = []
# Level-4 category accumulators, filled by fourthList().
fourth_list = []
fourth_namelist = []
# Level-5 category accumulators, filled by fifthList().
fifth_list = []
fifth_namelist = []



def jumpUrl(_url, callback, Boo, Foo):
    """Fetch *_url*, parse it, and (optionally) hand the soup to *callback*.

    Args:
        _url: absolute URL of the page to fetch.
        callback: function taking a BeautifulSoup of the response body;
            in practice one of secondList/thirdList/fourthList/fifthList,
            which append into the module-level accumulator lists.
        Boo: when truthy, invoke *callback* with the parsed page.
        Foo: unused; kept so existing call sites remain valid.
    """
    print(callback)
    # Random pause between requests to look less like a bot.
    time.sleep(random.uniform(3.1, 6.5))
    url = _url
    # Randomized request headers. BUG FIX: the original built this dict but
    # never passed it to requests.get (the intended call survived only as a
    # commented-out line) — it is now actually sent.
    headers = {
        'User-Agent': user_agent_list[random.randint(0, user_agent_len)],
        'Referer': Referer_list[random.randint(0, len(Referer_list) - 1)]
    }
    try:
        response = requests.get(url, headers=headers, timeout=(3, 7))
    except requests.RequestException:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit still
        # propagate; retry once after a short pause (second failure raises).
        time.sleep(random.uniform(0.1, 1.3))
        response = requests.get(url, headers=headers, timeout=(3, 7))
    response_soup = BeautifulSoup(response.text, 'html.parser')
    if Boo:
        # Let the level-specific callback harvest the menu links.
        callback(response_soup)


def firstList(goodsType, url, sqlStr):
    """Crawl the category menu tree rooted at *url* (up to 5 levels deep)
    and insert one row per node via *sqlStr*.

    Each jumpUrl() call fills a module-level *_list/*_namelist pair through
    its callback; a level's lists are cleared (del lst[:]) once that subtree
    has been processed, so indentation of the del statements is significant.

    Args:
        goodsType: root category label, stored with every row.
        url: best-sellers landing page of the root category.
        sqlStr: parameterised INSERT with nine %s placeholders.
    """
    # Level 1: the root category itself.
    print('firstUrl=======================================================')
    # Fetch the root page; secondList() populates second_list/second_namelist.
    jumpUrl(url, secondList, True, False)
    insertSql(0, 1, goodsType, '', '', '', '', goodsType, splitLink(url), sqlStr)
    for index, secondUrl in enumerate(second_list):
        if index >= 0:  # always true; looks like a manual resume knob — raise to skip done items
            print('secondUrl=     '+ str(index) +'=    ' + second_namelist[index] + '=====================================================')
            jumpUrl(secondUrl, thirdList, True, True)
            insertSql(0, 2, goodsType, second_namelist[index], '', '', '', second_namelist[index], splitLink(second_list[index]), sqlStr)
            for ind, thirdUrl in enumerate(third_list):
                if ind >= 0:  # always true; same resume-knob pattern as above
                    print('thirdUrl==  '+ str(ind) +'=    '+ third_namelist[ind] +'=|||||||||||||==secondUrl=   ' + str(index) + '=  ' + second_namelist[index])
                    jumpUrl(thirdUrl, fourthList, True, True)
                    insertSql(0, 3, goodsType, second_namelist[index], third_namelist[ind], '', '', third_namelist[ind], splitLink(third_list[ind]), sqlStr)
                    print(len(fourth_list))
                    for i, fourthUrl in enumerate(fourth_list):
                        print('fourthUrl== '+ str(i) +'=   '+ fourth_namelist[i] + '     =|||||||=thirdUrl==    ' + str(ind) +'== =='+ third_namelist[ind] + '       =|||||||secondUrl= '+ str(index) +'==' + second_namelist[index])
                        jumpUrl(fourthUrl, fifthList, True, True)
                        insertSql(0, 4, goodsType, second_namelist[index], third_namelist[ind], fourth_namelist[i], '', fourth_namelist[i], splitLink(fourth_list[i]), sqlStr)
                        for ix, fifthUrl in enumerate(fifth_list):
                            print('fifthUrl' + str(ix) + '===   ' + fifth_namelist[ix] + 'fourthUrl== ' + str(i) + '=   ' + fourth_namelist[i] + '     =|||||||=thirdUrl==    ' + str(ind) + '== ==' + third_namelist[ind] + '       =|||||||secondUrl= ' + str(index) + '==' + second_namelist[index])
                            insertSql(0, 5, goodsType, second_namelist[index], third_namelist[ind], fourth_namelist[i], fifth_namelist[ix], fifth_namelist[ix], splitLink(fifth_list[ix]), sqlStr)
                        # Level-5 lists are per-level-4 node: reset before the next fourthUrl.
                        del fifth_list[:]
                        del fifth_namelist[:]
                # Level-4 lists are per-level-3 node: reset before the next thirdUrl.
                del fourth_list[:]
                del fourth_namelist[:]
        # Level-3 lists are per-level-2 node: reset before the next secondUrl.
        del third_list[:]
        del third_namelist[:]
    print('=====================================跑完了===============================================================')

def splitLink(link):
    """Strip Amazon's tracking suffix: return everything before 'ref='."""
    head, _, _ = link.partition('ref=')
    return head


def secondList(response_soup):
    """Harvest level-2 category links and names into the module-level lists."""
    anchors = response_soup.select('#zg_browseRoot > ul > ul > li > a')
    second_list.extend(a.attrs['href'] for a in anchors)
    second_namelist.extend(a.string for a in anchors)
    # Debug marker kept verbatim from the original output.
    print(str(len(second_list)) + '0-000')


def thirdList(response_soup):
    """Harvest level-3 category links and names into the module-level lists."""
    anchors = response_soup.select('#zg_browseRoot > ul > ul > ul > li > a')
    third_list.extend(a.attrs['href'] for a in anchors)
    third_namelist.extend(a.string for a in anchors)


def fourthList(response_soup):
    """Harvest level-4 category links and names into the module-level lists."""
    anchors = response_soup.select('#zg_browseRoot > ul > ul > ul > ul > li > a')
    fourth_list.extend(a.attrs['href'] for a in anchors)
    fourth_namelist.extend(a.string for a in anchors)


def fifthList(response_soup):
    """Harvest level-5 category links and names into the module-level lists.

    Some branches have no fifth level; any parsing failure is treated as
    "no level-5 menu" (best-effort, matching the original behaviour).
    """
    try:
        fifth_tree = response_soup.select('#zg_browseRoot > ul > ul > ul > ul > ul > li > a')
        for ahref in fifth_tree:
            fifth_list.append(ahref.attrs['href'])
            fifth_namelist.append(ahref.string)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        print('没有五级菜单')


def insertSql(status, zindex, zindex1, zindex2, zindex3, zindex4, zindex5, title, link, sqlStr):
    """Execute the prepared INSERT *sqlStr* with the nine column values, in order."""
    mysql.insert(sqlStr, [status, zindex, zindex1, zindex2, zindex3, zindex4, zindex5, title, link])


if __name__ == '__main__':
    # Earlier crawl target, kept for reference (note its column list differs).
    # goodsType = 'Health & Household'
    # url = 'https://www.amazon.com/Best-Sellers-Health-Personal-Care/zgbs/hpc/ref=zg_bs_nav_0'
    # sqlStr = "INSERT INTO HealthMenuLink (zindex, 0, zindex1, zindex2, zindex3, zindex4, zindex5, title, link) values (%s, %s, %s, %s, %s, %s, %s, %s, %s)"
    # Current target: crawl the Beauty best-sellers menu tree into BeautyMenuLink.
    goodsType = 'Beauty & Personal Care'
    url = 'https://www.amazon.com/Best-Sellers-Beauty/zgbs/beauty/ref=zg_bs_nav_0'
    sqlStr = "INSERT INTO BeautyMenuLink (status, zindex, zindex1, zindex2, zindex3, zindex4, zindex5, title, link) values (%s, %s, %s, %s, %s, %s, %s, %s, %s)"

    firstList(goodsType, url, sqlStr)



