# -*- coding: utf-8 -*-
import requests  # 导入requests
from bs4 import BeautifulSoup  # 从bs4中导入BeautifulSoup
# from lxml.html import etree
import random  # 取随机数
import time
import urllib3 # 忽略 https 警告用
from db.mysql_conn import MysqlUtils, config
from config.conf import  proxiesDEF, user, Referer_list
from config.useragent import user_agent_list
mysql = MysqlUtils(config)
user_agent_len = len(user_agent_list) - 10
sqlStr = "INSERT INTO best190918 (country,asin,rankingtype,ranking,star,reviews,moneytype,price,url,name,img ) values (%s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s)"
goodsNum = 0

'''
使用说明
1 通过不同的表获取要爬取的链接， 暂时 表有两个  HealthMenuLink   BeautyMenuLink
2 通过链接获取到想要的内容，插入到表格  （建议每次插入的表格都是新的，最后复制到best0711）

'''


def jumpUrl(url):
    """Fetch *url*, parse the HTML and hand the soup to getGoodsDeDetail().

    Sleeps for random intervals before and after the request to throttle the
    crawl. Any failure (network error, timeout, parse error) is logged and
    swallowed so the caller's crawl loop keeps going.

    Note: the original version also built a hard-coded 50-entry proxy list and
    a random User-Agent/Referer header dict on every call, but never passed
    either to requests.get (that variant was commented out) — that dead work
    has been removed without changing the request actually sent.
    """
    time.sleep(random.uniform(3.1, 6.5))  # polite delay before each request
    try:
        # Suppress the InsecureRequestWarning triggered by verify=False below.
        urllib3.disable_warnings()
        response = requests.get(url, verify=False, timeout=(3, 7))
        time.sleep(random.uniform(0.3, 0.7))
        response_soup = BeautifulSoup(response.text, 'html.parser')
        getGoodsDeDetail(response_soup)
    except Exception as exc:
        # Best-effort crawling: log, back off briefly, and let the caller continue.
        print(exc)
        print('requests.get err')
        time.sleep(random.uniform(0.1, 1.3))



def goLink(goodsArr):
    """Crawl page 1 and page 2 of every category link in *goodsArr*, mark each
    row as processed, then recurse into firstList() for the next batch.

    Each step is wrapped in its own try/except so one bad row or one failed
    request never stops the crawl.
    """
    try:
        for goods in goodsArr:
            print('===id======%s===============' % goods['id'])
            # BUGFIX: `link` must be reset per iteration. Previously, if
            # goods['link'] raised, the page-2 fetch below either hit an
            # unbound name or silently reused the *previous* row's link.
            link = None
            try:
                link = goods['link']
                time.sleep(random.uniform(0.3, 1.3))
                try:
                    jumpUrl(link)
                except Exception as exc:
                    print(exc)
                    print('jumpUrl(link) err')
            except Exception as exc:
                print(exc)
                print('link err')
            if link is not None:
                # Second page of the best-seller listing.
                try:
                    url = link + 'ref=zg_bs_pg_2?_encoding=UTF8&pg=2'
                    time.sleep(random.uniform(0.3, 1.3))
                    jumpUrl(url)
                except Exception as exc:
                    print(exc)
                    print('url err')
            try:
                updataStatus(goods['id'])
            except Exception as exc:
                print(exc)
    except Exception as exc:
        print(exc)
        print('===for goods in goodsArr:=========err=======')
        time.sleep(random.uniform(0.1, 1.3))
    # Tail-recurse into the dispatcher to pick up the next pending row.
    firstList()


def firstList():
    """Fetch the next pending link row and crawl it.

    Pulls one unprocessed HealthMenuLink row (status = 0, id < 10000); on
    success bumps the global row counter and delegates to goLink(), which
    recurses back here until no pending rows remain.
    """
    global goodsNum
    # Swap the query to crawl a different source table, e.g.:
    #   select * from BeautyMenuLink where status = 0 limit 1
    # The offset variants ("limit %s, 5" % goodsNum) used goodsNum for paging.
    # Renamed from `str` — the original shadowed the builtin.
    sql = 'select * from HealthMenuLink where status = 0 and id < 10000 limit 1'
    try:
        goodsArr = mysql.query(sql)
    except Exception as exc:
        # Previously a silent bare except; log the DB error before falling back.
        print(exc)
        goodsArr = []

    if len(goodsArr) > 0:
        goodsNum = goodsNum + 1
        goLink(goodsArr)
    else:
        print('=====================================跑完了===============================================================')


def getGoodsDeDetail(response_soup):
    """Extract every product tile from a best-seller listing page and insert
    the parsed rows into the best190918 table.

    :param response_soup: BeautifulSoup of an Amazon best-seller page; tiles
        are ``<li class="zg-item-immersion">`` elements.

    Each field is parsed best-effort: a failed lookup falls back to an
    empty/zero default instead of aborting the page. Rows without a product
    name are skipped. All bare ``except:`` clauses were narrowed to
    ``except Exception`` so KeyboardInterrupt/SystemExit still propagate.
    """
    print('getGoodsDeDetail')
    result_list = response_soup.find_all(name='li', attrs={"class": "zg-item-immersion"})
    print(len(result_list))
    for li in result_list:
        # Product detail URL: first anchor in the tile.
        try:
            url = 'https://www.amazon.com' + li.find('a').attrs['href']
        except Exception:
            url = ''
        # ASIN is the path segment between 'dp/' and '?_encoding'.
        try:
            asin = url.split('dp/')[1].split('?_encoding')[0]
        except Exception:
            asin = ''
        # Product title lives in the aria-hidden div inside the anchor.
        try:
            name = li.find('a').find(name='div', attrs={'aria-hidden': 'true'}).string.strip()
        except Exception:
            name = ''
        try:
            img = li.find('img').attrs['src']
        except Exception:
            img = ''
        # Star rating, e.g. "4.5 out of 5 stars" -> "4.5".
        try:
            star = li.find(name='span', attrs={'class': 'a-icon-alt'}).string.strip()
            star = star.split(' out of ')[0]
            print(star)
        except Exception:
            star = '0'
        # Review count: third anchor in the tile; strip thousands separators.
        try:
            reviews = list(li.select('a'))[2].string.replace(',', '')
            print(reviews)
        except Exception:
            reviews = '0'
        # Price string such as "$19.99": first char is the currency symbol.
        try:
            pricestr = li.find('span', 'p13n-sc-price').string
            moneytype = pricestr[0:1]
            price = pricestr.replace(moneytype, '')
        except Exception:
            price = 0
            moneytype = ''

        goods = {
            'country': user['country'],
            'asin': asin,
            'rankingtype': '',  # top-level category type (not parsed here)
            'ranking': '',  # top-level category rank (not parsed here)
            'star': star,
            'reviews': reviews,
            'moneytype': moneytype,
            'price': price,
            'url': url,
            'name': name,
            'img': img,
        }
        print(goods['asin'])
        if name != '':
            sqlParam = [goods['country'], goods['asin'], '', 0, goods['star'], goods['reviews'],
                        goods['moneytype'], goods['price'], goods['url'], goods['name'], goods['img']]
            mysql.insert(sqlStr, sqlParam)


def updataStatus(id):
    """Mark the given link row as processed (status = 1) and stamp updateTime.

    Builds and runs:
        UPDATE HealthMenuLink SET status=1,updateTime = NOW() WHERE id=<id>;
    Switch table_name to 'BeautyMenuLink' when crawling that source table.
    """
    print('updataStatus   %s' % id)
    table_name = 'HealthMenuLink'
    # table_name = 'BeautyMenuLink'
    updates = {'status': 1}  # columns to overwrite
    set_clause = ','.join('%s=%r' % (column, value) for column, value in updates.items())
    where_clause = ',updateTime = NOW() WHERE %s=%r;' % ('id', id)
    upSqlStr = 'UPDATE %s SET ' % table_name + set_clause + where_clause
    mysql.update(upSqlStr)


if __name__ == '__main__':
    # Entry point: firstList()/goLink() recurse until no pending rows remain.
    firstList()




