from urllib3 import *
import sqlite3
import json
import re
import os
from bs4 import BeautifulSoup
import requests
import math
import threading
import random
from fake_useragent import UserAgent

disable_warnings()
# Create the database.
# NOTE: any previous crawl output is deleted so each run starts from scratch.

dbPath = 'bookNew2.sqlite'
if os.path.exists(dbPath):
    os.remove(dbPath)
conn = sqlite3.connect(dbPath)
cursor = conn.cursor()
# One row per recommended book scraped from Dangdang ("also bought" lists).
cursor.execute('''create table t_sales
            (id integer primary key autoincrement not null,
            name text null, 
            author text null,
             discount text not null,
             commentNum text not null,
             price text not null
            );''')
conn.commit()

# Cookie hijacking: requests are sent with headers captured from a real browser session.
# NOTE(review): this PoolManager appears unused in the visible code — confirm before removing.
http = PoolManager()

def str2Headers(file):
    """Parse a saved raw HTTP-header dump into a dict.

    Each line is expected as 'Name: value'. The value keeps whatever
    whitespace follows the colon (matching the original split-on-':'
    behavior). Blank or colon-less lines — e.g. the empty string produced
    by a trailing newline — are skipped instead of raising IndexError,
    and the file handle is released via a context manager.
    """
    headerDict = {}
    with open(file, 'r') as f:
        for header in f.read().split('\n'):
            parts = header.split(':', 1)
            if len(parts) == 2:  # guard: original crashed on colon-less lines
                headerDict[parts[0]] = parts[1]
    return headerDict
# NOTE(review): this module-level `headers` appears unused in the visible code
# (get_ip_list/get_Proxy build their own) — confirm against the rest of the project.
headers = str2Headers('headers.txt')
def str2Headers2(file):
    """Parse a raw HTTP-header dump into a dict.

    NOTE(review): logic is identical to str2Headers — the two could be
    collapsed into one helper; kept separate so callers are unaffected.
    Lines are 'Name: value'; the value keeps its leading whitespace.
    Blank or colon-less lines (e.g. a trailing newline) are skipped
    instead of raising IndexError, and the file is closed via `with`.
    """
    headerDict = {}
    with open(file, 'r') as f:
        for header in f.read().split('\n'):
            parts = header.split(':', 1)
            if len(parts) == 2:  # guard: original crashed on colon-less lines
                headerDict[parts[0]] = parts[1]
    return headerDict
headers2 = str2Headers2('headers2.txt')  # used by getExactPage / getProductIdList

####### Proxy-IP harvesting, to work around anti-scraping measures #######
def get_ip_list(url, headers):
  """Scrape a proxy-list page and return its entries as 'ip:port' strings."""
  response = requests.get(url, headers=headers)
  soup = BeautifulSoup(response.text, 'lxml')
  rows = soup.find_all('tr')
  ip_list = []
  for row in rows[1:]:  # skip the table-header row
    cells = row.find_all('td')
    ip_list.append(cells[1].text + ':' + cells[2].text)
  return ip_list
def get_random_ip(ip_list):
  """Pick one harvested 'ip:port' at random and wrap it as a requests proxies dict."""
  candidates = ['http://' + addr for addr in ip_list]
  chosen = random.choice(candidates)
  return {'http': chosen}
def get_Proxy():
  """Return a random HTTP proxies dict scraped from a public proxy-list site.

  NOTE(review): performs live network I/O; xicidaili.com availability is
  not guaranteed — confirm the source is still reachable.
  """
  url = 'http://www.xicidaili.com/nn/'
  headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
  }
  ip_list = get_ip_list(url, headers=headers)
  proxies = get_random_ip(ip_list)
  return proxies

# Bug fix: get_Proxy() was previously called BEFORE its definition,
# which raised NameError at import time. Definition now precedes the call.
proxies = get_Proxy()
print(proxies)
####### End of proxy-IP harvesting section #######
  

def getExactPage(productID):
    """Fetch the 'also bought' recommendations for one product and store each
    entry (name, author, discount, comment count, price) into t_sales.

    Relies on module globals: headers2, cursor, conn.
    """
    url = 'http://product.dangdang.com/index.php?r=callback%2Frecommend&productId='+productID+'&shopId=0&pageType=publish&module=&isBroad=true'
    ua = UserAgent()
    headers2['User-Agent'] = ua.ie  # rotate the UA on every request
    r = requests.get(url, headers=headers2)
    r.encoding = r.apparent_encoding
    if r:
        # Parse the JSON payload once (the original re-parsed it three times).
        data = json.loads(r.text)['data']
        if 'alsoBuy' in data:
            print(data['alsoBuy']['list'])
            for item in data['alsoBuy']['list']:
                name = item['productName']
                author = item['authorName']
                discount = item['discount'].replace('[', '').replace(']', '').replace('折', '')
                commentNum = item['commentNum']
                price = item['salePrice']
                # Parameterized query: scraped text must never be interpolated
                # into SQL (the original %-formatting was injection-prone and
                # broke on values containing quotes).
                cursor.execute(
                    'insert into t_sales(name,author,discount,commentNum,price) '
                    'values(?,?,?,?,?)',
                    (name, author, discount, commentNum, price))
                conn.commit()
                print('insert 1 data')
        else:
            print("no alsoBuy")
    else:
        print('no response')
    
def getProductIdList():
    """Search Dangdang for the hard-coded keyword, extract each result's
    product id from its title link, and scrape its recommendations.

    Closes the module-level DB connection when done.
    """
    # 'key' is the GB2312-percent-encoded search keyword.
    url = 'http://search.dangdang.com/?key=%C8%CB%BC%E4%CA%A7%B8%F1&act=input&page_index=1'
    ua = UserAgent()
    headers2['User-Agent'] = ua.ie
    r = requests.get(url, headers=headers2)
    r.encoding = r.apparent_encoding

    soup = BeautifulSoup(r.text, 'lxml')

    # Dead code removed: the original also computed a page count from the
    # pager div (never used, and crashed with AttributeError when the div
    # was missing) and built an empty, unused linkList.
    for tag in soup.find_all('a', attrs={'name': 'itemlist-title'}):
        # href looks like http://product.dangdang.com/<id>.html
        productID = tag['href'].replace('http://product.dangdang.com/', '').replace('.html', '')
        getExactPage(productID)
    conn.close()

# Entry point: crawl the search results, then each product's recommendations.
getProductIdList()

