import requests
from bs4 import BeautifulSoup
import pymysql
import random


def daima(lista):
    """Crawl stock-list index pages 150-185 and collect stock rows.

    Delegates each page to gethtml(), which parses the table and appends
    (num, name, lname, sname, zone) tuples to *lista* in place.

    NOTE(review): the original special-cased page 1 with a plain index URL,
    but the loop starts at 150, so that branch was unreachable dead code
    (as was the `i = 1` assignment immediately clobbered by the for loop).
    """
    base = 'http://www.yz21.org/stock/info/'
    for page in range(150, 186):
        print('爬取网页第%d页...' % page)
        gethtml(base + 'stocklist_' + str(page) + '.html', lista)


def gethtml(url, lista):
    """Fetch one stock-list page and append one tuple per table row to *lista*.

    Each appended tuple is (num, name, lname, sname, zone), where *zone* is
    the two-character exchange prefix sliced from the row's detail-page href.

    Raises requests.HTTPError (via raise_for_status) on a bad status code.
    """
    kv = {'user-agent': 'Mozilla/4.0'}
    wd = {'wd': 'python'}
    r = requests.get(url, params=wd, headers=kv, timeout=30)
    r.raise_for_status()
    r.encoding = r.apparent_encoding
    soup = BeautifulSoup(r.text, 'html.parser')
    table = soup.find('table', attrs={'class': 'stockBlock'})
    # Hoisted: the original re-ran table.find_all('tr') on every loop
    # iteration (accidental O(n^2)) and called row.find_all('td') five
    # times per row.
    rows = table.find_all('tr')
    for row in rows[1:]:  # rows[0] is the header row
        tds = row.find_all('td')
        num = tds[1].string
        name = tds[2].string
        lname = tds[3].string
        sname = tds[4].string
        # zone: chars [2:4] of the href, e.g. the 'sh'/'sz' exchange prefix
        zone = tds[1].find('a').attrs['href'][2:4]
        lista.append((num, name, lname, sname, zone))



def gupiao(lista, listb):
    """For every stock row in *lista*, fetch its Baidu detail page.

    Builds 'https://gupiao.baidu.com/stock/<zone><code>.html' per stock and
    delegates to getinfor(), which appends price data to *listb* in place.
    """
    base = 'https://gupiao.baidu.com/stock/'
    # enumerate instead of range(len(...)): iterate values and index together
    for i, row in enumerate(lista):
        print('爬取股票第%d条...' % i)
        code = row[0]  # stock code
        zone = row[4]  # exchange prefix, e.g. 'sh'/'sz'
        getinfor(base + zone + str(code) + '.html', i, listb, code, zone)


def getinfor(url, i, listb, a, b):
    """Fetch one stock detail page and append its price data to *listb*.

    Appends (a, b, price, jingzhi, baifenbi) on success; on any failure,
    prints an error line and skips the stock (best-effort scraping).

    Parameters:
        url   -- detail-page URL
        i     -- loop index (unused here; kept for interface compatibility)
        listb -- output list, mutated in place
        a     -- stock code
        b     -- zone / exchange prefix
    """
    try:
        # Rotate through several user agents to look less like a bot.
        kv_list = [
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
            "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
            "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36",
            "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
            "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15",
        ]
        kv = {'user-agent': random.choice(kv_list)}
        # NOTE(review): hard-coded proxy address; requests fail if it is
        # unreachable — confirm this proxy is still wanted.
        proxies = {'http': 'http://10.10.1.10:3128'}
        wd = {'wd': 'python'}
        r = requests.get(url, params=wd, headers=kv, proxies=proxies, timeout=30)
        r.raise_for_status()
        r.encoding = r.apparent_encoding
        soup = BeautifulSoup(r.text, 'html.parser')
        bets = soup.find('div', attrs={'class': "stock-bets"})
        # Three consecutive value tags: price, then net value, then percent;
        # next_sibling.next_sibling skips the whitespace text node between tags.
        price_tag = bets.find('strong')
        jingzhi_tag = price_tag.next_sibling.next_sibling
        baifenbi_tag = jingzhi_tag.next_sibling.next_sibling
        price = str(price_tag.string)
        jingzhi = str(jingzhi_tag.string)
        baifenbi = str(baifenbi_tag.string)
        listb.append((a, b, price, jingzhi, baifenbi))
    except Exception:
        # Was a bare `except:` — narrowed so Ctrl-C (KeyboardInterrupt)
        # and SystemExit still propagate; everything else is best-effort.
        print('%s %s is error' % (b, a))



def dba_save(list):
    """Persist the stock-list tuples into the MySQL table stock_list.

    Each element of *list* is (num, name, lname, sname, zone); the INSERT
    column order is (cod, zone, name, lname, sname).

    NOTE(review): parameter name `list` shadows the builtin; kept only for
    backward compatibility with existing keyword callers.
    NOTE(review): column is spelled `cod` here vs `code` in stock_value —
    confirm against the actual schema.
    """
    db = pymysql.connect(host='localhost', user='root', passwd='123456',
                         db='stock', port=3306, charset='utf8')
    conn = db.cursor()
    try:
        # Parameterized query: the original interpolated values directly
        # into the SQL string, which breaks on quotes and is injection-prone.
        sql = 'insert into stock_list(cod,zone,name,lname,sname) values(%s,%s,%s,%s,%s)'
        for i, row in enumerate(list):
            print('写入股票代码信息第%d条...' % i)
            conn.execute(sql, (row[0], row[4], row[1], row[2], row[3]))
            db.commit()
    finally:
        # Close cursor and connection even if an insert raises.
        conn.close()
        db.close()


def dbb_save(listb):
    """Persist the per-stock price tuples into the MySQL table stock_value.

    Each element of *listb* is (code, zone, value, value_j, value_b), which
    matches the INSERT column order directly.
    """
    db = pymysql.connect(host='localhost', user='root', passwd='123456',
                         db='stock', port=3306, charset='utf8')
    conn = db.cursor()
    try:
        # Parameterized query: the original interpolated values directly
        # into the SQL string, which breaks on quotes and is injection-prone.
        sql = 'insert into stock_value(code,zone,value,value_j,value_b) values(%s,%s,%s,%s,%s)'
        for i, row in enumerate(listb):
            print('写入股票价格信息第%d条...' % i)
            conn.execute(sql, row)
            db.commit()
    finally:
        # Close cursor and connection even if an insert raises.
        conn.close()
        db.close()


def main():
    """Run the full pipeline: crawl the stock list, fetch per-stock prices,
    then save both result sets to MySQL."""
    lista = []  # (num, name, lname, sname, zone) tuples from the list pages
    listb = []  # (code, zone, price, jingzhi, baifenbi) tuples from detail pages
    daima(lista)
    gupiao(lista, listb)
    dba_save(lista)
    dbb_save(listb)
    print('DONE')


if __name__ == '__main__':
    # Guard so importing this module does not kick off a full crawl.
    main()


