import requests
from bs4 import BeautifulSoup
import pymysql
import time
class TypeSpider(object):
    conn = pymysql.connect(
        host='localhost', user='root', password="admin",
        database='python', port=3306,
        charset='utf8'
    )
    cursor = conn.cursor()
    """初始化 传入类型地址"""
    def __init__(self,):
        self.sql="INSERT INTO `cmdb_dnfprice` (`id`, `price`, `date`, `server`) VALUES (NULL,%s, NOW(),%s)" #"insert into video_type(name,href) values(%s,%s)"

    def start(self):
        self.serverFlag = 6
        self.spiderUrl ='https://www.uu898.com/newTrade.aspx?gm=95&area=2323&srv=25077&c=-3'
        # fwq=['https://www.uu898.com/newTrade.aspx?gm=95&area=2323&srv=25077&c=-3',
        #      'https://www.uu898.com/newTrade.aspx?gm=95&area=2331&srv=25018&c=-3']
        # for i in range(0,2):
        #     self.spiderUrl =fwq[i]
        #     if i==0:
        #         self.serverFlag=6
        #     if i==1:
        #         self.serverFlag=5

        self.__analysis(self.__fetch_content())

    def __analysis(self,html):
        try:
            soup = BeautifulSoup(html, 'lxml')
            li=soup.find('li',class_='sp_li1').find('span').get_text()
            price=li.lstrip('1元=').rstrip('万金')
            print(price)
            self.cursor.executemany(self.sql,[(price,self.serverFlag)])
            self.conn.commit()
        except BaseException:
            print("位置错误，跳过")

    def __fetch_content(self):
        resObj=requests.get(self.spiderUrl)
        resObj.encoding="utf-8"
        html=resObj.text
        return html

if __name__=="__main__":
    typeS=TypeSpider()
    while 1 :
        typeS.start()
        time.sleep(10)



