import requests
from bs4 import BeautifulSoup
import pymysql


class Spider():
    """Crawl JD.com search results for a keyword, keep items priced above a
    threshold, and show (or optionally store) them.

    Public interface: ``Spider(goods, pages)`` and ``go()``; ``min_price``
    is a new optional parameter defaulting to the previously hard-coded 4000.
    """

    # Desktop browser User-Agent so the site serves the regular HTML page.
    header = {
        'User-Agent': 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT6.1; Trident/5.0'
    }

    def __init__(self, goods, pages, min_price=4000):
        """
        :param goods: search keyword appended to the JD search URL
        :param pages: number of result pages to crawl
        :param min_price: keep only items priced strictly above this value
                          (default 4000 preserves the original behavior)
        """
        self.__url = 'https://search.jd.com/Search?keyword=' + goods + '&enc=utf-8&scrolling=y&page='
        self.__pages = pages
        self.__min_price = min_price

    def __fetch_content(self, url):
        """Download one results page and return its HTML as text."""
        response = requests.get(url, headers=Spider.header)
        response.encoding = 'utf-8'
        return response.text

    # Parse the product list out of one page of HTML.
    def __analysis(self, html):
        """Return a list of {'title', 'href', 'price'} dicts for every
        product <li> on the page; empty list if the product container is
        missing (blocked request or layout change) instead of raising
        AttributeError as the original did."""
        soup = BeautifulSoup(html, 'lxml')
        goods_list = soup.find(id='J_goodsList')
        if goods_list is None:
            return []
        products = []
        for product in goods_list.find_all('li', class_="gl-item"):
            price = product.find('div', class_='p-price').find('i').get_text()
            name_info = product.find('div', class_='p-name p-name-type-2').find('a')
            products.append({
                'title': name_info['title'],
                'href': name_info.get('href'),
                'price': price,
            })
        return products

    # Refine the raw records: normalize hrefs, drop items at/below threshold.
    def __refine(self, products):
        def above_threshold(info):
            # Renamed from the misleading `less4k`: it KEEPS items whose
            # price is strictly above the threshold. Behavior unchanged.
            return float(info['price']) > self.__min_price

        def complete_href(info):
            # The site emits protocol-relative URLs ('//item.jd.com/...');
            # prefix the scheme so the links are directly usable.
            if info['href'][0] != 'h':
                info['href'] = 'https:' + info['href']
            return info

        return list(filter(above_threshold, map(complete_href, products)))

    def __storage(self, products):
        """Insert the refined products into the `goods` table.

        NOTE(review): credentials are hard-coded in source; move them to
        environment variables or a config file before deploying.
        """
        conn = pymysql.connect(
            host='localhost', user='root', password="GUOan1992",
            database='crawler', port=3306,
            charset='utf8'
        )
        try:
            with conn.cursor() as cursor:
                # Parameterized query: the original concatenated scraped
                # (untrusted) strings into the SQL text, which breaks on any
                # quote in a title and is an injection risk.
                sql = "insert into goods values (%s, %s, %s)"
                for item in products:
                    try:
                        cursor.execute(sql, (item['title'], item['href'], item['price']))
                        conn.commit()
                    except Exception as e:
                        # Best-effort per-row insert, as in the original:
                        # report and roll back, then continue with the rest.
                        print(e)
                        conn.rollback()
        finally:
            # Always release the connection, even if an insert raised.
            conn.close()

    # Display the refined products (one dict per line).
    def __show(self, products):
        for product in products:
            print(product)

    def go(self):
        """Crawl all configured pages: fetch, parse, refine, show."""
        for i in range(self.__pages):
            # Page parameter is 1, 3, 5, ... — presumably half-pages from the
            # site's lazy-loading search (scrolling=y); TODO confirm.
            url = self.__url + str(i * 2 + 1)
            html = self.__fetch_content(url)
            products = self.__analysis(html)
            products = self.__refine(products)
            # self.__storage(products)
            self.__show(products)


def _main():
    """Script entry point: crawl two pages of JD results for '笔记本'."""
    Spider('笔记本', 2).go()


if __name__ == '__main__':
    _main()