# -*- coding: utf-8 -*-
# 1. Fetch a single listing page
# 2. Fetch each cat's detail page
# 3. Write the data to the database
# 4. Check whether this is the last page; if so, go to step 5
# 5. Repeat steps 1-4

import requests
import re
import pymysql
from lxml import etree


# Browser-like User-Agent so the site serves normal pages to the scraper.
header = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36"}
# First listing page (category 2) and the site root used to turn the
# relative hrefs scraped from pages into absolute URLs.
URL = "http://www.maomijiaoyi.com/index.php?/chanpinliebiao_c_2.html"
URL2 = "http://www.maomijiaoyi.com"

# Shared MySQL connection and cursor used by detail() below.
# NOTE(review): credentials are hard-coded in source — move them to a
# config file or environment variables before real use.
db = pymysql.connect(
    host="192.168.139.110",
    port=3306,
    user="root",
    password="123456",
    db="pet_data",
    charset='utf8'
)
cursor = db.cursor()

def openwebsite(url):
    """Fetch one listing page and extract detail links plus pager nodes.

    Returns a 6-tuple:
        (parsed tree,
         detail-page hrefs,
         last-page anchor elements,
         next-page anchor elements,
         last-page hrefs,
         next-page hrefs)
    """
    response = requests.get(url, headers=header)
    tree = etree.HTML(response.text.encode('utf-8'))

    # Links to the individual cat detail pages on this listing page.
    detail_links = tree.xpath("//div[@class='breeds_floor']/div/div/a/@href")

    # Pager anchors: the last one is the "末页" (last page) link, the one
    # before it is "下一页" (next page).
    last_anchor = tree.xpath("//div[@class='maigou_maigou_page']/a[last()]")
    next_anchor = tree.xpath("//div[@class='maigou_maigou_page']/a[last()-1]")
    last_href = tree.xpath("//div[@class='maigou_maigou_page']/a[last()]/@href")  # list
    next_href = tree.xpath("//div[@class='maigou_maigou_page']/a[last()-1]/@href")  # list

    return (tree, detail_links, last_anchor, next_anchor, last_href, next_href)


def clean(*data):
    """Normalize one scraped xpath result into a plain string.

    Each argument is expected to be the list returned by ``xpath()``.
    Only the first argument is used (the original loop returned on its
    first iteration, so this matches historical behaviour).

    Returns the first element of that list with spaces, newlines,
    carriage returns and both ASCII/fullwidth commas removed, or ''
    when the xpath matched nothing (the old version raised IndexError
    on an empty list).
    """
    if not data or not data[0]:
        return ''
    # One C-level pass instead of five chained .replace() calls.
    return data[0][0].translate(str.maketrans('', '', ' \n\r,，'))


def detail(detailList, url2):
    """Visit every detail page in *detailList* and insert one DB row per cat.

    detailList: relative hrefs scraped from a listing page.
    url2: site root prepended to each relative href.

    Uses the module-level ``cursor``/``db`` and commits after every insert
    so a mid-crawl failure keeps the rows written so far.
    """
    for href in detailList:
        resp = requests.get(url2 + href)
        page = etree.HTML(resp.text.encode('utf-8'))

        # Location (from the breadcrumb trail)
        province = page.xpath("//div[@class='bread_crumb']/a[last()-2]/span/text()")
        city = page.xpath("//div[@class='bread_crumb']/a[last()-1]/span/text()")

        # Basic info
        name = page.xpath("//div[@class='detail_text']/div[1]/text()")  # cat name
        vistorcount = page.xpath("//div[@class='info1']/div[1]/span[@class='red'][1]/text()")  # view count
        pure = page.xpath("//div[@class='xinxi_neirong']/div[1]/div[2]/div[1]/span/text()")  # purebred?
        pet_type = page.xpath("//div[@class='xinxi_neirong']/div[1]/div[2]/div[2]/span/text()")  # breed
        haved = page.xpath("//div[@class='xinxi_neirong']/div[1]/div[2]/div[3]/span/text()")  # stock for sale
        sex = page.xpath("//div[@class='xinxi_neirong']/div[1]/div[2]/div[4]/span/text()")
        detail_index = page.xpath("//div[@class='bread_crumb']/a[last()]/@href")  # detail page href
        picture = page.xpath("//div[@class='small_img']/div[2]/img/@src")  # photo

        # Seller info
        shopname = page.xpath("//div[@class='dinming']/text()")
        price = page.xpath("//span[@class='red size_24']/text()")  # price (yuan)
        person = page.xpath("//div[@class='user_info']/div[1]/div[2]/text()")  # contact name
        phone_number = page.xpath("//div[@class='user_info']//span[2]/text()")
        postage = page.xpath("//div[@class='user_info']/div[3]/div[2]/text()")  # free shipping?

        # Health
        vaccine = page.xpath("//div[@class='xinxi_neirong']/div[2]/div[2]/div[1]/span/text()")  # vaccinations
        debug = page.xpath("//div[@class='xinxi_neirong']/div[2]/div[2]/div[2]/span/text()")  # deworming
        age = page.xpath("//div[@class='xinxi_neirong']/div[2]/div[2]/div[3]/span/text()")  # age
        video = page.xpath("//div[@class='xinxi_neirong']/div[2]/div[2]/div[4]/span/text()")  # video available?

        # Keep only the numeric part of the count-like fields.
        haved = re.findall(r'\d+', clean(haved))
        vaccine = re.findall(r'\d+', clean(vaccine))
        age = re.findall(r'\d+', clean(age))

        # Parameterized statement: pymysql quotes/escapes every value,
        # fixing the SQL-injection / quote-breakage risk of the old
        # %-formatted query string.
        sql = ("INSERT INTO `dwd_pet_data`(`province`, `city`, `pet_name`, "
               "`vistor_count`, `pure`, `pet_type`, `pet_haved`, `sex`, "
               "`detail_index`, `picture`, `shopname`, `price`, `person`, "
               "`phone_number`, `postage`, `vaccine`, `debug`, `age_month`, `video`) "
               "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, "
               "%s, %s, %s, %s, %s, %s, %s, %s, %s)")
        cursor.execute(sql, (
            clean(province), clean(city), clean(name), clean(vistorcount),
            clean(pure), clean(pet_type), haved[0], clean(sex),
            url2 + clean(detail_index), url2 + clean(picture),
            clean(shopname), clean(price), clean(person), clean(phone_number),
            clean(postage), vaccine[0], clean(debug), age[0], clean(video),
        ))
        db.commit()


if __name__ == "__main__":
    # Crawl the first listing page, then follow "next page" links until the
    # pager no longer offers a "末页" (last page) anchor.
    print("开始爬取数据")
    # NOTE: the old `global BigData` statement was removed — `global` is a
    # no-op at module scope.
    BigData = openwebsite(URL)
    print("开始写入数据")
    detail(BigData[1], URL2)

    while True:
        print("下一页")
        # BigData[2] is the list of last-page anchors; its text still reads
        # "末页" while there are more pages to fetch.
        last_page_text = BigData[2][0].text.replace(' ', '')
        print(last_page_text)
        if '末页' in last_page_text:
            # BigData[5] holds the next-page hrefs (relative to the site root).
            next_url = URL2 + BigData[5][0]
            print(next_url)
            BigData = openwebsite(next_url)
            detail(BigData[1], URL2)
        else:
            break
    cursor.close()
    db.close()