# -*- coding: utf-8 -*-
"""
Created on Thu Jul 19 21:41:07 2018

@author: tanle
"""
import requests
from bs4 import BeautifulSoup
import pymysql
import time

# Connect to the local MySQL database that will receive the scraped hotels.
conn = pymysql.connect(
    host="127.0.0.1",
    port=3306,
    user="root",
    passwd="hmyhmy",
    db="test",
    charset="UTF8",
)
cur = conn.cursor()

# Parallel accumulators — one entry per hotel, same index across all four.
urldata = []       # hotel detail-page links (value of the card's "lnk" attribute)
name_data = []     # hotel names
img_data = []      # hotel cover-image URLs
address_data = []  # hotel street addresses
all_city = []      # relative hrefs of every city landing page

# Browser-like headers so the site does not reject us as a bot.
user_agent = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36"
headers = {
    'User-Agent': user_agent,
    "Referer": "http://hotels.ctrip.com/Domestic/MapIframeDetail.aspx?city=2&province=2",
}

# A Session keeps headers (and cookies) across every request we make.
request = requests.Session()
request.headers.update(headers)
# (The redundant second `all_city = []` that re-initialized the list was removed.)

# Fetch the city index page and collect every city landing-page link.
base_url = "http://hotels.ctrip.com/events/beijing1/chongwubanzuoyou9zhejiudiansuibianzhupc.html"
# Use the session so the browser-like headers are actually sent — a bare
# requests.get() (as before) bypassed the session and sent no headers at all.
url = request.get(base_url, timeout=30)
url.raise_for_status()  # abort early on any HTTP error status
html = url.text
soup = BeautifulSoup(html, "lxml")
# CSS selector: every <a> whose href starts with /events/ is a city page.
for city_link in soup.select('a[href^="/events/"]'):
    all_city.append(city_link.get("href"))
    
# Scrape each city page: hotel link, name, address and cover image.
# NOTE: the original rebuilt headers and a fresh Session on every iteration
# and then bypassed them with a bare requests.get(); we reuse the session
# created above so the headers are actually sent.
for city_href in all_city:
    page_url = "http://hotels.ctrip.com" + city_href
    resp = request.get(page_url, timeout=30)
    resp.raise_for_status()
    resp.encoding = resp.apparent_encoding  # let requests detect the real charset
    soup = BeautifulSoup(resp.text, "lxml")

    # Each hotel card on the page is a <div class="tblk cf">.
    for card in soup.find_all("div", attrs={"class": "tblk cf"}):
        urldata.append(card.get("lnk"))

        # NOTE(review): the original prepended "http:" to the hotel NAME,
        # which looks like a copy/paste slip; the name is stored as-is.
        try:
            name_data.append(card.find("h3").get_text())
        except AttributeError:
            # Keep the parallel lists aligned — the original mistakenly
            # appended to img_data here, desynchronizing every later row.
            name_data.append("")

        try:
            address_data.append(card.find("p", attrs={"class": "hotel_location"}).get_text())
        except AttributeError:  # find() returned None — no address element
            address_data.append("")

        try:
            img_data.append(card.find("img")['src'])
        except (AttributeError, TypeError, KeyError):
            # no <img> tag, or the tag lacks a src attribute
            img_data.append("")
        




# Persist scraped hotels. Each image row references the auto-increment id
# (cur.lastrowid) of the service row inserted just before it.
# min() guards against desynchronized parallel lists.
record_count = min(len(urldata), len(name_data), len(address_data), len(img_data))
for i in range(record_count):
    if name_data[i] == "":
        continue  # no name scraped — skip the record entirely

    # Parameterized queries: scraped text may contain quotes, so the old
    # %-interpolated SQL both broke on them and was SQL-injectable.
    try:
        cur.execute(
            "INSERT INTO tmp_service (name, address, website) VALUES (%s, %s, %s)",
            (name_data[i], address_data[i], urldata[i]),
        )
        conn.commit()
        print('信息提交成功')
    except Exception as e:
        print(e)
        conn.rollback()
        print('信息提交失败')
        # Without a service row there is no valid id to attach an image to;
        # the original fell through and reused a stale lastrowid here.
        continue

    try:
        cur.execute(
            "INSERT INTO tmp_service_img (tmp_service_id, pic, ishome) VALUES (%s, %s, 1)",
            (cur.lastrowid, img_data[i]),
        )
        conn.commit()
        print('信息提交成功')
    except Exception as e:
        print(e)
        print('信息提交失败')
        conn.rollback()
    