from bs4 import BeautifulSoup
import sys
sys.path.append('..\kernel')
sys.path.append('..\model')
import spider_main
import asyncio
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import city_xcurl_3_model
import time
import threading
class City_hotelController(object):
    """Scrape hotel name/price listings from Ctrip mobile list pages.

    The Selenium webdriver is created by the caller and passed into each
    call, so its lifetime (and `quit()`) is the caller's responsibility.
    """

    def __init__(self):
        # Instantiate the core spider object (project-local module).
        self.spider = spider_main.SpiderMain()
        self.hosts_url = 'http://m.ctrip.com'

    async def city_hotel(self, h_url, webdriver_obr):
        """Download one hotel-list page and extract name/price pairs.

        :param h_url: path portion of the URL, appended to ``hosts_url``
        :param webdriver_obr: Selenium webdriver used to fetch the page
        :return: list of dicts with ``'name'`` and ``'price'`` keys
        """
        print(h_url)
        data = self.get_html(h_url, webdriver_obr)
        html_obj = BeautifulSoup(data, 'html.parser', from_encoding='utf-8')
        li_data = html_obj.find_all('li', attrs={'class': 'item'})
        rs_hotel_data = []  # collected results
        for val in li_data:
            try:
                rs_data = {}
                rs_data['name'] = val.h3.contents[0].strip()

                # Bug fix: the original called .text *before* its None
                # check, so a missing 'js-cas-p' span raised AttributeError
                # (silently swallowed) and the 'price' fallback selector
                # could never run.  Check the node before dereferencing.
                price_node = val.find('span', attrs={'class': 'js-cas-p'})
                if price_node is None:
                    price_node = val.find('span', attrs={'class': 'price'})
                rs_data['price'] = price_node.text

                rs_hotel_data.append(rs_data)
            except AttributeError:
                # Malformed <li> (missing h3 or price span): skip this
                # entry rather than aborting the whole page.
                continue
        print(rs_hotel_data)
        # Return the parsed rows so callers can use them; the original
        # only printed and discarded the data (backward compatible —
        # existing callers ignored the implicit None).
        return rs_hotel_data

    def get_html(self, h_url, webdriver_obr):
        """Fetch the rendered HTML for ``hosts_url + h_url`` via the driver."""
        url = self.hosts_url + h_url
        # webdriver_obr.implicitly_wait(5)
        webdriver_obr.get(url)
        # page_source holds the rendered HTML after navigation completes.
        return webdriver_obr.page_source

    def get_hotel_url(self):
        """Load the list of URL records to crawl from the model layer."""
        c_model = city_xcurl_3_model.Xcurl_3()
        return c_model.get_xcurl_by_id(0)

if __name__ == '__main__':
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended monotonic replacement for elapsed-time measurement.
    start = time.perf_counter()
    desire = DesiredCapabilities.PHANTOMJS.copy()

    # Configure PhantomJS request headers: the site rejects requests
    # carrying the default PhantomJS User-Agent.
    headers = {
        'Accept': '*/*',
        'Accept-Language': 'en-US,en;q=0.8',
        'Cache-Control': 'max-age=0',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.4.160.175:808 Safari/537.36',
        'Connection': 'keep-alive',
        'Referer': 'http://m.ctrip.com'
    }
    for key, value in headers.items():
        desire['phantomjs.page.customHeaders.{}'.format(key)] = value

    service_args = [
        '--load-images=no',           # skip image loading (faster pages)
        '--disk-cache=yes',           # enable the disk cache
        '--ignore-ssl-errors=true',   # ignore HTTPS certificate errors
        '--proxy=122.4.160.175:808',  # route requests through this proxy
        '--proxy-type=http',
    ]
    webdriver_obr = webdriver.PhantomJS(desired_capabilities=desire, service_args=service_args)

    obj = City_hotelController()
    data = obj.get_hotel_url()  # URL records to crawl

    loop = asyncio.get_event_loop()
    try:
        # asyncio.wait() with bare coroutines is deprecated (removed in
        # 3.11); gather() accepts coroutines directly and propagates
        # exceptions instead of silently collecting them.
        loop.run_until_complete(
            asyncio.gather(*(obj.city_hotel(var['url'], webdriver_obr) for var in data))
        )
    finally:
        # Always release the event loop and the PhantomJS child process,
        # even when a crawl task raises — otherwise phantomjs leaks.
        loop.close()
        webdriver_obr.quit()

    end = time.perf_counter()
    print('Running time: %s Seconds' % (end - start))
# Performance-optimization references (PhantomJS tuning and correct usage):
#https://thief.one/2017/03/01/Phantomjs%E6%80%A7%E8%83%BD%E4%BC%98%E5%8C%96/
#https://thief.one/2017/03/31/Phantomjs%E6%AD%A3%E7%A1%AE%E6%89%93%E5%BC%80%E6%96%B9%E5%BC%8F/