import csv
import logging
import re
import time

import requests
from lxml import etree
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

# Root of the site being scraped (China Meteorological science-popularization portal).
base_url = 'http://www.qxkp.net/'
# The two bare triple-quoted strings below are inert module-level string
# literals used as a catalogue of the site's category listing pages
# (URL -> Chinese category name).  The first group appears to be a
# previously-crawled batch; the second matches list_url_path further down.
'''
# 气象服务
    http://www.qxkp.net/qxfw/shqx/ # 生活气象
    http://www.qxkp.net/qxfw/nyqx/ # 农业气象
    http://www.qxkp.net/qxfw/lyqx/ # 旅游气象
    http://www.qxkp.net/qxfw/jtqx/ #  交通气象
# 灾害防御
    http://www.qxkp.net/zhfy/ghzh/ # 干旱灾害
    http://www.qxkp.net/zhfy/byhl/ # 暴雨洪涝
    http://www.qxkp.net/zhfy/hczh/ # 寒潮灾害
    http://www.qxkp.net/zhfy/gwfh/ # 高温防护
# 学习园地
    http://www.qxkp.net/xxyd/qxsy/ # 气象术语
    http://www.qxkp.net/xxyd/qxzhyjxx/ # 预警信号
    http://www.qxkp.net/xxyd/qxzs/ # 气象指数
    http://www.qxkp.net/xxyd/qxyy/ # 气象谚语
# 在线互动
    http://www.qxkp.net/zxhd/ywyd/ # 有问有答
    http://www.qxkp.net/zxhd/zjhd/ # 专家互动
'''

'''
# 气象服务
    http://www.qxkp.net/qxfw/xyqx/ # 校园气象
    http://www.qxkp.net/qxfw/wcnr/ # 未成年人
    http://www.qxkp.net/qxfw/nm/ # 农民
    http://www.qxkp.net/qxfw/czldz/ # 城镇劳动者
    http://www.qxkp.net/qxfw/sqjm/ # 社区居民
    http://www.qxkp.net/qxfw/gwy/ # 公务员
    http://www.qxkp.net/qxfw/kphd/ # 科普活动
    http://www.qxkp.net/qxfw/kprd/ # 科普热点
# 灾害防御
    http://www.qxkp.net/zhfy/scb/ # 沙尘暴
    http://www.qxkp.net/zhfy/tffy/ # 台风防御
    http://www.qxkp.net/zhfy/flaq/ # 防雷安全
    http://www.qxkp.net/zhfy/fzjz/ # 防震减灾
    http://www.qxkp.net/zhfy/jdtqsj/ # 极端天气事件
# 学习园地
    http://www.qxkp.net/xxyd/qhbh/ # 气候变化
    http://www.qxkp.net/xxyd/qhkp/ # 气侯科普
    http://www.qxkp.net/xxyd/essjq/ # 二十四节气
    http://www.qxkp.net/xxyd/kptstj/ # 科普图书推荐
    http://www.qxkp.net/xxyd/rgyxtq/ # 人工影响天气
    http://www.qxkp.net/xxyd/essjqfm/ # 二十四节气封面
    http://www.qxkp.net/xxyd/qxkj/ # 气象科技
    http://www.qxkp.net/xxyd/yjxx/ # 预警信息
'''

# Browser-like User-Agent header.
# NOTE(review): `headers` is never referenced in this script (scraping is
# done via selenium below), and the `requests`/`lxml`/`re` imports also
# appear unused — likely leftovers from an earlier requests-based version.
headers = {
    'User-Agent':'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36',
}

def save_url_and_title(url, title, path='url_and_title3.csv'):
    """Append one article record as a CSV row and log it at INFO level.

    Parameters:
        url: absolute URL of the article.
        title: article title; may contain commas/quotes — properly quoted.
        path: CSV file to append to (defaults to the original hard-coded
            filename, so existing callers are unaffected).
    """
    logging.info('%s,%s', title, url)
    # The original naive `title + ',' + url` produced a corrupt row whenever
    # the title itself contained a comma; csv.writer quotes such fields.
    # An explicit UTF-8 encoding avoids locale-dependent mojibake for the
    # Chinese titles; newline='' is the csv-module requirement.
    with open(path, 'a', encoding='utf-8', newline='') as f:
        csv.writer(f).writerow([title, url])

def get_url(driver, page_url):
    """Scrape (url, title) pairs from a listing page and follow pagination.

    Loads `page_url`, saves every article link on it via
    `save_url_and_title`, then follows the '下一页' (next page) link until
    the last page is reached.

    Parameters:
        driver: an already-configured selenium WebDriver.
        page_url: absolute URL of the first listing page to scrape.

    Returns None.  Side effects: appends rows to the CSV and prints progress.
    """
    # Iterate instead of the original tail recursion: categories with many
    # pages could otherwise exhaust Python's recursion limit.
    while page_url:
        driver.get(page_url)
        # One query is enough: each <a> carries both the href and the
        # visible title text (the original ran the same XPath twice).
        links = driver.find_elements_by_xpath(
            '/html/body/div[1]/div[4]/div[2]/div/div[2]/ul/li/div[1]/a')
        for link in links:
            article_url = link.get_attribute("href")
            title = link.text
            print(article_url, title)
            save_url_and_title(article_url, title)
        # Pagination: a[3] is the '下一页' link on ordinary pages; on the
        # last page the layout changes and a[1] is inspected instead.
        next_el = None
        try:
            next_el = driver.find_element_by_xpath(
                '/html/body/div[1]/div[4]/div[2]/div/div[3]/div[1]/a[3]')
        except Exception as e:
            print(e)
        if next_el is None:
            try:
                next_el = driver.find_element_by_xpath(
                    '/html/body/div[1]/div[4]/div[2]/div/div[3]/div[1]/a[1]')
                if next_el.text == '首页':
                    # a[1] reading '首页' (home) marks the final page.
                    print('最后一页')
                    return None
            except Exception as e:
                print(e)
        if next_el is None:
            return None
        page_url = next_el.get_attribute("href")
        print('下一页:', page_url)

# Mirror every saved row (and any other INFO+ message) into a log file
# alongside the CSV output.
logging.basicConfig(level=logging.INFO, filename='url_and_title3.log')
# Inert module-level string literal: the category pages this run targets,
# mirrored by `list_url_path` immediately below (URL -> Chinese name).
'''
# 气象服务
    http://www.qxkp.net/qxfw/xyqx/ # 校园气象
    http://www.qxkp.net/qxfw/wcnr/ # 未成年人
    http://www.qxkp.net/qxfw/nm/ # 农民
    http://www.qxkp.net/qxfw/czldz/ # 城镇劳动者
    http://www.qxkp.net/qxfw/sqjm/ # 社区居民
    http://www.qxkp.net/qxfw/gwy/ # 公务员
    http://www.qxkp.net/qxfw/kphd/ # 科普活动
    http://www.qxkp.net/qxfw/kprd/ # 科普热点
# 灾害防御
    http://www.qxkp.net/zhfy/scb/ # 沙尘暴
    http://www.qxkp.net/zhfy/tffy/ # 台风防御
    http://www.qxkp.net/zhfy/flaq/ # 防雷安全
    http://www.qxkp.net/zhfy/fzjz/ # 防震减灾
    http://www.qxkp.net/zhfy/jdtqsj/ # 极端天气事件
# 学习园地
    http://www.qxkp.net/xxyd/qhbh/ # 气候变化
    http://www.qxkp.net/xxyd/qhkp/ # 气象科普
    http://www.qxkp.net/xxyd/essjq/ # 二十四节气
    http://www.qxkp.net/xxyd/kptstj/ # 科普图书推荐
    http://www.qxkp.net/xxyd/rgyxtq/ # 人工影响天气
    http://www.qxkp.net/xxyd/essjqfm/ # 二十四节气封面
    http://www.qxkp.net/xxyd/qxkj/ # 气象科技
    http://www.qxkp.net/xxyd/yjxx/ # 预警信息
'''
# Relative paths (joined with base_url) of every category listing page to
# crawl.  BUG FIX: the original was missing the comma after 'qxfw/kprd/',
# so implicit string concatenation fused it with 'zhfy/scb/' into the bogus
# entry 'qxfw/kprd/zhfy/scb/', silently skipping both categories.
list_url_path = [
    'qxfw/xyqx/', 'qxfw/wcnr/', 'qxfw/nm/', 'qxfw/czldz/', 'qxfw/sqjm/',
    'qxfw/gwy/', 'qxfw/kphd/', 'qxfw/kprd/',
    'zhfy/scb/', 'zhfy/tffy/', 'zhfy/flaq/', 'zhfy/fzjz/', 'zhfy/jdtqsj/',
    'xxyd/qhbh/', 'xxyd/qhkp/', 'xxyd/essjq/', 'xxyd/kptstj/',
    'xxyd/rgyxtq/', 'xxyd/essjqfm/', 'xxyd/qxkj/', 'xxyd/yjxx/',
]

# --- Script entry point: launch Chrome and crawl every category ----------
# NOTE(review): re-defines the same constant already set at the top of the
# file (harmless, but one of the two should go).
base_url = 'http://www.qxkp.net/'
# Warm-up page (生活气象) loaded once before the crawl loop starts.
first_url = 'http://www.qxkp.net/qxfw/shqx/'
ops = Options()
# ops.add_argument('--proxy-server=http://112.87.69.76:9999')
# Selenium 3 style API; the chromedriver binary path is machine-specific.
driver = webdriver.Chrome(executable_path='/home/ubuntu/桌面/ljz/dianping/chromedriver',chrome_options=ops)
driver.get(first_url)
time.sleep(2)  # give the warm-up page time to render
for url_path in list_url_path:
    url = base_url + url_path
    get_url(driver, url)
    time.sleep(2)  # polite delay between categories
