import csv
import re
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from pyquery import PyQuery as pq
import requests
# Shared Chrome driver and explicit-wait helper used by every function below.
# NOTE(review): "brower" is a typo for "browser", but it is referenced
# throughout the module, so it is kept as-is here.
brower = webdriver.Chrome()
# Wait up to 10 seconds for expected conditions before raising TimeoutException.
wait = WebDriverWait(brower, 10)
def get_link():
    """Scrape article links from the current listing page, save each to CSV,
    then advance to the next page via next_page().

    Retries itself recursively on TimeoutException (same strategy as the
    original; assumes timeouts are transient).
    """
    try:
        # Wait once for the menu list to be present, then snapshot the page.
        wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#menu-list > li')))
        doc = pq(brower.page_source)
        # `doc` is a static parse of the snapshot — no further waiting can
        # change it, so extract all anchors in one pass.
        hrefs = [
            item.attr.href
            for i in range(1, 15)
            for item in doc(f'#menu-list > li:nth-child({i}) > a').items()
        ]
        for href in hrefs:
            # Skip anchors with no href; concatenating None would produce
            # a garbage URL like "http://www.kekenet.comNone".
            if href is not None:
                save_link('http://www.kekenet.com' + href)
        next_page()
    except TimeoutException:
        get_link()
def next_page():
    """Click the pagination "next" control, then scrape the new page.

    Retries itself recursively on TimeoutException (same strategy as the
    original; assumes timeouts are transient).
    """
    try:
        wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#menu-list > li')))
        # Use element_to_be_clickable rather than presence_of_element_located:
        # a merely-present element may not yet be visible/enabled, and
        # clicking it would raise ElementNotInteractableException.
        next_button = wait.until(EC.element_to_be_clickable(
            (By.CSS_SELECTOR, 'body > div.main > div.box > div.lastPage_left > div.page.th > a:nth-child(8)')))
        next_button.click()
        get_link()
    except TimeoutException:
        next_page()
def save_link(link, path="D:/Awei/link.csv"):
    """Append a single link as one CSV row to *path*.

    Args:
        link: URL string to record.
        path: CSV file to append to (defaults to the original hard-coded
              location, so existing positional callers are unaffected).
    """
    # Original parameter was named `str` and a local was named `list`,
    # shadowing the builtins — renamed to `link` / row literal.
    print(link)
    with open(path, 'a', encoding='utf-8', newline='') as f:
        csv.writer(f).writerow([link])
if __name__ == '__main__':
    # Entry point: open the news listing page, then start crawling links.
    start_url = 'http://www.kekenet.com/read/news/'
    brower.get(start_url)
    get_link()
