import csv
import time

import requests
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By


# Create fliggy.csv from scratch ("w" truncates any previous run's file).
# utf-8-sig writes a BOM so Excel renders the Chinese header correctly;
# newline="" is required, otherwise csv.writer emits a blank line after
# every row on Windows.
header = ["景点", "城市", "当月销量", "优惠价", "价格"]
with open("fliggy.csv", "w", encoding='utf-8-sig', newline="") as f:
    csv.writer(f).writerow(header)

options = Options()
# options.add_argument('--headless')  # headless: run without a visible UI
options.add_argument('--disable-gpu')  # do not use the GPU
# options.add_argument("--incognito")  # open in incognito mode
prefs = {"profile.managed_default_content_settings.images": 2}  # do not load images
options.add_experimental_option("prefs", prefs)
# Path to chromedriver.exe.  Raw string so the backslashes are taken
# literally instead of being parsed as escape sequences.
driver_path = r"C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe"
# Selenium 4 removed the executable_path keyword; the driver path must be
# passed through a Service object instead.
driver = webdriver.Chrome(service=Service(driver_path), options=options)

url = 'http://s.fliggy.com/scenic/list.htm?q=广州'
driver.get(url)
print(url)
time.sleep(10)  # give the page (and its JS-rendered content) time to load

# Determine how many list pages there are.  The third pagination link
# holds the total page count on multi-page result sets.
try:
    page_link = driver.find_element(By.XPATH, "//a[@class='pi-pagination-num'][3]")
    next_num = page_link.text
    print(next_num)
except Exception as e:
    # A single-page result set has no pagination links; fall back to 1.
    next_num = 1
    print(e)

# Walk every list page, fetch each scenic spot's detail page and append
# one CSV row per spot.  The output file is opened once for the whole
# crawl: the original reopened it per row and leaked the handle whenever
# parsing or writerow raised inside the try block (the close() call was
# skipped because the exception was caught outside it).
with open("fliggy.csv", "a+", encoding='utf-8-sig', newline="") as out:
    csv_writer = csv.writer(out, dialect="excel")
    for i in range(0, int(next_num)):
        # Detail-page links for every scenic spot on the current list page.
        links = driver.find_elements(By.XPATH, "//a[@class='pi-btn pi-btn-primary']")

        for link in links:
            try:
                href = link.get_attribute("href")
                print(href)
                res = requests.get(href, timeout=10)
                soup = BeautifulSoup(res.text, 'lxml')
                name = soup.find('h3', {'class': 'scenic-tit'}).get_text().strip()
                city = soup.find('span', {'class': 'scenic-subtit'}).get_text().strip()
                sell_count = soup.find('dl', {'class': 'sell-count'}).find('dd').find('em').get_text().strip()
                scenic_price = soup.find('span', {'class': 'pi-price-lgt'}).get_text().strip()
                price = soup.find('div', {'class': 'right-area'}).find('span', {'class': 'pi-price'}).get_text().strip()

                csv_writer.writerow([name, city, sell_count, scenic_price, price])
                out.flush()  # keep rows on disk even if a later iteration crashes
            except Exception as e:
                # Best-effort scraping: skip any detail page whose markup
                # does not match and keep going.
                print(e)
        try:
            # Advance to the next list page.
            driver.find_element(By.CLASS_NAME, 'pi-pagination-next').click()
        except Exception as e:
            # Last page (or pagination missing): nothing to click.
            print(e)

        time.sleep(3)  # give the next list page time to render

# Shut down the browser.
driver.quit()

# NOTE: the original script re-wrote the last scraped row here, which
# duplicated the final record in fliggy.csv — and raised NameError when
# nothing had been scraped at all, since name/city/... were never bound.
# Every row is already written inside the crawl loop, so no extra write
# is needed.