# -*- coding: utf-8 -*-
import csv
from random import randint

import requests
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.by import By
# Launch Chrome through the local chromedriver and load the Qidian rank page.
# NOTE(review): passing the driver path positionally was removed in
# Selenium 4.10+; this call shape works on Selenium 3.x / early 4.x only.
browser = webdriver.Chrome('./chromedriver.exe')
URL = 'https://www.qidian.com/rank/'
browser.get(url=URL)
#print(browser.page_source)

# Scrape the 月票 (monthly ticket) ranking list into 月票.csv.
# `with` guarantees the file is closed even if scraping raises.
with open('./月票.csv', 'w', encoding='utf-8', newline='') as f:
    mywrite = csv.writer(f)
    mywrite.writerow(['排名', '书名', '月票数'])
    # find_elements(By.CSS_SELECTOR, ...) replaces the deprecated
    # find_elements_by_css_selector (removed in Selenium 4).
    yuepiao = browser.find_elements(By.CSS_SELECTOR, 'body > div.wrap > div.rank-box.box-center.cf > div.main-content-wrap.fl > div.rank-body > div > div > i > div > ul > li')
    for item in yuepiao:
        # Each <li>'s visible text is newline-separated:
        # rank, title, ticket count (presumably — confirm against the page).
        fields = item.text.split('\n')
        if len(fields) >= 3:
            mywrite.writerow(fields[:3])
        # Rows with fewer than 3 fields are skipped instead of raising IndexError.

# Scrape the 畅销 (bestseller) ranking list into 畅销.csv.
with open('./畅销.csv', 'w', encoding='utf-8', newline='') as f1:
    mywrite1 = csv.writer(f1)
    mywrite1.writerow(['排名', '书名', '畅销指数'])
    changxiao = browser.find_elements(By.CSS_SELECTOR, 'body > div.wrap > div.rank-box.box-center.cf > div.main-content-wrap.fl > div.rank-body > div > i > div:nth-child(1) > div > ul > li')
    for item in changxiao:
        fields = item.text.split('\n')
        # NOTE(review): the third column is a random number, NOT scraped data —
        # presumably the real index is not exposed in the element text. Kept
        # as-is to preserve existing behavior; confirm whether this is intended.
        if len(fields) >= 2:
            mywrite1.writerow([fields[0], fields[1], randint(20, 100)])
#
# Scrape the 收藏 (collection count) ranking list into 收藏.csv.
with open('./收藏.csv', 'w', encoding='utf-8', newline='') as f2:
    mywrite2 = csv.writer(f2)
    mywrite2.writerow(['排名', '书名', '收藏数'])
    shouchang = browser.find_elements(By.CSS_SELECTOR, 'body > div.wrap > div.rank-box.box-center.cf > div.main-content-wrap.fl > div.rank-body > div > i > div:nth-child(4) > div > ul > li')
    for item in shouchang:
        # Expected text layout: rank / title / collection count, one per line.
        fields = item.text.split('\n')
        if len(fields) >= 3:
            mywrite2.writerow(fields[:3])
        #print(fields)
#

# Scrape the VIP monthly-ticket ranking list into vip.csv.
with open('./vip.csv', 'w', encoding='utf-8', newline='') as f3:
    mywrite3 = csv.writer(f3)
    mywrite3.writerow(['排名', '书名', 'vip月票'])
    vip = browser.find_elements(By.CSS_SELECTOR, 'body > div.wrap > div.rank-box.box-center.cf > div.main-content-wrap.fl > div.rank-body > div > i > div:nth-child(7) > div > ul > li')
    for item in vip:
        # Expected text layout: rank / title / VIP ticket count, one per line.
        fields = item.text.split('\n')
        if len(fields) >= 3:
            mywrite3.writerow(fields[:3])

# Note: method for reading attributes from inside a tag.

