
from selenium import webdriver
from bs4 import BeautifulSoup

# Where the rendered page snapshot and the extracted name list are written.
tmppage_path = r"C:\users\daniel\desktop\page.html"
output_path = r"C:\users\daniel\desktop\ret.csv"

# Open Baidu Pan in a driven Chrome window; the user manually navigates to
# the directory they want scraped, then confirms at the prompt below.
browser = webdriver.Chrome()
browser.get("https://pan.baidu.com/")
# Block until the user has the target directory on screen (the return
# value of input() is irrelevant — it is only a pause).
input('Press Enter after selecting directory:')

# Snapshot the fully rendered DOM to disk; `with` guarantees the handle is
# closed even if the write raises.
with open(tmppage_path, 'w', encoding='utf-8') as f:
    f.write(browser.page_source)

# Re-read the snapshot and parse it (lxml parser, as the file already uses).
with open(tmppage_path, encoding='utf-8') as f:
    soup = BeautifulSoup(f.read(), "lxml")

# Each listing entry looks like <div class="file-name"><div><a>NAME</a>…;
# collect the anchor text of every such entry.
ret = []
for entry in soup.find_all('div', 'file-name'):
    try:
        name = entry.div.a.string
    except AttributeError:
        # Entry without the expected nested <div><a> structure (e.g. a
        # header or decoration row) — skip it. Only AttributeError is
        # possible here; a bare except would also swallow Ctrl-C.
        continue
    # .string is None when the <a> has more than one child node; such an
    # entry has no single text name, and None would crash the join below.
    if name is not None:
        ret.append(name)

# One name per line. NOTE(review): despite the .csv extension no CSV
# quoting is done — names containing commas/newlines are written verbatim.
with open(output_path, 'w', encoding='utf-8') as f:
    f.write('\n'.join(ret))
