import requests
from bs4 import BeautifulSoup

def get_html(url, timeout=30):
    """Fetch *url* over HTTP GET and return the response body as text.

    Parameters:
        url: page URL to download.
        timeout: seconds before the request is aborted. The original call
            passed no timeout, so a stalled server could hang the whole
            scrape indefinitely; the default keeps callers unchanged.
    """
    resp = requests.get(url, timeout=timeout)
    # NOTE(review): resp.text uses the encoding guessed from headers —
    # if pages come back garbled, set resp.encoding explicitly; verify.
    return resp.text

def get_url(url, start_index=2697):
    """Scrape the chapter index page at *url* and save every chapter's
    text to disk via write_data().

    Parameters:
        url: base URL of the book's index page; chapter hrefs are
            appended to it.
        start_index: per-panel chapter position to resume from. The
            original code hard-coded 2697 (chapters before it had
            already been downloaded); kept as the default for
            backward compatibility.
    """
    soup = BeautifulSoup(get_html(url), 'html.parser')
    # BUG FIX: the original passed a SET {'class', 'panel panel-default'}
    # as the attrs argument, not a dict, so it did not filter on the
    # class attribute as intended.
    panels = soup.find_all('div', {'class': 'panel panel-default'})
    # The first panel is not a chapter list, so skip it (original
    # skipped i == 0).
    for panel in panels[1:]:
        chapter_list = panel.find('dl', {'class': 'panel-body panel-chapterlist'})
        if chapter_list is None:
            # Be tolerant of panels without a chapter list instead of
            # raising AttributeError on .find_all.
            continue
        for index, dd in enumerate(chapter_list.find_all('dd')):
            if index < start_index:
                continue  # resume point: already downloaded earlier
            a = dd.find('a')
            # '#######' hrefs are placeholders for unavailable chapters.
            if a is None or a['href'] == '#######':
                continue
            write_data(url + str(a['href']), a.get_text() + '.txt')


def write_data(url, name):
    """Download the chapter page at *url* and save its text content.

    Parameters:
        url: full URL of a single chapter page.
        name: target file name, e.g. "<chapter title>.txt".
    """
    soup = BeautifulSoup(get_html(url), 'html.parser')
    body = soup.find('div', {'class': 'panel-body'})
    if body is None:
        # Original would raise AttributeError on body.get_text();
        # skip pages that lack the expected container instead.
        print('no panel-body found at', url)
        return
    # The site indents paragraphs with four spaces; turn each run
    # into a Windows line break.
    doc = body.get_text().replace('    ', '\r\n')
    # '?' is not allowed in Windows file names; strip it from the title.
    filename = name.replace('?', '')
    print(doc)
    # NOTE(review): r'dir' + filename concatenates with no path
    # separator — 'dir' looks like a placeholder output directory;
    # confirm the intended target path.
    with open(r'dir' + filename, 'w', encoding='utf-8', errors='ignore') as f:
        # BUG FIX: writelines(doc) on a str iterates it character by
        # character; write() emits the whole string in one call.
        f.write(doc)

if __name__ == '__main__':
    # 'url' is a placeholder — replace with the book's index-page URL
    # before running.
    get_url('url')