import requests, random
from bs4 import BeautifulSoup

# Pool of real desktop-browser User-Agent strings (Chrome/Firefox/IE on
# Windows, Linux, and macOS).  A random one is used so the scraper's
# requests look less like a single automated client.
user_agent_list = [
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; …) Gecko/20100101 Firefox/61.0",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)",
    "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10.5; en-US; rv:1.9.2.15) Gecko/20110303 Firefox/3.6.15"]
headers = {
    # NOTE: the choice happens once, at import time — every request made
    # during this run reuses the same User-Agent, not a fresh one per call.
    "User-Agent": random.choice(user_agent_list),
}


def getdata(url, i):
    """Fetch one Douban Top-250 results page and print its book titles.

    Args:
        url: Full page URL (with the ``start=`` offset already appended).
        i: Running rank counter; the first title found on this page is
           printed as ``Top i+1``.

    Returns:
        The updated counter after this page's titles were printed, so a
        caller looping over pages can keep a continuous ranking.  On a
        request failure the counter is returned unchanged.
    """
    try:
        # Timeout so a stalled connection cannot hang the script forever.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')

        # Each book entry is an <a> carrying a ``title`` attribute inside
        # the page's <div class="indent"> listing container; anchors
        # without a title attribute (covers, pagination) are skipped.
        for title_href in soup.find_all('div', class_='indent'):
            for title in title_href.find_all('a'):
                if title.get('title'):
                    i = i + 1
                    print("Top %s : %s" % (str(i), title.get('title')))
    except requests.exceptions.Timeout:
        print("Time Out.")
    except requests.exceptions.RequestException as e:
        # BUG FIX: the original caught urllib.error.URLError and tested
        # socket.timeout, but neither module was imported and requests
        # raises its own exception hierarchy — that handler could never
        # run and any network error crashed with NameError instead.
        print("Request failed: %s" % e)
    return i

# Crawl the first `top` entries; Douban serves 25 titles per page, so
# step through the ``start=`` offsets one page at a time.
top = 25
PAGE_SIZE = 25
for start in range(0, top, PAGE_SIZE):
    page_url = "http://book.douban.com/top250?start=" + str(start)
    getdata(page_url, start)
