import requests
from urllib.parse import quote
import getua

class Crawl(object):
    """Simple Baidu Tieba crawler.

    Fetches forum listing pages for a given forum name and saves the raw
    HTML of each page to a local file. URL and headers are injected via the
    setter methods before calling run().
    """

    def __init__(self):
        # Target URL and request headers are supplied later via the setters.
        self.url = ''
        self.headers = {}

    def setUrl(self, url):
        """Set the base URL that send() will request."""
        self.url = url

    def setHeaders(self, headers):
        """Set the HTTP headers (e.g. User-Agent) used for every request."""
        self.headers = headers

    def send(self, params):
        """GET self.url with the given query params and return the body text.

        A timeout is set so a stalled connection cannot hang the crawl
        forever (requests blocks indefinitely without one).
        """
        resp = requests.get(self.url, headers=self.headers,
                            params=params, timeout=10)
        return resp.text

    def save(self, filename, content):
        """Write content to filename as UTF-8 text, closing the file."""
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(content)

    def run(self):
        """Prompt for a forum name and page count, then download and save
        each listing page as '<word><page>.html'."""
        word = input('请输入贴吧的名字：')
        page = int(input('请输入要保存的页数：'))
        for i in range(page):
            params = {
                # BUG FIX: pass the raw word — requests URL-encodes the
                # params mapping itself, so pre-quoting with urllib's
                # quote() double-encoded the keyword and requested the
                # wrong forum.
                'kw': word,
                # Tieba paginates its listing in steps of 50 posts.
                'pn': i * 50,
            }
            content = self.send(params)
            self.save(f'{word}{i + 1}.html', content)


if __name__ == '__main__':
    # Entry point: configure a crawler against the Tieba forum listing
    # endpoint with a randomized User-Agent, then start the interactive
    # download loop.
    crawler = Crawl()
    crawler.setUrl('https://tieba.baidu.com/f?')
    crawler.setHeaders({'User-Agent': getua.get_ua()})
    crawler.run()
