# Passing query parameters via ``params``
# In project2 the search term was edited directly into the URL; here the
# same request is built by letting requests encode the query string from
# the ``params`` dict instead.
import re

import requests
from fake_useragent import FakeUserAgent  # random User-Agent generator (introduced previously)

# Baidu web-search endpoint; the query string is supplied via ``params``.
SEARCH_URL = "https://www.baidu.com/s"


def _safe_filename(text: str) -> str:
    """Return *text* with characters that are illegal in file names replaced.

    Raw user input may contain ``/``, ``?`` etc., which would make ``open()``
    fail (or write to an unexpected path); collapse them to ``_``.
    Falls back to ``"search"`` if nothing printable remains.
    """
    return re.sub(r'[\\/:*?"<>|\s]+', "_", text).strip("_") or "search"


def main() -> None:
    """Prompt for a keyword, search Baidu for it, and save the result page."""
    word = input('请输入想要搜索的内容：')

    # Randomized User-Agent so the request looks like a regular browser.
    headers = {'User-Agent': FakeUserAgent().random}

    # Only the parameters that actually influence the search are sent.
    # The session tokens copied from a browser session (rsv_pq, rsv_t, ...)
    # are one-off values, and oq/prefixsug even carried a *different*
    # keyword than wd — all of them are dropped.
    params = {
        "ie": "utf-8",  # request UTF-8 encoded results
        "wd": word,     # the search keyword
    }

    # timeout prevents the script from hanging forever on a dead connection.
    response = requests.get(SEARCH_URL, params=params, headers=headers, timeout=10)
    response.raise_for_status()  # fail loudly instead of saving an error page

    # Save the result page; the keyword is sanitized before use as a filename.
    with open(f'baidu_{_safe_filename(word)}.html', 'w', encoding='utf-8') as f:
        f.write(response.text)


if __name__ == "__main__":
    main()
