from flask import Flask,render_template,request,redirect
# Flask application instance shared by every route handler below.
app=Flask(__name__)

# Landing page: serves the form that kicks everything off.
@app.route("/")
def index():
    """Render the home page with the login form."""
    template_name = "index.html"
    return render_template(template_name)

# Handle the login form submission.
@app.route("/login",methods=['POST','GET'])
def login():
    """Check the submitted credentials and redirect accordingly.

    On success the user is sent to the scraper page; any failure
    (wrong user, wrong password, or missing form fields) goes back
    to the login page.
    """
    # .get() instead of [...] so a plain GET request (the route allows
    # GET) or a missing field does not abort with a 400/KeyError.
    username = request.form.get('username')
    userpwd = request.form.get('userpwd')
    # NOTE(review): credentials are hard-coded in plain text — fine for a
    # demo, but move to config/hashed storage before any real deployment.
    if username == "doyin" and userpwd == "123456":
        # Absolute path: the original relative "scrawl" only resolves
        # correctly when the current URL happens to be at the site root.
        return redirect("/scrawl")
    # Bad or missing credentials: back to the login page.
    return redirect("/")
# Scraper page: the form where the user enters the target URL.
@app.route("/scrawl")
def scr():
    """Render the scraper input form."""
    page = "scrawl.html"
    return render_template(page)

@app.route("/start",methods=['POST','GET'])
def start():
    """Run the crawler against the URL submitted from the scraper form.

    Returns the scraped paragraph texts as a plain string, or redirects
    back to the form when no URL was supplied.
    """
    # .get() so a plain GET request or missing fields do not raise 400.
    name = request.form.get('targetname')  # currently unused by the crawl; kept for the form contract
    url = request.form.get('targeturl')
    if not url:
        # Nothing to crawl — send the user back to the form.
        return redirect("/scrawl")
    return str(crawl(url, selector="p"))  # scrape the text of every <p> element

from bs4 import BeautifulSoup
import requests
def crawl(url, selector, timeout=10):
    """Fetch *url* and return the text of every element matching *selector*.

    Args:
        url: Address of the page to download.
        selector: CSS selector passed to ``BeautifulSoup.select``.
        timeout: Seconds to wait for the HTTP response. New parameter with
            a default, so existing callers are unaffected; without it a
            dead host would hang the request (and the Flask worker) forever.

    Returns:
        list[str]: the ``.text`` of each matched element.

    Raises:
        requests.RequestException: on network failure, timeout, or a
            non-2xx HTTP status.
    """
    headers = {
        # Desktop-browser User-Agent so sites that reject the default
        # "python-requests" UA still answer.
        "User-Agent": "Mozilla/5.0(Windows NT 10.0; Win64; x64) AppleWebKit/537.36(KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 Edg/111.0.1661.44"
    }
    response = requests.get(url, headers=headers, timeout=timeout)
    # Surface HTTP errors instead of silently scraping an error page.
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html.parser')
    return [item.text for item in soup.select(selector=selector)]

if __name__=="__main__":
    # Dev-server entry point. NOTE(review): host 0.0.0.0 exposes the Flask
    # development server to the whole network on port 8080 — confirm that
    # is intended; it is not safe for production use.
    app.run(host="0.0.0.0",port=8080)