# @Author: XieYinJie @ProjectName: 毕业设计 @DateTime: 2020/8/27 16:00

from flask import Flask, redirect, url_for, request, render_template, flash
from mianCode.CrawlerAccess import JudmentSpider


# Create the Flask application and load its settings from config.ini.
app = Flask(__name__)
app.config.from_pyfile('config.ini')

# Landing page: serve the search form to the user.
@app.route('/')
def index():
    """Render the search page template."""
    return render_template('search.html')

# Request hook: runs before every request to look at the submitted form.
@app.before_request
def requestFlag():
    """Before-request hook that touches the submitted form data.

    The value is currently unused; accessing ``request.form`` makes
    Werkzeug parse (and cache) the form body early in the request.
    """
    # TODO /1/: obtain the crawler interface name here, then use the same
    # approach inside TODO /2/ below to obtain the crawler name.
    _ = request.form

# Accept the search term, run the crawler, and render the results.
@app.route('/search/<search>')
def search(search):
    """Run the crawler for *search* and render the results template."""
    spider_name = '必应'  # TODO /2/: obtain the crawler interface name via JS
    results = JudmentSpider().spider_judment(spider_name, search)
    return render_template('results.html', datas=results, searchStr=search)

@app.route('/search', methods=['POST', 'GET'])
def get_form():
    """On POST, redirect the submitted term to the search view; GET goes home."""
    if request.method != 'POST':
        return redirect(url_for('index'))
    term = request.form['search']  # value of the <input> named "search"
    return redirect(url_for('search', search=term))

@app.route('/search/<search>/<int:page>', methods=['GET'])
def loop_get(search, page):
    """Return one page of crawler results as an HTML fragment.

    Used by the infinite-scroll front end. When the crawler yields no data
    for this page, return a terminal "end of results" fragment instead.
    """
    SpiderName = '必应'  # TODO /2/: obtain the crawler interface name
    allSpider = JudmentSpider()
    datas = allSpider.spider_judment(SpiderName, search, page)
    if datas:
        return render_template('loopHtml.html', datas=datas)
    else:
        # No more results: emit the "reached the end" marker fragment.
        return """<p style="text-align: center; background-color: rgba(0,0,0,0.25); color: rgba(0,0,0,0.8);">到底了</p><div id="new"><div id="replace"></div></div><script>document.documentElement.scrollTop = document.documentElement.scrollHeight - 1001;</script>"""


# Error handling: custom 404 page (also reachable directly at /404).
@app.route('/404')  # extra route so the page can be previewed directly
@app.errorhandler(404)
def error_404(e):
    """Render the custom 404 page with a 404 status code."""
    body = render_template('404.html')
    return body, 404

@app.route('/500')  # extra route so the page can be previewed directly
@app.errorhandler(500)
def error_500(e):
    """Render the custom 500 page with a 500 status code."""
    body = render_template('500.html')
    return body, 500

if __name__ == '__main__':
    app.run()  # pass host='0.0.0.0' to make the server reachable from any host
