from flask import Flask, render_template, request
from crawl import spider_book as sb
import pandas as pd
import json

# Module-level variable holding the most recently searched book title.
# NOTE(review): it is only written in search(); no reader is visible in
# this file — confirm whether anything else imports it before removing.
book_name = ''

app = Flask(__name__)


# Landing page carrying the search form.
@app.route('/')
def index():
    """Render the initial search page (templates/index.html)."""
    return render_template('index.html')


def _link_price_records(frame):
    """Return one ``{"url", "price"}`` dict per row of *frame*.

    Expects the spider's Excel layout with '链接' (link) and
    '价格' (price) columns.
    """
    return [
        {"url": url, "price": price}
        for url, price in zip(frame["链接"], frame["价格"])
    ]


# Run the crawler for the requested book, then send the scraped data as JSON.
@app.route('/search')
def search():
    """Crawl JD and DangDang for the requested book and return results.

    Reads the ``book_name`` query parameter, runs the spider (which is
    expected to write one Excel file per store under ./data/), loads the
    first five rows of each file, and returns a JSON string of the form
    ``{"list_jd": [...], "list_dangDang": [...]}``.
    """
    global book_name
    # Default to '' so a missing query parameter doesn't crash the
    # string concatenation below with a TypeError on None.
    book_name = request.args.get('book_name', '')
    sb.main(book_name)
    print(book_name)

    file_dir = './data/'
    data_jd = pd.read_excel(file_dir + '京东' + book_name + '.xlsx').head(5)
    data_dangdang = pd.read_excel(file_dir + '当当' + book_name + '.xlsx').head(5)

    table_data = {
        "list_jd": _link_price_records(data_jd),
        "list_dangDang": _link_price_records(data_dangdang),
    }
    # ensure_ascii=False keeps the Chinese titles/links readable instead
    # of \uXXXX-escaping every character in the payload.
    return json.dumps(table_data, ensure_ascii=False)


if __name__ == '__main__':
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug interactive debugger to the whole network — keep this for
    # local development only; disable debug for any real deployment.
    app.run(host='0.0.0.0', debug=True)
