from flask import render_template
from flask import Flask,request,redirect
# from livereload import Server
from pyspark.ml.recommendation import ALS
from pyspark.ml.recommendation import ALSModel
from pyspark import SparkContext
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit
import json
import time

def makeRecommendations(model, userID, number):
    """Return the top `number` artist recommendations for `userID`.

    Pairs every known artist (taken from the model's item factors) with the
    given user, scores each pair with the ALS model, and returns the
    highest-scoring rows.

    Args:
        model: a fitted pyspark.ml.recommendation.ALSModel.
        userID: the user to recommend for (cast to int before scoring).
        number: how many recommendations to return.

    Returns:
        list of Rows with fields `artist` and `prediction`, ordered by
        prediction descending.
    """
    # BUG FIX: the original body ignored the `model` parameter and read the
    # module-level `modelnew` instead; use the parameter so callers can pass
    # any fitted model.
    toRecommend = model.itemFactors.selectExpr("id as artist").withColumn("user", lit(userID))
    # ALS scoring expects integer user/item columns.
    toRecommend = (toRecommend
                   .withColumn("artist", toRecommend["artist"].cast("Int"))
                   .withColumn("user", toRecommend["user"].cast("Int")))
    toRecommend.printSchema()
    return (model.transform(toRecommend)
            .select("artist", "prediction")
            .orderBy("prediction", ascending=False)
            .take(number))


def artistPredict(userID):
    """Recommend 10 artists for `userID` and persist the result as JSON.

    Uses the module-level `modelnew` ALS model and `artistByID` DataFrame.
    Each recommendation dict gains a human-readable `name` field looked up
    from `artistByID`. The JSON is written to static/data/predict.json (so
    the front end can fetch it) and the same JSON string is returned.

    Raises:
        IndexError: if an artist id is missing from `artistByID`
        (unchanged from the original behaviour).
    """
    recommendations = makeRecommendations(modelnew, userID, 10)
    results = []
    for row in recommendations:
        # Look up the display name for this artist id.
        name_row = artistByID.filter(artistByID['artist'] == str(row.artist)).select('name').collect()[0]
        record = row.asDict()
        record['name'] = name_row.name
        results.append(record)
    payload = json.dumps(results)
    # Context manager ensures the file is closed even if the write fails
    # (the original used open/write/close with no error handling).
    with open('/usr/local/spark/test/code/static/data/predict.json', 'w') as f:
        f.write(payload)
    return payload
    


# Flask application instance; the route handlers below register on it.
app = Flask(__name__)

@app.route('/')
def index():
    """Serve the landing page by rendering the index.html template."""
    return render_template('index.html')

@app.route('/direct', methods=['GET', 'POST'])
def predict():
    """Handle the recommendation form.

    On POST: read `userID` from the form, compute recommendations via
    artistPredict (which also writes the static JSON file), and render the
    results page with the JSON payload and the user id.

    BUG FIX: on a GET request the original fell through to the final
    render_template with `mess` and `message` unbound, raising
    UnboundLocalError; we now render the page with empty values instead.
    """
    message = ''
    mess = ''
    if request.method == 'POST':
        userID = request.form['userID']
        print(userID)
        message = userID
        mess = artistPredict(userID)
        print(mess)
        # Pause so the freshly written predict.json is available to the
        # front end before the page loads (original behaviour, preserved).
        time.sleep(3)
    return render_template('genre-predict.html', message=mess, userid=message)
        

@app.route('/<filename>')
def req_file(filename):
    """Render the template named by the URL's single path segment.

    NOTE(review): this lets a client render any template in the templates
    folder — confirm that exposure is intended before deploying publicly.
    """
    return render_template(filename)

# ---------------------------------------------------------------------------
# Spark / model bootstrap. Kept at module level because the route handlers
# above read these globals (`modelnew`, `artistByID`).
# ---------------------------------------------------------------------------
sc = SparkContext('local', 'test')
sc.setLogLevel("WARN")

spark = SparkSession.builder.getOrCreate()
# Pre-trained ALS model plus the lookup/training tables saved alongside it.
modelnew = ALSModel.load("/usr/local/spark/Model/modelnew")
artistByID = spark.read.csv("/usr/local/spark/Model/artistByID").toDF("artist", "name")
trainData = spark.read.csv("/usr/local/spark/Model/trainData").toDF("user", "artist", "count")

trainData.cache()
artistByID.cache()
# `count` comes in as a string from CSV; cast it to int for downstream use.
trainData = trainData.withColumn('count', trainData['count'].cast('int'))
trainData.printSchema()

if __name__ == '__main__':
    # BUG FIX: the original set `app.DEBUG`, which Flask ignores (the real
    # attribute is lowercase `debug`), and called app.run() unconditionally
    # at module level, which would also start the server on import.
    app.debug = True                   # code changes take effect immediately
    app.jinja_env.auto_reload = True   # template changes take effect immediately
    app.run()                          # serve on the local development server
