
from pyspark import SparkContext,SparkConf
from pyspark.mllib.recommendation import ALS,Rating

def create_spark_context():
    """Build and return a SparkContext for the recommender application.

    The driver JVM gets an enlarged thread stack (-Xss4096k) because ALS
    training can require deep recursion during job serialization.
    """
    conf = (SparkConf()
            .setAppName('Pyspark-RecommendationDSystem')
            .set("spark.driver.extraJavaOptions", "-Xss4096k"))
    # .setMaster('spark://hadoop-node1:7077')  # uncomment to target the cluster

    context = SparkContext(conf=conf)  # single driver-side entry point
    context.setLogLevel('WARN')        # keep console output readable
    return context

def prepare_data(spark_context):
    """Load the MovieLens u.data ratings file and return an RDD of Rating.

    Each input line is tab-separated: user_id, movie_id, rating, timestamp.
    The timestamp column is dropped; the remaining three fields become a
    Rating(user, product, rating) record.

    Args:
        spark_context: active SparkContext used to read the input file.

    Returns:
        RDD[Rating] with one element per rating line.
    """
    # ------------ read data -------------
    raw_user_data = spark_context.textFile("file:/usr/local/ml-100k/u.data")
    # raw_user_data = spark_context.textFile("hdfs://hadoop-node1:9000/input/u.data")
    raw_ratings = raw_user_data.map(lambda line: line.split("\t")[:3])
    ratings_rdd = raw_ratings.map(lambda x: Rating(int(x[0]), int(x[1]), float(x[2])))

    # ------------ data analysis ----------------
    num_ratings = ratings_rdd.count()
    num_users = ratings_rdd.map(lambda x: x[0]).distinct().count()
    num_movies = ratings_rdd.map(lambda x: x[1]).distinct().count()
    # Fixed typo ("Moive" -> "movies") and replaced string concatenation
    # with an f-string.
    print(f"total ratings: {num_ratings}, users: {num_users}, movies: {num_movies}")
    return ratings_rdd

def save_mode(spark_context, model):
    """Persist a trained ALS model, reporting (not raising) any failure.

    Saving is best-effort: MLlib's Saveable.save raises if the target path
    already exists, and that should not abort the whole pipeline, so the
    exception is caught and printed instead of propagating.

    Args:
        spark_context: the active SparkContext (required by model.save).
        model: a trained MatrixFactorizationModel to persist.
    """
    try:
        # model.save(spark_context, "hdfs://hadoop-node1:9000/datas/als-model")
        model.save(spark_context, "/usr/local/datas/als-model")
    except Exception as e:
        # Report the actual reason instead of a bare "Error" marker.
        print("Error: failed to save ALS model")
        print(str(e))
        
if __name__ =="__main__":
    sc = create_spark_context()

    print("==================preparing data===================")
    rating_rdd = prepare_data(sc)
    print("==================model training===================")
    #ALS:rank=5",iterations = 5, lambda = 0.1
    als_model = ALS.train(rating_rdd,5,iterations=5,lambda_=0.1)
    print( als_model)
    print("==================model saving===================")
    save_mode(sc,als_model)
    sc.stop()
