# -*- coding: utf-8 -*-
# __author__ = 'Yuanjiang Huang'
# yuanjiang.huang@socialcredits.cn

import sys, os
import json
import traceback
from scpy.logger import get_logger

# Module-level setup: a logger for this file and the directory it lives in.
logger = get_logger(__file__)

# Directory containing this module, with a trailing slash appended when the
# path is non-empty (it is empty when __file__ has no directory component,
# e.g. when run from the current directory).
CURRENT_PATH = os.path.dirname(__file__)
CURRENT_PATH = CURRENT_PATH + '/' if CURRENT_PATH else CURRENT_PATH

from pyspark import SparkContext
from pyspark import SparkConf
from pyspark.sql import SQLContext

if __name__ == "__main__":
    # Demo: fit a PCA model on a tiny in-memory dataset and print the
    # projected features.
    from pyspark.ml.feature import PCA
    from pyspark.mllib.linalg import Vectors

    # 'local' runs Spark in-process with a single worker thread.
    master_url = 'local'
    conf = SparkConf().setMaster(master_url).setAppName('Demo')
    sc = SparkContext(conf=conf)
    try:
        sqlContext = SQLContext(sc)
        # Four 5-dimensional sample vectors; each row is a 1-tuple so that
        # createDataFrame maps it onto the single "features" column.
        data = [
            (Vectors.dense([0, 0.0, 3.0, 4.0, 5.0]),),
            (Vectors.dense([0, 0.0, 0.0, 6.0, 7.0]),),
            (Vectors.dense([0, 0, 0, 0, 1]),),
            (Vectors.dense([1, 2, 4, 5, 1]),),
        ]
        df = sqlContext.createDataFrame(data, ["features"])
        # Project the 5-D features onto the top k=3 principal components.
        pca = PCA(k=3, inputCol="features", outputCol="pcaFeatures")
        model = pca.fit(df)
        result = model.transform(df).select("pcaFeatures")
        result.show(truncate=False)
    finally:
        # Always release the SparkContext so the JVM and worker resources
        # are freed even if the demo raises part-way through.
        sc.stop()