package spark.MLlib;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.linalg.Matrices;
import org.apache.spark.mllib.linalg.Matrix;
import org.apache.spark.mllib.linalg.Vector;
import org.apache.spark.mllib.linalg.Vectors;
import org.apache.spark.mllib.linalg.distributed.MatrixEntry;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.util.MLUtils;

/**
 * Demonstrates the basic MLlib local data types (vectors, labeled points,
 * matrices) from the Java API.
 *
 * Author: LDL
 * Created: 2015/6/29 15:39
 */
public class DataTypes {

    /**
     * Builds one example of each basic MLlib data type: a dense and a sparse
     * {@link Vector}, positive/negative {@link LabeledPoint}s, a LIBSVM-backed
     * {@code JavaRDD<LabeledPoint>}, a dense {@link Matrix}, and a
     * {@link MatrixEntry}. Runs against a local Spark master.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // Required on Windows so Spark can locate winutils.exe — TODO confirm path on target machine.
        System.setProperty("hadoop.home.dir", "D:\\develop\\tools\\hadoop-common-2.2.0-bin-master");
        SparkConf conf = new SparkConf().setMaster("local").setAppName("JavaDataTypes");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        try {
            // Dense vector (1.0, 0.0, 3.0): all entries stored explicitly.
            Vector dv = Vectors.dense(1.0, 0.0, 3.0);
            // Same logical vector in sparse form: size 3, non-zeros at indices 0 and 2.
            Vector sv = Vectors.sparse(3, new int[]{0, 2}, new double[]{1.0, 3.0});

            // Labeled points pair a label (1.0 = positive, 0.0 = negative) with a feature vector.
            LabeledPoint pos = new LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0));
            LabeledPoint neg = new LabeledPoint(0.0, Vectors.sparse(3, new int[]{0, 2}, new double[]{1.0, 3.0}));

            // Load labeled points from a LIBSVM-format text file as a JavaRDD.
            JavaRDD<LabeledPoint> examples = MLUtils.loadLibSVMFile(jsc.sc(),
                    "D:\\develop\\tools\\spark-1.4.0\\data\\mllib\\sample_libsvm_data.txt").toJavaRDD();

            // Dense 3x2 matrix; values are supplied in column-major order:
            // ((1.0, 2.0), (3.0, 4.0), (5.0, 6.0)).
            Matrix dm = Matrices.dense(3, 2, new double[]{1.0, 3.0, 5.0, 2.0, 4.0, 6.0});

            // One entry (row 1, column 1, value 10) of a distributed CoordinateMatrix.
            MatrixEntry m = new MatrixEntry(1, 1, 10);
        } finally {
            // Fix: the context was never stopped, leaking Spark driver resources.
            jsc.stop();
        }
    }
}
