package spark.streaming
import scala.util.Try

import breeze.linalg.DenseVector

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.feature.StandardScaler
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{LabeledPoint, StreamingLinearRegressionWithSGD}
import org.apache.spark.streaming._
import org.apache.spark.streaming.StreamingContext._

/**
 * Streaming linear-regression example: reads semicolon-separated wine-quality
 * rows from a monitored path, trains a StreamingLinearRegressionWithSGD model
 * on each batch, and prints predictions for the same stream.
 */
object streamExample {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Spark Streaming example")
    val sc = new SparkContext(conf)
    // 10-second micro-batch interval.
    val ssc = new StreamingContext(sc, Seconds(10))

    // textFileStream monitors a directory for newly created files.
    // NOTE(review): this path names a single CSV file — confirm it should
    // instead be the parent directory (e.g. "/sparkMLlib/traindir").
    val stream = ssc.textFileStream("/sparkMLlib/traindir/winequality-red-01.csv")

    // winequality-red has 11 feature columns; column index NumFeatures (the
    // 12th column) is the quality label.
    val NumFeatures = 11
    val zeroVector = DenseVector.zeros[Double](NumFeatures)
    val model = new StreamingLinearRegressionWithSGD()
      .setInitialWeights(Vectors.dense(zeroVector.data))
      .setNumIterations(20)
      .setRegParam(0.8)
      .setStepSize(0.01)

    // Parse each line into a LabeledPoint. Lines that do not parse (e.g. a
    // CSV header row or a truncated record) are dropped instead of crashing
    // the entire batch with a NumberFormatException.
    val labeledStream = stream.flatMap { line =>
      val fields = line.split(";")
      Try {
        val y = fields(NumFeatures).toDouble
        val features = fields.slice(0, NumFeatures).map(_.toDouble)
        LabeledPoint(label = y, features = Vectors.dense(features))
      }.toOption
    }

    model.trainOn(labeledStream)
    // Predict on the same stream; keyed by the true label so the printout
    // shows (actual, predicted) pairs.
    model.predictOnValues(labeledStream.map(lp => (lp.label, lp.features))).print()

    ssc.start()
    ssc.awaitTermination()
  }
}
