package com.darrenchan.spark

import java.io.PrintWriter
import java.net.ServerSocket
import java.text.SimpleDateFormat
import java.util.Date

import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.util.Random
/**
  * Created by tsun on 19/4/14.
  */

/**
  * A producer application that generates random "product events", up to 5 per second, and sends them over a
  * network connection
  */
object StreamingProducer {

    def main(args: Array[String]): Unit = {

        val random = new Random()

        // Maximum number of events per second; nextInt(MaxEvents) yields 0 to 5.
        val MaxEvents = 6

        // Read the list of possible names from the bundled CSV resource.
        // Fail fast with a clear message if the resource is missing rather
        // than letting Source.fromInputStream throw an opaque NPE, and make
        // sure the underlying stream is closed once the names are loaded.
        val namesResource = this.getClass.getResourceAsStream("/names.csv")
        require(namesResource != null, "Resource /names.csv not found on classpath")
        val namesSource = scala.io.Source.fromInputStream(namesResource)
        val names = try {
            namesSource
                    .getLines()
                    .toList
                    .head
                    .split(",")
                    .toSeq
        } finally {
            namesSource.close()
        }

        // Generate a sequence of possible products
        val products = Seq(
            "iPhone Cover" -> 9.99,
            "Headphones" -> 5.49,
            "Samsung Galaxy Cover" -> 8.95,
            "iPad Cover" -> 7.49
        )

        /** Generate `n` random (user, product, price) events. */
        def generateProductEvents(n: Int) = {
            (1 to n).map { i =>
                val (product, price) = products(random.nextInt(products.size))
                val user = random.shuffle(names).head
                (user, product, price)
            }
        }

        // Create a network producer: accept clients forever and serve each
        // one on its own thread.
        val listener = new ServerSocket(9999)
        println("Listening on port: 9999")

        while (true) {
            val socket = listener.accept()
            new Thread() {
                override def run = {
                    println("Got client connected from: " + socket.getInetAddress)
                    val out = new PrintWriter(socket.getOutputStream(), true)

                    // PrintWriter never throws IOException; checkError() is the
                    // only way to detect a disconnected client. Loop until it
                    // reports a write failure, then close the socket (the
                    // original unconditional `while (true)` made socket.close()
                    // unreachable and spun forever on dead connections).
                    var clientConnected = true
                    while (clientConnected) {
                        Thread.sleep(1000)
                        val num = random.nextInt(MaxEvents)
                        val productEvents = generateProductEvents(num)
                        productEvents.foreach{ event =>
                            out.write(event.productIterator.mkString(","))
                            out.write("\n")
                        }
                        out.flush()
                        if (out.checkError()) {
                            println("Client disconnected")
                            clientConnected = false
                        } else {
                            println(s"Created $num events...")
                        }
                    }
                    socket.close()
                }
            }.start()
        }
    }
}

/**
  * A simple Spark Streaming app in Scala
  */
object SimpleStreaming {

    def main(args: Array[String]): Unit = {

        // Local context with two threads (one for the receiver, one for
        // processing) and a 10-second batch interval.
        val batchInterval = Seconds(10)
        val ssc = new StreamingContext("local[2]", "First Streaming App", batchInterval)

        // Attach to the producer's socket as a text stream.
        val stream = ssc.socketTextStream("localhost", 9999)

        // Simply dump the first few elements of every batch to the console.
        stream.print()

        ssc.start()
        ssc.awaitTermination()
    }
}

/**
  * A more complex Streaming app, which computes statistics and prints the results for each batch in a DStream
  */
/**
  * A more complex Streaming app, which computes statistics and prints the results for each batch in a DStream
  */
object StreamingAnalytics {

    def main(args: Array[String]): Unit = {

        val ssc = new StreamingContext("local[2]", "First Streaming App", Seconds(10))
        val stream = ssc.socketTextStream("localhost", 9999)

        // Parse each raw "user,product,price" line into a 3-tuple of strings.
        val events = stream.map { record =>
            val event = record.split(",")
            (event(0), event(1), event(2))
        }

        /*
          We compute and print out stats for each batch.
          Since each batch is an RDD, we call foreachRDD on the DStream, and apply the usual RDD functions
          we used in Chapter 1.
         */
        events.foreachRDD { (rdd, time) =>
            val numPurchases = rdd.count()
            val uniqueUsers = rdd.map { case (user, _, _) => user }.distinct().count()
            val totalRevenue = rdd.map { case (_, _, price) => price.toDouble }.sum()
            val productsByPopularity = rdd
                    .map { case (_, product, _) => (product, 1) }
                    .reduceByKey(_ + _)
                    .collect()
                    .sortBy(-_._2)

            val formatter = new SimpleDateFormat
            val dateStr = formatter.format(new Date(time.milliseconds))
            println(time)
            println(s"== Batch start time: $dateStr ==")
            println("Total purchases: " + numPurchases)
            println("Unique users: " + uniqueUsers)
            println("Total revenue: " + totalRevenue)
            // Guard against empty batches: the original indexed element 0
            // unconditionally, which throws ArrayIndexOutOfBoundsException
            // whenever a 10-second interval contains no events.
            productsByPopularity.headOption match {
                case Some((product, count)) =>
                    println("Most popular product: %s with %d purchases".format(product, count))
                case None =>
                    println("No events in this batch")
            }
        }

        // start the context
        ssc.start()
        ssc.awaitTermination()

    }

}

object StreamingState {

    /**
      * Fold one batch of (product, price) values for a user into the running
      * (purchase count, total revenue) state. Used with `updateStateByKey`.
      */
    def updateState(prices: Seq[(String, Double)], currentTotal: Option[(Int, Double)]) = {
        // Destructure the previous state, starting fresh for unseen keys.
        val (prevPurchases, prevRevenue) = currentTotal.getOrElse((0, 0.0))
        val batchRevenue = prices.iterator.map(_._2).sum
        Some((prevPurchases + prices.size, prevRevenue + batchRevenue))
    }

    def main(args: Array[String]): Unit = {

        val ssc = new StreamingContext("local[2]", "First Streaming App", Seconds(10))
        // Stateful operations require a checkpoint directory.
        ssc.checkpoint("/tmp/sparkstreaming/")
        val stream = ssc.socketTextStream("localhost", 9999)

        // Parse each raw "user,product,price" line, converting price to Double.
        val events = stream.map { record =>
            val event = record.split(",")
            (event(0), event(1), event(2).toDouble)
        }

        // Key the events by user so state can be tracked per user.
        val users = events.map { case (user, product, price) => (user, (product, price)) }
        val revenuePerUser = users.updateStateByKey(updateState)
        revenuePerUser.print()

        // start the context
        ssc.start()
        ssc.awaitTermination()
    }
}