//package spark.kafka
//
//import java.io.{ByteArrayInputStream, ObjectInputStream}
//import java.util.Properties
//
//import kafka.consumer._
//import kafka.message.MessageAndMetadata
//import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
//import org.apache.kafka.common.serialization.StringSerializer
//
//import scala.actors.Actor
//import scala.collection.mutable
//
///**
//  * Consumer: receives messages from the Kafka topic.
//  */
//class KafakaObjectConsumer(topic: String) extends Actor{
//	var consumer: ConsumerConnector = _
//
//	/**
//	  * Initialization: builds the ZooKeeper-based consumer connector.
//	  */
//	def init(): KafakaObjectConsumer = {
//		val properties = new Properties()
//		properties.put("zookeeper.connect","nn1.hadoop:2181,nn2.hadoop:2181,s1.hadoop:2181")
//		properties.put("group.id","my_group1")
//	    properties.put("zookeeper.session.timeout.ms","60000")
//		consumer = Consumer.create(new ConsumerConfig(properties))
//		this
//	}
//
//	override def act(): Unit = {
//		var topicConfig = new mutable.HashMap[String,Int]()
//		topicConfig += (topic -> 1)
//		val message: collection.Map[String, List[KafkaStream[Array[Byte], Array[Byte]]]] = consumer.createMessageStreams(topicConfig)
//		val kafkaStream: KafkaStream[Array[Byte], Array[Byte]] = message.get(topic).get(0)
//		val iter: ConsumerIterator[Array[Byte], Array[Byte]] = kafkaStream.iterator()
//		while (iter.hasNext()){
//			val messages: MessageAndMetadata[Array[Byte], Array[Byte]] = iter.next()
//			val bytes: Array[Byte] = messages.message()
//			val byteArrayInputStream = new ByteArrayInputStream(bytes)
//			val objectInputStream = new ObjectInputStream(byteArrayInputStream)
//			val updateStatus: ValueUpdateStatus1 = objectInputStream.readObject().asInstanceOf[ValueUpdateStatus1]
//			println(s"接收到的是：${updateStatus}")
//			Thread.sleep(1000)
//		}
//
//	}
//}
//object KafakaObjectConsumer{
//	// Calls the init method after construction.
//	// NOTE(review): fixed reference — the original said `MyKafkaConsumer`, which is a
//	// different class and would not compile if this file were re-enabled.
//	def apply(topic: String): KafakaObjectConsumer = new KafakaObjectConsumer(topic).init()
//}
//
//
///**
//  * Producer: sends messages to the Kafka topic.
//  */
//class KafakaObjectProduct(val topic:String) extends  Actor {
//	var producer:KafkaProducer[String,ValueUpdateStatus1] = _
//	def init(): KafakaObjectProduct = {
//		val properties = new Properties()
//		properties.put("bootstrap.servers", "nn1.hadoop:9092,nn2.hadoop:9092,s1.hadoop:9092")
//		// NOTE(review): the key type is String, so use the imported StringSerializer here;
//		// the original used MySerializer (the value serializer) for the key as well.
//		properties.put("key.serializer", classOf[StringSerializer].getName())
//		properties.put("value.serializer", classOf[MySerializer].getName())
//		producer = new KafkaProducer[String,ValueUpdateStatus1](properties)
//		this
//	}
//	override def act(): Unit = {
//		var num = 1
//		while (true){
//			val stringMessage = new String(s"测试发送数字：${num}")
//			producer.send(new ProducerRecord[String,ValueUpdateStatus1](this.topic,ValueUpdateStatus1(num)))
//			num += 1
//			if(num > 10) num = 0
//			Thread.sleep(3000)
//		}
//	}
//}
//object KafakaObjectProduct{
//	def apply(topic:String): KafakaObjectProduct = new KafakaObjectProduct(topic).init()
//}
//
///**
//  * Test entry point: starts one producer and one consumer actor on the same topic.
//  */
//object TestKafka{
//	def main(args: Array[String]): Unit = {
//		val topic = "zhangjian_test_object"
//		val product = KafakaObjectProduct(topic)
//		val consumer = KafakaObjectConsumer(topic)
//		consumer.start()
//		product.start()
//
//	}
//}
//
//case class ValueUpdateStatus1(var value:Int,var isUpdate:Boolean = false){
//	override def toString: String = {s"value:${value},${isUpdate}"}
//}
