package com.bigdata.ml;
import bean.SupplyProductEntity;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG;


public class MlKafkaTest {

    /**
     * Builds the Kafka consumer configuration this test class was attempting to
     * assemble (the original version tried to assign to the {@code ConsumerConfig}
     * constants directly, which does not compile).
     *
     * @param brokers comma-separated {@code host:port} bootstrap broker list
     * @param group   consumer group id
     * @return config map keyed by the standard {@link ConsumerConfig} property names
     */
    private static Map<String, Object> consumerConfig(String brokers, String group) {
        Map<String, Object> config = new HashMap<>();
        config.put(BOOTSTRAP_SERVERS_CONFIG, brokers);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, group);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        return config;
    }

    /**
     * Demo entry point: populates a sample {@code SupplyProductEntity} and builds
     * the Kafka consumer configuration for the {@code spark} topic/group.
     */
    public static void main(String[] args) {
        SupplyProductEntity entity = new SupplyProductEntity();
        entity.setProductId("1");
        entity.setCompanyName("大数据公司");
        entity.setProductInfo("大数据，机器学习，海量处理");
        // The entity has many more fields; populate them as needed for a real test.

        String brokers = "hadoop200:9092,hadoop201:9092,hadoop202:9092";
        String topic = "spark";
        String group = "spark";

        Map<String, Object> config = consumerConfig(brokers, group);
        System.out.println("topic=" + topic + ", config=" + config);

        // NOTE(review): the original (commented-out) code did
        //   new KafkaProducer(brokerList, topic, entity)
        // which matches no KafkaProducer constructor. A real producer is built
        // from a config map with serializer settings and used with
        // try-with-resources, e.g.:
        //   try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
        //       producer.send(new ProducerRecord<>(topic, key, value));
        //   }
        // Also dropped: the unused `zk`/`brokerList` locals (both held :2181
        // zookeeper endpoints, not broker :9092 endpoints — modern Kafka clients
        // connect via bootstrap.servers, not zookeeper) and the empty
        // try/catch(Exception) that swallowed nothing.
    }
}
