// NOTE(review): DEAD CODE — this entire file is a commented-out class. Prefer deleting it
// outright (version control preserves the history) rather than keeping it as a comment.
// If it is ever revived, the inline NOTE(review) comments below list defects to fix first.
//package util;
//
//import model.CrawlerLog;
//import org.apache.kafka.clients.producer.KafkaProducer;
//import org.apache.kafka.clients.producer.Producer;
//import org.apache.kafka.clients.producer.ProducerRecord;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//
// NOTE(review): the three java.sql imports below are unused in this class — dead imports.
//import java.sql.PreparedStatement;
//import java.sql.SQLException;
//import java.sql.Statement;
// NOTE(review): java.util.Date is legacy — System.currentTimeMillis() or java.time.Instant
// would replace the only use below (timestamp for the record key).
//import java.util.Date;
//import java.util.Properties;
//
// Ships CrawlerLog entries to a Kafka topic ("<PREFIX>.crawler_CrawlerLog") as JSON,
// keyed by the current epoch-millis timestamp.
//public class LogToKafka {
//    private static final String HOST = Config.getCrawler_KAFKA_SERVER();
//    private static final String PREFIX = Config.getCrawler_PREFIX();
//
// NOTE(review): 'logger' is declared but never used — log() uses printStackTrace() instead.
//    private Logger logger = LoggerFactory.getLogger(getClass());
// NOTE(review): BUG — a static field assigned in the constructor: every 'new LogToKafka()'
// replaces the shared producer and leaks the previous one (it is never close()d). Either
// make the field an instance field, or initialize it once (static initializer / singleton).
// Also note the producer is never closed anywhere in this class, so buffered records can
// be silently lost on JVM shutdown.
//    private static Producer<String, String> producer;
//    public LogToKafka(){
//        Properties props = new Properties();
//        props.put("bootstrap.servers", HOST);
// NOTE(review): acks=all together with retries=0 is an inconsistent durability setting —
// it demands full acknowledgement yet gives up on the first transient failure. Also prefer
// ProducerConfig constants over raw string keys for all of these properties.
//        props.put("acks", "all");
//        props.put("retries", 0);
//        props.put("batch.size", 16384);
//        props.put("linger.ms", 1);
//        props.put("buffer.memory", 33554432);
//        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
//        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
//
//        producer = new KafkaProducer<>(props);
//    }
// Serializes the given CrawlerLog to JSON and sends it asynchronously to Kafka.
//    public void log(CrawlerLog crawlerLog){
//        String body = null;
//        try {
//            body = String.valueOf(JSONUtil.objectToJson(crawlerLog));
// NOTE(review): printStackTrace() instead of the SLF4J logger above; worse, when
// serialization fails 'body' stays null and a null-valued record is still sent below.
// Should log via 'logger' and return early (or rethrow) on failure.
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
////        System.out.println("Send->[" + body + "]");
// NOTE(review): send() returns a Future and accepts a callback — errors from the broker
// are silently dropped here; presumably fire-and-forget is intended, but confirm.
//        producer.send(new ProducerRecord<String, String>(PREFIX+"."+"crawler_"+"CrawlerLog", String.valueOf(new Date().getTime()), body));
//
//    }}
