package cn.kgc.gmall.controller;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.protocol.types.Field;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.Map;
import java.util.Properties;

/**
 * Ingestion endpoint for app log events.
 *
 * <p>Each event arrives as a JSON string via HTTP, is written to the local log
 * file through SLF4J (so a file-based collector can pick it up) and is forwarded
 * to Kafka for downstream real-time processing.
 */
@RestController
@Slf4j
public class LoggerController {

    /**
     * Kafka topic receiving the raw log events.
     * NOTE(review): the original comment says the topic should be created with
     * four partitions by default — confirm against the cluster configuration.
     */
    private static final String ODS_BASE_LOG_TOPIC = "ods_base_log";

    // Kafka producer template; constructor injection keeps the field final
    // and makes the dependency explicit (preferred over field injection).
    private final KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    public LoggerController(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Ingests one JSON-encoded log event.
     *
     * <p>Example: {@code http://192.168.1.128:8080/applog?param=jsonstr}
     *
     * @param jsonLog the raw JSON log payload supplied by the client
     * @return the literal string {@code "success"} as a simple acknowledgement
     */
    @RequestMapping("/applog")
    public String getLogger(@RequestParam("param") String jsonLog) {
        // Persist the event to the log file (monitored/collected downstream).
        log.info(jsonLog);
        // Forward the event to Kafka for real-time stream processing.
        kafkaTemplate.send(ODS_BASE_LOG_TOPIC, jsonLog);
        return "success";
    }

}
