package com.atguigu.gmall.controller;

/*
Log collection controller.

@Slf4j: Lombok annotation that generates an SLF4J logger field named "log",
backed by the application's logging framework (logback here).
 */

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.Properties;

@RestController
@Slf4j
public class LoggerController {

    // KafkaTemplate is Spring Boot's producer helper; broker address and
    // key/value serializers come from application properties, replacing the
    // manual Properties/KafkaProducer setup that used to live here.
    private final KafkaTemplate<String, String> kafkaTemplate;

    // Constructor injection (instead of field @Autowired): the dependency is
    // explicit, the field can be final, and the class is unit-testable.
    // With a single constructor, Spring injects it without any annotation.
    public LoggerController(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Receives one JSON log line from the client, writes it to disk through
     * the logging framework (logback), and forwards it to Kafka for the
     * downstream streaming pipeline.
     *
     * @param jsonLog the raw JSON log string, bound from request parameter "param"
     * @return the literal acknowledgement string "Success"
     */
    @RequestMapping("/applog")
    public String logger(@RequestParam("param") String jsonLog) {
        // 1) Land the record on disk via logback. The previous
        //    System.out.println bypassed the log files entirely, defeating
        //    the "落盘" (persist-to-disk) step this endpoint exists for.
        log.info(jsonLog);

        // 2) Forward to the ODS-layer topic for stream processing.
        kafkaTemplate.send("ods_base_log", jsonLog);

        return "Success";
    }
}
