package app.dim;

import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import utils.MyKafkaUtil;
import utils.MyPhoenixSink;

/**
 * 类目维度表处理器
 */
public class DimCategoryProcessor {
    
    public static void main(String[] args) throws Exception {
        // 创建流式环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        
        // 从Kafka读取数据
        String topic = "ods_market_category";
        String groupId = "dim_category_group";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));
        
        // 转换数据格式
        kafkaDS.map(jsonStr -> {
            JSONObject jsonObject = JSONObject.parseObject(jsonStr);
            JSONObject dimJson = new JSONObject();
            dimJson.put("sinkTable", "dim_category_info");
            
            JSONObject data = new JSONObject();
            data.put("id", jsonObject.getString("category_id"));
            data.put("category_name", jsonObject.getString("category_name"));
            data.put("parent_id", jsonObject.getString("parent_id"));
            data.put("level", jsonObject.getInteger("level"));
            data.put("product_count", jsonObject.getInteger("product_count"));
            data.put("create_time", jsonObject.getString("ts"));
            
            dimJson.put("data", data);
            return dimJson;
        }).addSink(new MyPhoenixSink());
        
        // 执行任务
        env.execute("Category Dimension Processing Job");

    }
}