package app.dim;

import com.alibaba.fastjson.JSONObject;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import utils.MyKafkaUtil;
import utils.MyPhoenixSink;

import java.util.Objects;

/**
 * 店铺维度表处理器
 */
public class DimShopProcessor {

    /**
     * Entry point: consumes raw shop records from the {@code ods_market_shop} Kafka
     * topic, reshapes each record into the Phoenix dimension-sink envelope
     * ({@code {"sinkTable": ..., "data": {...}}}) and writes it via {@link MyPhoenixSink}.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // 创建流式环境 (create the streaming environment)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 从Kafka读取数据 (read the raw dimension stream from Kafka)
        String topic = "ods_market_shop";
        String groupId = "dim_shop_group";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // 转换数据格式 (convert to the sink envelope). Malformed JSON is mapped to
        // null and filtered out so a single dirty record cannot crash the whole job.
        kafkaDS.map(DimShopProcessor::toDimRecord)
                .filter(Objects::nonNull)
                .addSink(new MyPhoenixSink());

        // 执行任务 (submit the job)
        env.execute("Shop Dimension Processing Job");
    }

    /**
     * Builds the Phoenix sink envelope for one raw shop record.
     *
     * @param jsonStr raw JSON string consumed from Kafka
     * @return the envelope JSON object, or {@code null} when the record is not valid JSON
     */
    private static JSONObject toDimRecord(String jsonStr) {
        JSONObject source;
        try {
            source = JSONObject.parseObject(jsonStr);
        } catch (RuntimeException e) {
            // fastjson throws an unchecked JSONException on malformed input; skip dirty data
            // instead of failing the task.
            return null;
        }
        if (source == null) {
            // parseObject returns null for null/empty input.
            return null;
        }

        JSONObject dimJson = new JSONObject();
        dimJson.put("sinkTable", "dim_shop_info");

        JSONObject data = new JSONObject();
        data.put("id", source.getString("shop_id"));
        data.put("shop_name", source.getString("shop_name"));
        data.put("shop_type", source.getString("shop_type"));
        data.put("avg_price", source.getDouble("avg_price"));
        data.put("follower_count", source.getInteger("follower_count"));
        // NOTE(review): "ts" is stored as create_time — presumably the event timestamp;
        // confirm against the upstream ODS schema.
        data.put("create_time", source.getString("ts"));

        dimJson.put("data", data);
        return dimJson;
    }
}