package app.dwd;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import utils.DimUtil;
import utils.DruidDSUtil;
import utils.MyKafkaUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.SQLException;
import java.util.Collections;
import java.util.concurrent.TimeUnit;



/**
 * DwdMarketProductProcessor - product fact table processor.
 *
 * <p>Reads product records from the {@code ods_market_product} Kafka topic,
 * asynchronously enriches them with category, brand and shop dimension data
 * (looked up in Phoenix through a Druid connection pool), and writes the
 * enriched records to the {@code dwd_market_product} topic.
 */
public class DwdMarketProductProcessor {

    private static final Logger logger = LoggerFactory.getLogger(DwdMarketProductProcessor.class);

    public static void main(String[] args) throws Exception {
        logger.info("Starting DwdMarketProductProcessor...");

        // Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Read raw product records from Kafka.
        String topic = "ods_market_product";
        String groupId = "dwd_market_product_group";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // Parse JSON — dropping malformed records instead of failing the whole
        // job — and assign event-time watermarks from the "ts" field.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS
            .map(jsonStr -> {
                try {
                    return JSONObject.parseObject(jsonStr);
                } catch (Exception e) {
                    // One bad record must not kill the pipeline; log and drop.
                    logger.error("Malformed JSON record, dropping: {}", jsonStr, e);
                    return null;
                }
            })
            .filter(jsonObject -> jsonObject != null)
            .assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forMonotonousTimestamps()
                    .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                        @Override
                        public long extractTimestamp(JSONObject element, long recordTimestamp) {
                            // getLong returns null when "ts" is absent; unboxing
                            // that directly would throw an NPE and fail the job.
                            Long ts = element.getLong("ts");
                            return ts != null ? ts : recordTimestamp;
                        }
                    })
            );

        // Asynchronously join the category dimension.
        SingleOutputStreamOperator<JSONObject> withCategoryDS = AsyncDataStream.unorderedWait(
            jsonObjDS,
            new DimJoinFunction("dim_category_info", "category_id", new String[][] {
                {"CATEGORY_NAME", "category_name"},
                {"LEVEL", "category_level"}
            }),
            60, TimeUnit.SECONDS
        );

        // Asynchronously join the brand dimension.
        SingleOutputStreamOperator<JSONObject> withBrandDS = AsyncDataStream.unorderedWait(
            withCategoryDS,
            new DimJoinFunction("dim_brand_info", "brand_id", new String[][] {
                {"BRAND_NAME", "brand_name"},
                {"COUNTRY", "brand_country"}
            }),
            60, TimeUnit.SECONDS
        );

        // Asynchronously join the shop dimension.
        SingleOutputStreamOperator<JSONObject> withShopDS = AsyncDataStream.unorderedWait(
            withBrandDS,
            new DimJoinFunction("dim_shop_info", "shop_id", new String[][] {
                {"SHOP_NAME", "shop_name"},
                {"SHOP_TYPE", "shop_type"}
            }),
            60, TimeUnit.SECONDS
        );

        // Serialize the enriched records and write them to the DWD topic.
        withShopDS.map(new MapFunction<JSONObject, String>() {
            @Override
            public String map(JSONObject jsonObject) throws Exception {
                return jsonObject.toJSONString();
            }
        }).addSink(MyKafkaUtil.getFlinkKafkaProducer("dwd_market_product"));

        // Submit the job.
        env.execute("Product Fact Table Processing Job");
    }

    /**
     * Reusable async function that enriches a product record with one
     * dimension table, replacing three near-identical anonymous classes.
     *
     * <p>Lookups are best-effort: on a missing key, a lookup failure, or a
     * timeout the record is emitted unchanged so the pipeline never fails on
     * a single dimension miss (the original default {@code timeout()} would
     * have thrown and failed the job, contradicting that policy).
     *
     * <p>NOTE(review): the JDBC lookup in {@link #asyncInvoke} is still a
     * blocking call on the operator thread; true async I/O would need a
     * dedicated executor or an async client. Preserved as-is from the
     * original implementation — confirm before changing throughput settings.
     */
    private static class DimJoinFunction extends RichAsyncFunction<JSONObject, JSONObject> {

        private final String dimTable;         // Phoenix dimension table name
        private final String keyField;         // lookup-key field in the product record
        private final String[][] fieldMapping; // pairs of {dim column, output field}

        private transient DruidDataSource dataSource;

        DimJoinFunction(String dimTable, String keyField, String[][] fieldMapping) {
            this.dimTable = dimTable;
            this.keyField = keyField;
            this.fieldMapping = fieldMapping;
        }

        @Override
        public void open(org.apache.flink.configuration.Configuration parameters) throws Exception {
            logger.info("Initializing Phoenix connection pool for {} dimension...", dimTable);
            try {
                dataSource = DruidDSUtil.createDataSource();
                logger.info("Phoenix connection pool initialized successfully");
            } catch (Exception e) {
                logger.error("Failed to initialize Phoenix connection pool", e);
                throw e;
            }
        }

        @Override
        public void asyncInvoke(JSONObject product, ResultFuture<JSONObject> resultFuture) {
            String key = product.getString(keyField);

            // No key means nothing to join; pass the record through unchanged.
            if (key == null || key.trim().isEmpty()) {
                logger.warn("{} is null or empty, skipping {} join", keyField, dimTable);
                resultFuture.complete(Collections.singleton(product));
                return;
            }

            DruidPooledConnection connection = null;
            try {
                connection = dataSource.getConnection();
                JSONObject dim = DimUtil.getDimInfo(connection, dimTable, key);

                if (dim != null) {
                    // Copy each configured dimension column onto the record.
                    for (String[] mapping : fieldMapping) {
                        product.put(mapping[1], dim.getString(mapping[0]));
                    }
                    logger.debug("Joined {} for key: {}", dimTable, key);
                } else {
                    logger.warn("No {} row found for key: {}", dimTable, key);
                }

                resultFuture.complete(Collections.singleton(product));
            } catch (Exception e) {
                // Best-effort: emit the record un-joined rather than fail the job.
                logger.error("Error joining {} for key: {}", dimTable, key, e);
                resultFuture.complete(Collections.singleton(product));
            } finally {
                if (connection != null) {
                    try {
                        connection.close(); // returns the connection to the pool
                    } catch (SQLException e) {
                        logger.error("Error closing connection", e);
                    }
                }
            }
        }

        @Override
        public void timeout(JSONObject product, ResultFuture<JSONObject> resultFuture) {
            // The inherited timeout() throws and fails the job; stay consistent
            // with the best-effort policy and emit the record un-joined.
            logger.warn("Timeout joining {} dimension; emitting record un-joined", dimTable);
            resultFuture.complete(Collections.singleton(product));
        }

        @Override
        public void close() {
            if (dataSource != null) {
                dataSource.close();
                logger.info("Phoenix connection pool closed");
            }
        }
    }
}