package com.bw.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.SHiTi.*;
import com.bw.util.GmallConfig;
import com.bw.util.KafkaUtil;
import com.bw.util.SinkHbaseUtil;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * @BelongsProject: work_new_order
 * @BelongsPackage: com.bw.dim
 * @Author: wuxiaopeng
 * @CreateTime: 2025-07-09  10:40
 * @Description: Flink job that reads ODS CDC records from Kafka, splits them into
 *               per-table side outputs, and upserts dimension tables into Phoenix via JDBC.
 * @Version: 1.0
 */
public class ods_to_dim {
    /**
     * Entry point: builds and executes the ODS-to-DIM routing job.
     *
     * <p>Pipeline: Kafka topic {@code new_work_order_ods_all_data} (CDC JSON) →
     * side-output split by {@code source.table} → POJO mapping → JDBC upsert into
     * Phoenix dimension tables. All sinks except {@code dim_shop} are currently
     * staged (commented out) pending table creation.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
//      Read raw CDC records from Kafka
        DataStreamSource<String> file = KafkaUtil.getKafkaSource(env, "new_work_order_ods_all_data", "asdf");
//      One side-output tag per dimension source table (anonymous subclass keeps type info)
        OutputTag<String> skuTag = new OutputTag<String>("skuTag") {};
        OutputTag<String> spuTag = new OutputTag<String>("spuTag") {};
        OutputTag<String> userTag = new OutputTag<String>("userTag") {};
        OutputTag<String> c1Tag = new OutputTag<String>("c1Tag") {};
        OutputTag<String> c2Tag = new OutputTag<String>("c2Tag") {};
        OutputTag<String> c3Tag = new OutputTag<String>("c3Tag") {};
        OutputTag<String> dicTag = new OutputTag<String>("dicTag") {};
        OutputTag<String> regionTag = new OutputTag<String>("regionTag") {};
        OutputTag<String> provinceTag = new OutputTag<String>("provinceTag") {};
        OutputTag<String> shopTag = new OutputTag<String>("shopTag") {};
//      Route each record to the side output for its source table
        SingleOutputStreamOperator<Object> tohbase = file.process(new ProcessFunction<String, Object>() {
            @Override
            public void processElement(String s, ProcessFunction<String, Object>.Context context, Collector<Object> collector) throws Exception {
                JSONObject all = JSON.parseObject(s);
                // Guard against malformed records that lack the CDC "source" envelope.
                JSONObject source = all.getJSONObject("source");
                if (source == null) {
                    return;
                }
                String table = source.getString("table");
                // CDC delete events carry a null "after" image; forwarding null would
                // NPE later in JSON.parseObject / the JDBC sink, so skip such records.
                String after = all.getString("after");
                if (table == null || after == null) {
                    return;
                }
                // A record belongs to exactly one table, so the branches are exclusive.
                if ("sku_info".equals(table)) {
                    context.output(skuTag, after);
                } else if ("spu_info".equals(table)) {
                    context.output(spuTag, after);
                } else if ("user_info".equals(table)) {
                    context.output(userTag, after);
                } else if ("base_category1".equals(table)) {
                    context.output(c1Tag, after);
                } else if ("base_category2".equals(table)) {
                    context.output(c2Tag, after);
                } else if ("base_category3".equals(table)) {
                    context.output(c3Tag, after);
                } else if ("base_dic".equals(table)) {
                    context.output(dicTag, after);
                } else if ("base_region".equals(table)) {
                    context.output(regionTag, after);
                } else if ("base_province".equals(table)) {
                    context.output(provinceTag, after);
                } else if ("shop".equals(table)) {
                    context.output(shopTag, after);
                }
            }
        });

        // Pull each table's records back out of the routed stream.
        SideOutputDataStream<String> sku = tohbase.getSideOutput(skuTag);
        SideOutputDataStream<String> spu = tohbase.getSideOutput(spuTag);
        SideOutputDataStream<String> user = tohbase.getSideOutput(userTag);
        SideOutputDataStream<String> c1 = tohbase.getSideOutput(c1Tag);
        SideOutputDataStream<String> c2 = tohbase.getSideOutput(c2Tag);
        SideOutputDataStream<String> c3 = tohbase.getSideOutput(c3Tag);
        SideOutputDataStream<String> dic = tohbase.getSideOutput(dicTag);
        SideOutputDataStream<String> region = tohbase.getSideOutput(regionTag);
        SideOutputDataStream<String> province = tohbase.getSideOutput(provinceTag);
        SideOutputDataStream<String> shop = tohbase.getSideOutput(shopTag);

        // Deserialize each table's "after" image into its POJO.
        SingleOutputStreamOperator<SkuInfo> skuST = sku.map(t -> JSON.parseObject(t, SkuInfo.class));
        SingleOutputStreamOperator<SpuInfo> spuST = spu.map(t -> JSON.parseObject(t, SpuInfo.class));
        SingleOutputStreamOperator<UserInfo> userST = user.map(t -> JSON.parseObject(t, UserInfo.class));
        SingleOutputStreamOperator<BaseCategory1> c1ST = c1.map(t -> JSON.parseObject(t, BaseCategory1.class));
        SingleOutputStreamOperator<BaseCategory2> c2ST = c2.map(t -> JSON.parseObject(t, BaseCategory2.class));
        SingleOutputStreamOperator<BaseCategory3> c3ST = c3.map(t -> JSON.parseObject(t, BaseCategory3.class));
        SingleOutputStreamOperator<BaseDic> dicST = dic.map(t -> JSON.parseObject(t, BaseDic.class));
        SingleOutputStreamOperator<BaseRegion> regionST = region.map(t -> JSON.parseObject(t, BaseRegion.class));
        SingleOutputStreamOperator<BaseProvince> provinceST = province.map(t -> JSON.parseObject(t, BaseProvince.class));
        SingleOutputStreamOperator<Shop> shopST = shop.map(t -> JSON.parseObject(t, Shop.class));

        // ---- Staged sinks below: each carries its Phoenix DDL; uncomment once the
        // ---- target table exists. Only the dim_shop sink is active at the bottom.

//        skuST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_sku(
            id bigint primary key,
            spu_id bigint,
            price decimal(10,0),
            sku_name varchar,
            tm_id bigint,
            category3_id bigint ,
            is_sale bigint
        );
         */
//        skuST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_sku (id,spu_id,price,sku_name,tm_id,category3_id,is_sale) values(?,?,?,?,?,?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setLong(2, book.getSpuId());
//                    statement.setDouble(3, book.getPrice());
//                    statement.setString(4, book.getSkuName());
//
//                    statement.setLong(5, book.getTmId());
//                    statement.setLong(6, book.getCategory3Id());
//                    statement.setLong(7, book.getIsSale());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        spuST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_spu(
            id bigint primary key,
            spu_name varchar,
            category3_id bigint
        );
         */
//        spuST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_spu (id,spu_name,category3_id) values(?,?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getSpuName());
//                    statement.setLong(3, book.getCategory3Id());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        userST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_user(
            id bigint primary key,
            name varchar,
            phone_num varchar,
            birthday varchar,
            gender varchar
         );
         */
//        userST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_user (id,name,phone_num,birthday,gender) values(?,?,?,?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getName());
//                    statement.setString(3, book.getPhoneNum());
//                    statement.setString(4, book.getBirthday());
//                    statement.setString(5, book.getGender());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        c1ST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_c1(
            id bigint primary key,
            name varchar
        );
         */
//        c1ST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_c1 (id,name) values(?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getName());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        c2ST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_c2(
            id bigint primary key,
            name varchar,
            category1_id bigint
        );
         */
//        c2ST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_c2 (id,name,category1_id) values(?,?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getName());
//                    statement.setLong(3, book.getCategory1Id());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        c3ST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_c3(
            id bigint primary key,
            name varchar,
            category2_id bigint
        );
         */
//        c3ST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_c3 (id,name,category2_id) values(?,?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getName());
//                    statement.setLong(3, book.getCategory2Id());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        dicST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_dic(
            dic_code varchar primary key,
            dic_name varchar,
            parent_code varchar
        );
         */
//        dicST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_dic (dic_code,dic_name,parent_code) values(?,?,?)",
//                (statement, book) -> {
//                    statement.setString(1, book.getDicCode());
//                    statement.setString(2, book.getDicName());
//                    statement.setString(3, book.getParentCode());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        regionST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_region(
            id bigint primary key,
            region_name varchar
        );
         */
//        regionST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_region (id,region_name) values(?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getRegionName());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


//        provinceST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_province(
            id bigint primary key,
            name varchar,
            region_id bigint,
            area_code varchar,
            iso_code varchar,
            iso31662 varchar
        );
         */
//        provinceST.addSink(JdbcSink.sink(
//                "upsert into new_work_order.dim_province (id,name,region_id,area_code,iso_code,iso31662) values(?,?,?,?,?,?)",
//                (statement, book) -> {
//                    statement.setLong(1, book.getId());
//                    statement.setString(2, book.getName());
//                    statement.setLong(3, book.getRegionId());
//                    statement.setString(4, book.getAreaCode());
//                    statement.setString(5, book.getIsoCode());
//                    statement.setString(6, book.getIso31662());
//                },
//                JdbcExecutionOptions.builder()
//                        .withBatchSize(1)
//                        .withBatchIntervalMs(200)
//                        .withMaxRetries(5)
//                        .build(),
//                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
//                        .withUrl(GmallConfig.PHOENIX_URL)
//                        .build()
//        ));


        // Active sink: upsert shop records into Phoenix (dim_shop).
        shopST.print();
        /*
        create schema if not exists new_work_order;
        create table new_work_order.dim_shop(
            id bigint primary key,
            shop_name varchar
        );
         */
        shopST.addSink(JdbcSink.sink(
                "upsert into new_work_order.dim_shop (id,shop_name) values(?,?)",
                (statement, book) -> {
                    statement.setLong(1, book.getId());
                    statement.setString(2, book.getShopName());
                },
                JdbcExecutionOptions.builder()
                        .withBatchSize(1)
                        .withBatchIntervalMs(200)
                        .withMaxRetries(5)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl(GmallConfig.PHOENIX_URL)
                        .build()
        ));

        env.execute("work_order");
    }
}
