package com.bw.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import org.apache.flink.util.Collector;

import java.util.Random;
import java.util.concurrent.TimeUnit;

/*
Design notes for the overall log-processing pipeline:
1. Read the log data from the ODS topic "topic_log".
2. Clean and filter the topic_log records.
3. Resolve new vs. returning users:
               is_new = 1:
                         state == null  -> convert the record's ts to a date (yyyy-MM-dd) and store it in state
                         state != null  -> compare the record's ts date with the stored date; if they differ, correct the record
               is_new = 0:
                         state == null  -> store (ts - 1 day) in state
                         state != null  -> nothing to do
4. Split the main stream into 5 side streams and write each to its own topic.

Steps implemented by this job:
1. Create the streaming environment.
2. Set the parallelism.
3. Run the jar to send data to the topic_log topic.
4. Read the main stream from Kafka.
5. Clean the data and convert its structure.
6. Write dirty records out to a dedicated Kafka topic.
 */
/**
 * Batch-style Flink job: reads an app log file, enriches each JSON record's
 * "common" section with a random shop_id and device_type, and writes the
 * re-serialized records to a row-format file sink under "app_shop_log".
 */
public class BaseLogAppShop {
    public static void main(String[] args) throws Exception {

        // TODO 1. Initialize the streaming environment. Parallelism 1 keeps
        //         output in a single part file; raise it for real workloads.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2. Read the raw app log file as the source stream.
        DataStreamSource<String> dataStreamSource = env.readTextFile("datas/app.2024-10-30.log");

        // Parse each line as JSON and enrich "common" with a random
        // shop_id (1..10) and device_type (0 or 1). Malformed lines or
        // records without a "common" object are silently dropped instead
        // of throwing and failing the whole job.
        SingleOutputStreamOperator<JSONObject> flatMapStrToJSON = dataStreamSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            // One Random per function instance — the original allocated two
            // new Random objects for every record.
            private final Random random = new Random();

            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObj;
                try {
                    jsonObj = JSON.parseObject(value);
                } catch (Exception ignored) {
                    return; // dirty line: skip rather than kill the job
                }
                if (jsonObj == null) {
                    return;
                }
                JSONObject common = jsonObj.getJSONObject("common");
                if (common == null) {
                    return; // nothing to enrich
                }
                common.put("shop_id", random.nextInt(10) + 1); // 1..10
                common.put("device_type", random.nextInt(2));  // 0 or 1
                out.collect(jsonObj);
            }
        });

        // Serialize the enriched JSON objects back to strings for the sink.
        SingleOutputStreamOperator<String> flatMapToStr = flatMapStrToJSON.map(new MapFunction<JSONObject, String>() {
            @Override
            public String map(JSONObject value) throws Exception {
                return value.toJSONString();
            }
        });

        // Row-format file sink: roll part files every 15 minutes, after
        // 5 minutes of inactivity, or once a part reaches 1 GiB.
        StreamingFileSink<String> fileSink = StreamingFileSink
                .<String>forRowFormat(new Path("app_shop_log"), new SimpleStringEncoder<>("UTF-8"))
                .withRollingPolicy(
                        DefaultRollingPolicy.builder()
                                .withRolloverInterval(TimeUnit.MINUTES.toMillis(15))
                                .withInactivityInterval(TimeUnit.MINUTES.toMillis(5))
                                .withMaxPartSize(1024 * 1024 * 1024)
                                .build())
                .build();
        flatMapToStr.addSink(fileSink);

        env.execute("BaseLogAppShop");
    }
}
