/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package p1source;

import ch01.FraudDetector;
import entity.Dept;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.functions.MapFunction;

import org.apache.flink.api.java.functions.FormattingMapper;
import org.apache.flink.connector.file.src.FileSource;
import org.apache.flink.connector.file.src.reader.TextLineInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import org.apache.flink.walkthrough.common.entity.Alert;
import org.apache.flink.walkthrough.common.entity.Transaction;
import org.apache.flink.walkthrough.common.sink.AlertSink;
import org.apache.flink.walkthrough.common.source.TransactionSource;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.time.Duration;

import java.util.Arrays;

/**
 * Skeleton code for the datastream walkthrough.
 *
 * <p>Holds three independent demo entry points:
 * <ul>
 *   <li>{@link #main4} – event-time window join of two keyed element streams</li>
 *   <li>{@link #main}  – event-time window join of two string collections</li>
 *   <li>{@link #main1} – reads a CSV file and flat-maps each line into {@code Dept} records</li>
 * </ul>
 */
public class FraudDetectionJob {

    /**
     * Demo: joins two streams on a shared key (the user name) inside a
     * 10-second tumbling event-time window and prints every matched pair.
     *
     * @param args unused
     * @throws Exception if the Flink job fails
     */
    public static void main4(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Two streams that share the same key (the user name).
        // Watermarks are strictly ascending (zero out-of-orderness allowance).
        SingleOutputStreamOperator<Event> stream1 = env.fromElements(
                new Event("Alice", "./home", 1000L),
                new Event("Bob", "./cart", 2000L),
                new Event("Alice", "./cart", 3000L),
                new Event("Alice", "./fav", 8000L),
                new Event("Bob", "./home", 15000L),
                new Event("Cary", "./prod?id=1", 16000L)
        ).assignTimestampsAndWatermarks(WatermarkStrategy.<Event>forBoundedOutOfOrderness(Duration.ZERO)
                .withTimestampAssigner((element, recordTimestamp) -> element.timestamp)
        );

        SingleOutputStreamOperator<Tuple3<String, Integer, Long>> stream2 = env.fromElements(
                Tuple3.of("Alice", 35, 2000L),
                Tuple3.of("Bob", 20, 8000L),
                Tuple3.of("Alice", 17, 9000L),
                Tuple3.of("Bob", 20, 12000L),
                Tuple3.of("Mary", 50, 17000L)
        ).assignTimestampsAndWatermarks(WatermarkStrategy.<Tuple3<String, Integer, Long>>forBoundedOutOfOrderness(Duration.ZERO)
                .withTimestampAssigner((element, recordTimestamp) -> element.f2)
        );

        // DataStream.join() combines the two streams into a JoinedStreams.
        stream1.join(stream2)
                // where()/equalTo() pick the join key of each side.
                .where(value -> value.user)  // key of the first stream
                .equalTo(value -> value.f0)  // key of the second stream
                // Pairs are matched only within the same 10s tumbling window.
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                // apply() receives each matched (first, second) pair.
                .apply(new JoinFunction<Event, Tuple3<String, Integer, Long>, String>() {
                    @Override
                    public String join(Event first, Tuple3<String, Integer, Long> second) throws Exception {
                        return first + " --> " + second;
                    }
                })
                .print();
        env.execute();
    }

    /**
     * Demo: joins a stream of numeric strings with a stream of letter strings.
     * The left key is the numeric value of the element and the right key is
     * the element's length, so e.g. "3" matches every 3-character string.
     * Timestamps are derived from the string length (a few ms each), so every
     * record falls into the first 10-second window.
     *
     * @param args unused
     * @throws Exception if the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        SingleOutputStreamOperator<String> lhs = env.fromCollection(Arrays.asList("1", "2", "3", "4", "5", "6"))
                .assignTimestampsAndWatermarks(WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((element, recordTimestamp) -> element.length())
                );
        SingleOutputStreamOperator<String> rhs = env.fromCollection(Arrays.asList("a", "bb", "ccc", "dddd", "eee", "fff", "gggg"))
                .assignTimestampsAndWatermarks(WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((element, recordTimestamp) -> element.length())
                );
        lhs.join(rhs)
                .where(Integer::valueOf)    // key of the left side: numeric value of the string
                .equalTo(String::length)    // key of the right side: string length
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .apply(new JoinFunction<String, String, String>() {
                    @Override
                    public String join(String left, String right) throws Exception {
                        return left + "   ---->   " + right;
                    }
                })
                .print().setParallelism(1);
        env.execute();  // blocks until the (bounded) job finishes
        System.out.println("hello world");
    }

    /**
     * Demo: reads a CSV file line by line and flat-maps each line into
     * {@code Dept} records.
     *
     * @param args unused
     * @throws Exception if the Flink job fails
     */
    public static void main1(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Bounded file source that emits the CSV one text line at a time.
        FileSource<String> source = FileSource.forRecordStreamFormat(
                new TextLineInputFormat(),
                new Path("D:\\bigdata\\m5-flink\\input\\words.csv")
        ).build();

        env.fromSource(source, WatermarkStrategy.noWatermarks(), "file source")
                .flatMap(new MyFlatMapFunction())
                .print();

        env.execute("Fraud Detection");
    }

    /**
     * Splits a CSV line of the shape {@code "id,name,desc"} and emits the
     * resulting {@code Dept} record {@code id} times, demonstrating the
     * one-to-many nature of flatMap. Lines with fewer than three fields are
     * skipped instead of failing the whole job with an
     * {@code ArrayIndexOutOfBoundsException}.
     */
    public static class MyFlatMapFunction implements FlatMapFunction<String, Dept> {
        @Override
        public void flatMap(String value, Collector<Dept> out) throws Exception {
            String[] fields = value.split(",");
            if (fields.length < 3) {
                return; // skip malformed input lines rather than crash the job
            }
            int id = Integer.parseInt(fields[0]);
            // Emit the record 'id' times to demonstrate flatMap's 1-to-N output.
            for (int j = 0; j < id; j++) {
                out.collect(new Dept(id, fields[1], fields[2]));
            }
        }
    }

    /**
     * Maps one CSV line of the shape {@code "id,name,desc"} to exactly one
     * {@code Dept} record.
     */
    public static class MyMapFunction implements MapFunction<String, Dept> {
        @Override
        public Dept map(String line) throws Exception {
            String[] fields = line.split(",");
            return new Dept(Integer.parseInt(fields[0]), fields[1], fields[2]);
        }
    }

    /** Simple POJO for the join demos; Lombok generates ctors/accessors. */
    @Data
    @ToString
    @AllArgsConstructor
    @NoArgsConstructor
    public static class Event {
        public String user;    // join key used in main4
        public String url;
        public Long timestamp; // event time in ms, read by the watermark assigner
    }
}
