package kafkaToMysql.App;
import com.streamxhub.streamx.flink.core.StreamEnvConfig;
import com.streamxhub.streamx.flink.core.java.function.SQLFromFunction;
import com.streamxhub.streamx.flink.core.java.source.KafkaSource;
import com.streamxhub.streamx.flink.core.scala.StreamingContext;
import com.streamxhub.streamx.flink.core.java.sink.JdbcSink;
import com.streamxhub.streamx.flink.core.scala.source.KafkaRecord;
import kafkaToMysql.bean.MyResult;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;


/**
 * Flink job that consumes CSV records ({@code "name,count"}) from Kafka,
 * parses them into {@link MyResult} beans, and writes each bean to MySQL
 * via a JDBC sink. Environment settings (parallelism, checkpointing, Kafka
 * and JDBC connection details) come from the StreamX application config
 * passed in through {@code args}, not from hard-coded values here.
 */
public class KafkaSimpleJavaApp {

    public static void main(String[] args) {
        // Build the StreamX streaming context from the CLI / config arguments.
        StreamEnvConfig envConfig = new StreamEnvConfig(args, null);
        StreamingContext context = new StreamingContext(envConfig);

        // Parse each Kafka record's value ("name,count") into a MyResult bean.
        // An anonymous class (not a lambda) is used so Flink can recover the
        // generic output type without an explicit .returns(...) hint.
        DataStream<MyResult> source = new KafkaSource<String>(context)
                .getDataStream()
                .map(new MapFunction<KafkaRecord<String>, MyResult>() {
                    @Override
                    public MyResult map(KafkaRecord<String> record) {
                        String value = record.value();
                        String[] fields = value.split(",");
                        // Fail fast with a descriptive message instead of an
                        // ArrayIndexOutOfBoundsException on malformed input.
                        if (fields.length < 2) {
                            throw new IllegalArgumentException(
                                    "Malformed Kafka record, expected \"name,count\" but got: " + value);
                        }
                        // trim() tolerates whitespace after the comma, e.g. "a, 1".
                        return new MyResult(fields[0], Integer.parseInt(fields[1].trim()));
                    }
                });

        // Write each bean to MySQL; the bean renders its own INSERT statement.
        new JdbcSink<MyResult>(context).sql(new SQLFromFunction<MyResult>() {
            @Override
            public String from(MyResult bean) {
                return bean.toSql();
            }
        }).sink(source);

        // Submit and start the Flink job.
        context.start();
    }
}
