package com.fwmagic.flink.projectcase.queryactivitycase;

import com.fwmagic.flink.projectcase.queryactivitycase.bean.ActivityBean;
import com.fwmagic.flink.projectcase.queryactivitycase.map.DataToActivityBeanMapFunction;
import com.fwmagic.flink.projectcase.queryactivitycase.utils.FlinkUtilsV1;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * 准备发送到kafka的数据：
 * u001,A1,2019-09-02 10:10:11,1,北京市
 * u002,A2,2019-08-03 10:11:11,2,辽宁市
 * u003,A3,2019-06-08 10:11:11,3,河北市
 *
 * mysql中准备数据：
 * use flink-test;
 * CREATE TABLE `t_activities` (
 *   `id` varchar(10) NOT NULL,
 *   `name` varchar(50) DEFAULT NULL,
 *   `last_update` datetime DEFAULT NULL,
 *   PRIMARY KEY (`id`)
 * ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
 *
 * insert into `flink-test`.`t_activities` ( `id`, `name`, `last_update`) values ( 'A1', '新人礼包', '2020-03-02 16:42:09');
 * insert into `flink-test`.`t_activities` ( `id`, `name`, `last_update`) values ( 'A2', '月末活动', '2020-03-02 16:43:32');
 * insert into `flink-test`.`t_activities` ( `id`, `name`, `last_update`) values ( 'A3', '周末活动', '2020-03-02 16:43:44');
 *
 * 打印结果：
 * 7> ActivityBean{uid='u001', aid='A1', activetyName='新人礼包', time='2019-09-02 10:10:11', eventType=1, province='北京市'}
 * 6> ActivityBean{uid='u002', aid='A2', activetyName='月末活动', time='2019-08-03 10:11:11', eventType=2, province='辽宁市'}
 * 5> ActivityBean{uid='u003', aid='A3', activetyName='周末活动888', time='2019-06-08 10:11:11', eventType=3, province='河北市'}
 */
/**
 * Flink driver: consumes raw activity events from Kafka, enriches each record
 * into an {@link ActivityBean}, and prints the result to stdout.
 */
public class QueryActivityNameApplication {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka source stream of raw CSV lines; connection/topic settings are taken from args.
        DataStream<String> rawLines = FlinkUtilsV1.createKafkaSource(args, new SimpleStringSchema(), env);

        // Parse/enrich each line into an ActivityBean.
        // NOTE(review): per the file header this map presumably resolves the activity
        // name from the MySQL t_activities table — confirm in DataToActivityBeanMapFunction.
        SingleOutputStreamOperator<ActivityBean> enriched =
                rawLines.map(new DataToActivityBeanMapFunction());

        // Sink: print enriched beans to stdout (subtask index prefixes each line).
        enriched.print();

        env.execute("QueryActivityNameApplication");
    }
}
