package com.ylh;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ylh.bean.PtVisitCnt;
import com.ylh.bean.Shop;
import com.ylh.untils.MyKafkaUtil;
import com.ylh.untils.SinkClickHouse;
import com.ylh.untils.SinkHbase;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.DataStream;;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

/**
 * Flink streaming job: consumes page-behavior logs from Kafka ("ods_base_log"),
 * filters page events, computes PV / UV / per-platform PV via the Table API,
 * splits iOS vs Android records, and lands platform stats in HBase and ClickHouse.
 * (MySQL / Redis / Kafka sinks for steps 8, 9 and 7 are kept but disabled.)
 */
public class FlinkDemo {
    public static void main(String[] args) throws Exception {
        // 1) Create the Flink execution environment, parallelism 1.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        env.setParallelism(1);

        // 2) Read the raw behavior log from Kafka.
        String topic = "ods_base_log";
        String groupId = "alog";
        DataStream<String> dataStream = env.addSource(MyKafkaUtil.getKafkaConsumer(topic, groupId));

        // 3) Keep only page logs: records that contain a "page" field.
        SingleOutputStreamOperator<JSONObject> page = dataStream
                .map(JSON::parseObject)
                .filter(new FilterFunction<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject json) throws Exception {
                        return json.containsKey("page");
                    }
                });

        // Flatten the "common" section plus selected "page" fields into a Shop bean.
        SingleOutputStreamOperator<Shop> pagestream = page.map(s -> {
            JSONObject common = s.getJSONObject("common");
            Shop shop = JSON.parseObject(common.toString(), Shop.class);
            // Parse the "page" object once instead of once per field.
            JSONObject pageObj = s.getJSONObject("page");
            shop.setPage_id(pageObj.getString("page_id"));
            shop.setDuring_time(pageObj.getLong("during_time"));
            shop.setLast_page_id(pageObj.getString("last_page_id"));
            shop.setTs(s.getLong("ts"));
            return shop;
        });

        tableEnv.createTemporaryView("page", pagestream);
//        tableEnv.executeSql("select * from page").print();

        // 4) Page-view count (PV) per page.
        Table table4 = tableEnv.sqlQuery(
                "select page_id,cast(count(1) as int) as cnt from page group by page_id");
//        table4.execute().print();

        // 5) Unique-visitor count (UV) per page, based on mid.
        // FIX: the previous query grouped by (mid, page_id) and counted rows, which
        // emits one row per visitor instead of the distinct-visitor count per page.
        Table table5 = tableEnv.sqlQuery(
                "select page_id,cast(count(distinct mid) as int) as cnt from page group by page_id");
//        table5.execute().print();

        // 6) Page views split by platform (Android vs iOS) via the os-field prefix.
        // FIX: SQL SUBSTRING is 1-based; substring(os,0,3) only returned the first
        // TWO characters ("iO"/"An"), so start at index 1 to get "iOS"/"And".
        Table table6 = tableEnv.sqlQuery(
                "select page_id,substring(os,1,3) as pt,cast(count(1) as int) as cnt " +
                "from page group by substring(os,1,3),page_id");

        // 7) Side-route iOS and Android records to separate Kafka topics.
        Table iOS = tableEnv.sqlQuery("select * from page where os like concat ('%','iOS','%')");
        Table And = tableEnv.sqlQuery("select * from page where os like concat ('%','And','%')");

        // Keep only insert records (f0 == true) from the retract streams.
        SingleOutputStreamOperator<Row> IOS = tableEnv.toRetractStream(iOS, Row.class).filter(x -> x.f0).map(x -> x.f1);
        SingleOutputStreamOperator<Row> AND = tableEnv.toRetractStream(And, Row.class).filter(x -> x.f0).map(x -> x.f1);

        SingleOutputStreamOperator<String> process1 = IOS.process(new ProcessFunction<Row, String>() {
            @Override
            public void processElement(Row value, ProcessFunction<Row, String>.Context ctx, Collector<String> out) throws Exception {
                out.collect(value.toString());
            }
        });
        SingleOutputStreamOperator<String> process2 = AND.process(new ProcessFunction<Row, String>() {
            @Override
            public void processElement(Row value, ProcessFunction<Row, String>.Context ctx, Collector<String> out) throws Exception {
                out.collect(value.toString());
            }
        });
        process1.print();
        String ios = "IOS";
        String and = "AND";
        FlinkKafkaProducer<String> kafkaProducer = MyKafkaUtil.getKafkaProducer(ios);
        FlinkKafkaProducer<String> kafkaProducer1 = MyKafkaUtil.getKafkaProducer(and);
//        process1.addSink(kafkaProducer);
//        process2.addSink(kafkaProducer1);

        // 8) Land the result of step 4 in MySQL. (Disabled.)
//        SingleOutputStreamOperator<PageVisit> mysql = tableEnv.toRetractStream(table4, PageVisit.class).filter(x -> x.f0).map(x -> x.f1);
//
//        mysql.addSink(new RichSinkFunction<PageVisit>() {
//            Connection conn = null;
//            PreparedStatement ps =null;
//            @Override
//            public void open(Configuration parameters) throws Exception {
//                Class.forName("com.mysql.jdbc.Driver");
//                conn = DriverManager.getConnection("jdbc:mysql://192.168.206.129:3306/2008b?characterEncoding=utf8","root", "root");
//                ps = conn.prepareStatement("insert into page_visit values (?,?)");
//            }
//
//            @Override
//            public void close() throws Exception {
//                ps.close();
//                conn.close();
//            }
//
//            @Override
//            public void invoke(PageVisit value, Context context) throws Exception {
//                ps.setString(1,value.getPage_id());
//                ps.setInt(2,value.getCnt());
//                ps.executeUpdate();
//            }
//        });

        // 9) Land the result of step 5 in Redis. (Disabled.)
//        SingleOutputStreamOperator<PageVisit> redis = tableEnv.toRetractStream(table5, PageVisit.class).filter(x -> x.f0).map(x -> x.f1);
//        redis.addSink(new RichSinkFunction<PageVisit>() {
//            JedisPool pool = null;
//            Jedis resourse = null;
//            @Override
//            public void open(Configuration parameters) throws Exception {
//                super.open(parameters);
//                pool = new JedisPool("192.168.206.129",6379);
//                resourse = pool.getResource();
//            }
//
//            @Override
//            public void close() throws Exception {
//                super.close();
//                pool.close();
//            }
//
//            @Override
//            public void invoke(PageVisit value, Context context) throws Exception {
//                super.invoke(value, context);
//                resourse.set(value.getPage_id().toString(),value.getCnt().toString());
//            }
//        });

        // 10) + 11) Land the platform PV stats from step 6 in HBase AND ClickHouse.
        // Convert the retract stream once and attach both sinks to the same stream,
        // instead of running the table-to-stream conversion twice.
        SingleOutputStreamOperator<PtVisitCnt> ptVisitCnt = tableEnv.toRetractStream(table6, PtVisitCnt.class)
                .filter(x -> x.f0)
                .map(x -> x.f1);
        ptVisitCnt.addSink(new SinkHbase());
        ptVisitCnt.addSink(new SinkClickHouse());

        env.execute();
    }
}
