package com.shujia.real;

import com.alibaba.alink.common.MLEnvironmentFactory;
import com.alibaba.alink.operator.stream.StreamOperator;
import com.alibaba.alink.pipeline.PipelineModel;
import com.shujia.common.Config;
import com.shujia.map.SentMapFunaction;
import com.shujia.sink.HbaseMapper;
import com.shujia.sink.HbaseSink;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;
import org.apache.hadoop.hbase.client.Put;

import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;


/**
 * Real-time sentiment trend job.
 *
 * <p>Reads comment rows from Kafka, scores each comment with a pre-trained
 * Alink {@link PipelineModel}, aggregates a running positive/negative count
 * per sentiment topic per hour, and writes the totals to HBase
 * (table {@code comment_sentiment}).
 */
public class ComputeSentimentTrendApp {

    public static void main(String[] args) throws Exception {

        // Alink requires the Flink environment to be obtained through its own
        // factory rather than StreamExecutionEnvironment.getExecutionEnvironment().
        StreamTableEnvironment tableEnv = MLEnvironmentFactory.getDefault().getStreamTableEnvironment();


        // Register the Kafka topic "com" as table t_comment:
        // '|'-delimited CSV with six string columns.
        tableEnv.connect(
                new Kafka()
                        .version("universal")
                        .topic("com")
                        .startFromEarliest()
                        .property("zookeeper.connect", Config.getString("kafka.zookeeper.connect"))
                        .property("bootstrap.servers", Config.getString("kafka.bootstrap.servers"))
                        .property("group.id", "asdwasdssadas"))
                .withFormat(
                        new Csv().fieldDelimiter('|'))
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("sentId", DataTypes.STRING())
                        .field("date", DataTypes.STRING())
                        .field("likeCount", DataTypes.STRING())
                        .field("userId", DataTypes.STRING())
                        .field("text", DataTypes.STRING()))
                .inAppendMode()
                .createTemporaryTable("t_comment");


        // Load the trained sentiment model from disk.
        PipelineModel model = PipelineModel.load("data/model");

        // Score every incoming comment; transform() appends the model's
        // prediction columns to the input table.
        Table comment = model.transform(tableEnv.from("t_comment"));


        // Keep only the fields needed for the hourly trend.
        Table select = comment.select("sentId,date,prediction,predictionDetail");

        DataStream<Row> rowDataStream = tableEnv.toAppendStream(select, Row.class);

        // Reshape rows into (sentId, hourString, prediction, count) tuples.
        SingleOutputStreamOperator<Tuple4<String, String, Double, Integer>> map =
                rowDataStream.map(new SentMapFunaction());

        // Running count of positive/negative predictions per topic per hour:
        // key on (sentId, hour, prediction) and sum the count field.
        SingleOutputStreamOperator<Tuple4<String, String, Double, Integer>> result = map
                .keyBy(0, 1, 2)
                .sum(3);


        // Map each aggregate to an HBase Put: rowkey = sentId, family "info",
        // qualifier = prediction label, cell timestamp = the parsed hour so a
        // newer count for the same hour supersedes the older one.
        HbaseMapper<Tuple4<String, String, Double, Integer>> hbaseMapper = new HbaseMapper<Tuple4<String, String, Double, Integer>>() {
            @Override
            public Put mapper(Tuple4<String, String, Double, Integer> value) {
                Put put = new Put(value.f0.getBytes(StandardCharsets.UTF_8));
                // "HH" (0-23) instead of "hh" (12-hour, 1-12): with "hh",
                // afternoon hours would collide with morning hours in the cell
                // timestamp. Assumes the upstream date string uses a 24-hour
                // hour field — TODO confirm against SentMapFunaction.
                SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd:HH");
                try {
                    long ts = format.parse(value.f1).getTime();
                    put.add("info".getBytes(StandardCharsets.UTF_8),
                            value.f2.toString().getBytes(StandardCharsets.UTF_8),
                            ts,
                            value.f3.toString().getBytes(StandardCharsets.UTF_8));
                } catch (ParseException e) {
                    // NOTE(review): an unparsable date currently yields a Put
                    // with no columns and only a console stack trace — confirm
                    // HbaseSink tolerates empty Puts, or drop the record here.
                    e.printStackTrace();
                }
                return put;
            }
        };


        HbaseSink<Tuple4<String, String, Double, Integer>> hbaseSink = new HbaseSink<>(
                "comment_sentiment",
                hbaseMapper,
                Config.getString("hbase.zookeeper.quorum"));


        result.print();

        // Persist the running counts to HBase.
        result.addSink(hbaseSink);

        select.printSchema();

        // Alink jobs must be launched via StreamOperator.execute(), not the
        // plain Flink env.execute().
        StreamOperator.execute();

    }
}
