package com.allen.flink.batch.sql;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Batch SQL API demo: reads order records from a comma-separated text file,
 * registers them as a table, and uses Flink SQL to sum the paid order amount
 * per user.
 *
 * <p>Expected input line format:
 * {@code orderNo,userId,orderStatus,goodsMoney,payType,payFrom}
 *
 * @author Allen
 * @since JDK 1.8
 **/
public class SQLAPI {

    /** Default input file, used when no path is supplied on the command line. */
    private static final String DEFAULT_INPUT_PATH = "D:\\Flink\\order.txt";

    public static void main(String[] args) throws Exception {
        // Allow the input path to be overridden via args[0]; fall back to the
        // original hard-coded location so existing invocations keep working.
        final String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT_PATH;

        // Obtain the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Create a TableEnvironment on top of the batch environment.
        BatchTableEnvironment tabEnv = BatchTableEnvironment.create(env);

        // Read the raw order lines: orderNo, userId, orderStatus, goodsMoney, payType, payFrom.
        DataSet<String> ds = env.readTextFile(inputPath);

        // Parse each CSV line into a typed Tuple6.
        DataSet<Tuple6<String, String, Integer, Double, Integer, Integer>> dsMap =
                ds.map(new MapFunction<String, Tuple6<String, String, Integer, Double, Integer, Integer>>() {
                    @Override
                    public Tuple6<String, String, Integer, Double, Integer, Integer> map(final String line) throws Exception {
                        String[] fields = line.split(",");
                        if (fields.length < 6) {
                            // Fail fast with context instead of an opaque ArrayIndexOutOfBoundsException.
                            throw new IllegalArgumentException("Malformed order record (expected 6 fields): " + line);
                        }
                        return new Tuple6<>(
                                fields[0],
                                fields[1],
                                Integer.parseInt(fields[2]),
                                Double.parseDouble(fields[3]),
                                Integer.parseInt(fields[4]),
                                Integer.parseInt(fields[5]));
                    }
                });

        // Register the DataSet as a table named "orders" with explicit field names.
        tabEnv.registerDataSet("orders", dsMap, "orderNo,userId,orderStatus,goodsMoney,payType,payFrom");

        // Sum the order amount per user, counting only paid orders (orderStatus = 1).
        Table table = tabEnv.sqlQuery(
                "select userId,sum(goodsMoney) as allMoney from orders where orderStatus=1 group by userId");

        // Convert the result table back to a DataSet of generic Rows and print it.
        DataSet<Row> result = tabEnv.toDataSet(table, Row.class);
        result.print();
    }
}
