package com.atguigu.gmall.realtime.sink;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.bean.TableProcess;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.JdbcUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * @Author lzc
 * @Date 2022/4/17 14:28
 */
/**
 * Flink sink that upserts dimension rows into Apache Phoenix.
 *
 * <p>Each incoming element is a pair of (row data, table-process config): the config
 * supplies the target table name and the comma-separated column list, the JSON object
 * supplies the column values. A single JDBC connection is opened in {@link #open} and
 * reused for the lifetime of the task.
 */
public class PhoenixSink extends RichSinkFunction<Tuple2<JSONObject, TableProcess>> {
    private Connection conn;
    
    /**
     * Opens the JDBC connection to Phoenix once per task instance.
     * Credentials are null because Phoenix is accessed without user/password here.
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        String driver = Constant.PHOENIX_DRIVER;
        String url = Constant.PHOENIX_URL;
        conn = JdbcUtil.getJdbcConnection(driver, url, null, null);
    }
    
    /** Closes the Phoenix connection when the task shuts down. */
    @Override
    public void close() throws Exception {
        if (conn != null && !conn.isClosed()) {
            conn.close();
        }
    }
    
    /**
     * Invoked once per element: writes the row to Phoenix.
     *
     * @param value   (row data, table-process config) pair
     * @param context sink context (unused)
     */
    @Override
    public void invoke(Tuple2<JSONObject, TableProcess> value,
                       Context context) throws Exception {
        writeToPhoenix(value);
        
        // 3. Update the cached dimension data (currently disabled; see updateCache).
        //        updateCache(value);
    }
    
    // Disabled reference implementation: refresh (or evict) the Redis-cached copy of a
    // dimension row after an update, so readers do not serve stale data.
    private void updateCache(Tuple2<JSONObject, TableProcess> value) {
        
        
        /*JSONObject data = value.f0;  // comes from mysql, so field names are lower-case
        TableProcess tp = value.f1;
        
        String key = tp.getSink_table() + ":" + data.getLong("id");
        // only act when this is an update AND the row is already cached
        if ("update".equals(tp.getOperate_type()) && redisClient.exists(key)) {
            // 1. graceful: overwrite the cached value with upper-cased keys
            JSONObject dataUpper = new JSONObject();
            for (Map.Entry<String, Object> entry : data.entrySet()) {
                String k = entry.getKey().toUpperCase();
                Object v = entry.getValue();
                dataUpper.put(k, v);
            }
            redisClient.setex(key, 2 * 24 * 60 * 60, dataUpper.toJSONString());
            
            // 2. blunt: just delete the cached entry
    
    //            redisClient.del(key);
        }*/
    }
    
    /**
     * Builds and executes an UPSERT for one row, e.g.
     * {@code upsert into SCHEMA.user_info(id, name, age) values(?,?,?)}.
     *
     * <p>Table and column names come from trusted config (TableProcess), not user input,
     * so they are concatenated; the values are bound through placeholders.
     *
     * @throws SQLException if the statement fails
     */
    private void writeToPhoenix(Tuple2<JSONObject, TableProcess> value) throws SQLException {
        JSONObject data = value.f0;
        TableProcess tp = value.f1;
        
        StringBuilder sql = new StringBuilder();
        sql
            .append("upsert into ")
            .append(Constant.PHOENIX_SCHEMA)
            .append(".")
            .append(tp.getSink_table())
            .append("(")
            .append(tp.getSink_columns())
            .append(")values(")
            // one '?' placeholder per comma-separated column
            .append(tp.getSink_columns().replaceAll("[^,]+", "?"))
            .append(")");
        System.out.println("插入语句: " + sql.toString());
        
        // try-with-resources: the statement is closed even if execute/setString throws
        // (the original leaked the PreparedStatement on failure).
        try (PreparedStatement ps = conn.prepareStatement(sql.toString())) {
            String[] columns = tp.getSink_columns().split(",");
            for (int i = 0; i < columns.length; i++) {
                // trim so config like "id, name" still resolves against the JSON keys
                String columnName = columns[i].trim();
                Object v = data.get(columnName);
                // v may be null when the mysql column is NULL; bind SQL NULL, not "null"
                ps.setString(i + 1, v == null ? null : v.toString());
            }
            
            ps.execute();
            conn.commit();
        }
    }
    
}
