// NOTE(review): this entire file is commented-out dead code (a legacy Flink
// AppendStreamTableSink implementation with two unimplemented `???` stubs).
// If it is no longer needed, prefer deleting the file and relying on VCS
// history; if it is meant to be restored, confirm the project's Flink version
// still provides the legacy `AppendStreamTableSink`/`emitDataStream` API.
//package cn.getech.data.development.sink.jdbc.append
//
//import cn.getech.data.development.bean.jdbc.JDBCSqlParserResourceBean
//import cn.getech.data.development.utils.FlinkJDBCAnalysisUtils
//import org.apache.flink.api.common.typeinfo.TypeInformation
//import org.apache.flink.streaming.api.datastream.{DataStream, DataStreamSink}
//import org.apache.flink.table.api.TableSchema
//import org.apache.flink.table.sinks.{AppendStreamTableSink, TableSink}
//import org.apache.flink.table.utils.TableConnectorUtils
//import org.apache.flink.types.Row
//
//
//class FlinkCustomJDBCTableSink(model: JDBCSqlParserResourceBean) extends AppendStreamTableSink[Row] {
//
//  private val tableSchema: TableSchema = TableSchema.builder().fields(model.getFieldNames,model.getFieldTypes).build() //TableSchema.builder().fields().build()
//
//  override def consumeDataStream(dataStream: DataStream[Row]): DataStreamSink[_] = {
//    dataStream.addSink(FlinkJDBCAnalysisUtils.richSink(model))
//      .setParallelism(dataStream.getParallelism)
//      .name(TableConnectorUtils.generateRuntimeName(this.getClass, getFieldNames))
//  }
//
//  override def getTableSchema: TableSchema = tableSchema
//
//  override def emitDataStream(dataStream: DataStream[Row]): Unit = ???
//
//  override def configure(strings: Array[String], typeInformations: Array[TypeInformation[_]]): TableSink[Row] = ???
//}
