package com.gukun.springboot.tdengine.service.backuprecover.backup;

import cn.hutool.core.lang.UUID;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.ExcelWriter;
import com.alibaba.excel.write.metadata.WriteSheet;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.gukun.springboot.tdengine.service.backuprecover.entity.Table;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.RateLimiter;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @author zhang
 */
/**
 * Task that backs up the metadata (table name, tag description, tn, ta) of
 * every child table of one TDengine super table into Snappy-compressed Avro
 * files, at most {@link #BATCH_SIZE} records per file.
 *
 * <p>NOTE(review): the super-table and database names are interpolated
 * directly into SQL ({@code USE}, {@code FROM}) — assumed to come from a
 * trusted catalog, not user input; confirm upstream.
 *
 * @author zhang
 */
public class BackupDbTableTask implements Runnable {

    /** Maximum number of records written into a single Avro file. */
    private static final int BATCH_SIZE = 50000;
    /** Number of rows written to Excel per chunk in {@link #writeData1}. */
    private static final int EXCEL_CHUNK_SIZE = 5000;

    private final String selectSql = "select DISTINCT tbname, ti,tn,ta FROM %s";
    private final String stableName;
    private final String dbName;
    private final HikariDataSource hikariDataSource;
    /** Shared counter of tables backed up so far, updated across tasks. */
    private final AtomicLong tableNum;
    /** Output directory prefix; generated file names are appended to it. */
    private final String filePath;

    public BackupDbTableTask(String dbName, String stableName, HikariDataSource hikariDataSource, AtomicLong tableNum, String filePath) {
        this.dbName = dbName;
        this.stableName = stableName;
        this.hikariDataSource = hikariDataSource;
        this.tableNum = tableNum;
        this.filePath = filePath;
    }

    /**
     * Streams all child-table rows of the super table and flushes them to
     * Avro files in batches of {@link #BATCH_SIZE}.
     *
     * @throws RuntimeException wrapping any SQL/IO failure, with cause preserved
     */
    @Override
    public void run() {
        String realSql = String.format(selectSql, stableName);
        // Reused buffer: one Avro file holds at most BATCH_SIZE records.
        ArrayList<GenericRecord> tableList = new ArrayList<>(BATCH_SIZE);
        Schema schema = getSchema(stableName);
        // Throttle processing to prevent CPU usage from getting too high.
        RateLimiter rateLimiter = RateLimiter.create(20000);
        try (Connection connection = hikariDataSource.getConnection();
             Statement statement = connection.createStatement()) {
            statement.execute("USE " + dbName);
            // BUG FIX: the fetch size must be set BEFORE executing the query;
            // previously it was set afterwards and had no effect on the
            // already-open result set.
            statement.setFetchSize(2500);
            // BUG FIX: close the ResultSet via try-with-resources instead of a
            // hand-rolled finally block that ran after the Statement was closed.
            try (ResultSet rs = statement.executeQuery(realSql)) {
                while (rs.next()) {
                    rateLimiter.acquire();
                    String tableName = rs.getString("tbname");
                    String ti = rs.getString("ti");
                    // "ti" carries a JSON document whose "tagDesc" field is the
                    // tag description. Guard against a null/empty "ti" column —
                    // the schema allows a null desc.
                    JSONObject jsonObject = JSON.parseObject(ti);
                    String desc = jsonObject == null ? null : jsonObject.getString("tagDesc");
                    GenericRecord table = new GenericData.Record(schema);
                    table.put("name", tableName);
                    table.put("desc", desc);
                    table.put("tn", rs.getString("tn"));
                    table.put("ta", rs.getString("ta"));
                    tableList.add(table);
                    if (tableList.size() >= BATCH_SIZE) {
                        writeData(tableList, schema);
                    }
                }
            }
            // Flush the final partial batch, if any.
            if (!tableList.isEmpty()) {
                writeData(tableList, schema);
            }
        } catch (Exception e) {
            // Preserve the cause with context; the executor surfaces the failure.
            throw new RuntimeException(
                    "Backup of stable '" + stableName + "' in db '" + dbName + "' failed", e);
        }
    }

    /**
     * Writes the buffered records to a new Avro file, adds the record count to
     * the shared counter, then clears the buffer for reuse.
     *
     * <p>BUG FIX: append failures are no longer swallowed per record — a
     * half-written backup file must be reported to the caller, not silently
     * truncated. The method already declared {@code throws IOException}.
     *
     * @param tableList records to persist; cleared on success
     * @param schema    Avro schema of the records
     * @throws IOException if creating or writing the Avro file fails
     */
    public void writeData(ArrayList<GenericRecord> tableList, Schema schema) throws IOException {
        try (DataFileWriter<GenericRecord> dataFileWriter = createFile(schema)) {
            for (GenericRecord table : tableList) {
                dataFileWriter.append(table);
            }
        }
        tableNum.getAndAdd(tableList.size());
        tableList.clear();
    }

    /**
     * Exports the tables to a single-sheet Excel file in chunks of
     * {@link #EXCEL_CHUNK_SIZE} rows.
     *
     * <p>BUG FIX: the original loop was hard-capped at 10 iterations and
     * silently dropped every row beyond 50,000; this version writes the whole
     * list regardless of size. Export is best-effort: failures are logged and
     * the buffer is always cleared.
     *
     * @param tableList rows to export; cleared when the method returns
     */
    public void writeData1(ArrayList<Table> tableList) {
        String fileName = filePath + UUID.fastUUID() + ".xlsx";
        try (ExcelWriter excelWriter = EasyExcel.write(fileName, Table.class).build()) {
            // The sheet must be created only once and reused for every chunk.
            WriteSheet writeSheet = EasyExcel.writerSheet("sheet1").build();
            for (int start = 0; start < tableList.size(); start += EXCEL_CHUNK_SIZE) {
                int end = Math.min(start + EXCEL_CHUNK_SIZE, tableList.size());
                // subList is a live view; hand EasyExcel an independent copy.
                excelWriter.write(new ArrayList<>(tableList.subList(start, end)), writeSheet);
            }
            // No explicit finish(): close() via try-with-resources finishes the file.
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            tableList.clear();
        }
    }

    /**
     * Builds the Avro schema for one super table's backup records: a required
     * string {@code name} plus nullable strings {@code desc}, {@code tn},
     * {@code ta}.
     *
     * <p>NOTE(review): the super-table name is used directly as the Avro record
     * name — assumes it is a valid Avro identifier; confirm for names with
     * special characters.
     *
     * @param stableName super-table name, used as the record name
     * @return the assembled schema
     */
    public Schema getSchema(String stableName) {
        SchemaBuilder.RecordBuilder<Schema> recordBuilder = SchemaBuilder.record(stableName).namespace("com.sciyon");
        SchemaBuilder.FieldAssembler<Schema> fieldAssembler = recordBuilder.fields();
        fieldAssembler.name("name").type().stringType().noDefault();
        fieldAssembler.nullableString("desc", null);
        fieldAssembler.nullableString("tn", null);
        fieldAssembler.nullableString("ta", null);
        return fieldAssembler.endRecord();
    }

    /**
     * Creates a new Avro file with a random name under {@link #filePath},
     * creating parent directories as needed, and returns an open writer for it.
     *
     * @param schema schema the file is created with
     * @return an open writer; the caller is responsible for closing it
     * @throws IOException if the file cannot be created
     */
    public DataFileWriter<GenericRecord> createFile(Schema schema) throws IOException {
        String fileName = filePath + UUID.fastUUID() + ".avro";
        File file = new File(fileName);
        if (!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }
        return createDataFileWriter(file, schema);
    }

    /**
     * Opens a Snappy-compressed Avro {@link DataFileWriter} on the given file.
     *
     * @param file   target file (created/truncated)
     * @param schema schema written into the file header
     * @return an open writer; the caller is responsible for closing it
     * @throws IOException if the writer cannot be created
     */
    public DataFileWriter<GenericRecord> createDataFileWriter(File file, Schema schema) throws IOException {
        DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(new GenericDatumWriter<>(schema));
        dataFileWriter.setCodec(CodecFactory.snappyCodec());
        dataFileWriter.create(schema, file);
        return dataFileWriter;
    }
}
