package cn.ymatrix.jdbcToYMatrix;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.example.GroupReadSupport;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Reads rows from a Parquet file and inserts them into a PostgreSQL table using a fixed-size
 * thread pool with real JDBC batching.
 *
 * <p>The main thread reads the Parquet file sequentially and groups rows into chunks of
 * {@link #BATCH_SIZE}; each chunk is handed to the executor as an independent task. Every task
 * opens its OWN connection — JDBC {@link Connection} objects are not safe for concurrent
 * statement execution, so a connection must never be shared across worker threads.
 */
public class ParquetToPostgresMultiThreaded {

  private static final String URL = "jdbc:postgresql://localhost:5432/dbname";
  private static final String USER = "username";
  private static final String PASSWORD = "password";

  // Number of worker threads inserting concurrently.
  private static final int THREAD_COUNT = 5;

  // Rows accumulated per addBatch()/executeBatch() round trip. One row per executeBatch()
  // defeats batching entirely; a few hundred per batch is the usual sweet spot for PostgreSQL.
  private static final int BATCH_SIZE = 500;

  // NOTE(review): placeholder SQL — replace column list and '?' markers with the real schema.
  private static final String INSERT_SQL =
      "INSERT INTO your_table_name (column1, column2, ..., columnN) VALUES (?, ?, ..., ?)";

  public static void main(String[] args) {
    ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT);
    try {
      Configuration conf = new Configuration();
      Path path = new Path(new File("path_to_parquet_file").toURI());

      // try-with-resources guarantees the reader is closed even if reading throws.
      try (ParquetReader<Group> reader =
          ParquetReader.builder(new GroupReadSupport(), path).withConf(conf).build()) {
        List<Group> chunk = new ArrayList<>(BATCH_SIZE);
        Group row;
        while ((row = reader.read()) != null) {
          chunk.add(row);
          if (chunk.size() == BATCH_SIZE) {
            // Hand the task a private copy so the reader loop can keep reusing 'chunk'.
            final List<Group> batch = new ArrayList<>(chunk);
            executor.submit(() -> insertBatch(batch));
            chunk.clear();
          }
        }
        if (!chunk.isEmpty()) {
          // Flush the final partial chunk (handles files whose size is not a multiple
          // of BATCH_SIZE, including files smaller than one batch).
          final List<Group> batch = new ArrayList<>(chunk);
          executor.submit(() -> insertBatch(batch));
        }
      }
    } catch (IOException e) {
      throw new RuntimeException("Failed to read Parquet file", e);
    } finally {
      // Always release the pool, even when reading fails, so the JVM can exit.
      shutdownAndAwait(executor);
    }

    System.out.println(
        "Data inserted successfully using multiple threads with batch processing from Parquet.");
  }

  /**
   * Inserts one chunk of rows in a single JDBC batch. Runs on a worker thread; opens and closes
   * its own connection and statement via try-with-resources so nothing leaks on failure.
   *
   * @param rows the rows to insert; never modified
   */
  private static void insertBatch(List<Group> rows) {
    try (Connection con = DriverManager.getConnection(URL, USER, PASSWORD);
        PreparedStatement pst = con.prepareStatement(INSERT_SQL)) {
      for (Group group : rows) {
        // NOTE(review): assumes column 0 is a string field — adjust per the real Parquet schema.
        pst.setString(1, group.getString(0, 0));
        // Set other parameters accordingly.
        pst.addBatch();
      }
      pst.executeBatch();
    } catch (SQLException e) {
      // Report which chunk failed; other chunks keep going (best-effort load, as original).
      System.err.println("Batch insert of " + rows.size() + " rows failed: " + e.getMessage());
      e.printStackTrace();
    }
  }

  /**
   * Shuts down the executor and blocks until submitted tasks finish, replacing the original
   * busy-wait spin on {@code isTerminated()}. Restores the interrupt flag if interrupted.
   *
   * @param executor the pool to drain and terminate
   */
  private static void shutdownAndAwait(ExecutorService executor) {
    executor.shutdown();
    try {
      if (!executor.awaitTermination(1, TimeUnit.HOURS)) {
        executor.shutdownNow();
      }
    } catch (InterruptedException e) {
      executor.shutdownNow();
      Thread.currentThread().interrupt();
    }
  }
}
