package com.tushen.task;

import com.tushen.NCTask;
import com.tushen.utils.SqlSessionFactoryUtil;
import com.tushen.utils.Utils;
import org.apache.ibatis.session.SqlSession;
import ucar.ma2.Array;
import ucar.nc2.NetcdfFile;
import ucar.nc2.Variable;

import java.io.File;
import java.io.FileWriter;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.Time;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/**
 * 中国海表层流场NC数据处理 — China sea-surface current-field NetCDF processing.
 *
 * Pipeline:
 *  1. Load the nc file into table nc_shp_result_&lt;version&gt; via CSV + COPY.
 *  2. Thin nc_shp_result_&lt;version&gt; into uv_nc_shp_layer_&lt;version&gt;.
 *  3. Create btree indexes on (dt, tm) for both tables.
 *
 * @author likun
 * @version 1.0
 * @date 2020/12/12 4:47 下午
 */
public class NC {

    /**
     * Entry point.
     *
     * @param args args[0] = path of the source .nc file,
     *             args[1] = target table name (nc_shp_result_&lt;version&gt;),
     *                       also used to name the intermediate CSV
     */
    public static void main(String[] args) {
        if (args.length < 2) {
            System.out.println("please input path");
            return;
        }
        String filePath = args[0];
        // Working directory for the intermediate CSV built from the nc file.
        File tmpDir = new File("D:/tmp");
        if (!tmpDir.exists() && !tmpDir.mkdir()) {
            // Fail fast: without the tmp dir every later FileWriter call would fail obscurely.
            System.out.println("cannot create tmp dir: " + tmpDir.getAbsolutePath());
            return;
        }
        String tableName = args[1];
        // CSV named after the versioned table, e.g. D:/tmp/nc_shp_result_ss.csv
        String outPath = "D:/tmp/" + tableName + ".csv";

        processNC(filePath, outPath, tableName);
    }

    /** Ad-hoc smoke test: reads a local sample nc file via NCTask. */
    public static void test() {
        String path = "/Users/likun/Downloads/nwp_cur_20190523.nc";

        NCTask.readNC(path);
    }

    /**
     * Drops and re-creates the raw result table.
     * <p>
     * NOTE(review): tableName is concatenated into DDL; DDL cannot be
     * parameterized, so the name must come from a trusted source (here: CLI args).
     *
     * @param tableName target table, e.g. nc_shp_result_&lt;version&gt;
     */
    private static void createTable(String tableName) {
        String sql = "DROP TABLE IF EXISTS " + tableName + ";" +
                "CREATE TABLE " + tableName + " (" +
                "  dt date NOT NULL," +
                "  tm time(0) NOT NULL," +
                "  x int2 NOT NULL," +
                "  y int2 NOT NULL," +
                "  u float4," +     // actually stores direction — see writeData2File
                "  v float4" +      // actually stores speed — see writeData2File
                ");";
        SqlSessionFactoryUtil.excuteUpdateSql(sql);
        System.out.println(tableName + " created;");
    }

    /**
     * Indexes the raw table, thins it into the uv_…_layer table
     * (keeping every 5th/10th/20th/40th grid point, classed 4..1),
     * joins the geometry from nc_shp, then indexes the thinned table too.
     *
     * @param tableName raw result table (nc_shp_result_&lt;version&gt;)
     */
    private static void createTableIndex(String tableName) {
        String sql = "CREATE INDEX " + tableName + "_dtm_idx ON " + tableName + " USING btree (" +
                "  dt pg_catalog.date_ops ASC NULLS LAST," +
                "  tm pg_catalog.time_ops ASC NULLS LAST);";
        SqlSessionFactoryUtil.excuteUpdateSql(sql);
        System.out.println(tableName + " index created;");

        // nc_shp_result_<version> -> uv_nc_shp_layer_<version>
        String extractResultName = "uv_" + tableName.replace("result", "layer");
        // Thin the surface current field: coarser grids get a lower class number.
        String sqlExtract = "with a as (\n" +
                "SELECT dt,tm,u,v,a.x,a.y,case \n" +
                "when y%40=0 and x%40=0 then 1 \n" +
                "when  y%20=0 and x%20=0 then 2 \n" +
                "when  y%10=0 and x%10=0 then 3\n" +
                "when  y%5=0 and x%5=0 then 4\n" +
                "end  as class\n" +
                "from " + tableName + " a where \n" +
                "  y%5=0 and x%5=0)\n" +
                "SELECT dt,tm,u,v,a.x,a.y,geom,class \n" +
                "into " + extractResultName +
                " from  a inner join nc_shp b on a.x=b.x and a.y=b.y";
        SqlSessionFactoryUtil.excuteUpdateSql(sqlExtract);
        System.out.println(extractResultName + "表层流场抽稀完成;");

        sql = "CREATE INDEX " + extractResultName + "_dtm_idx ON " + extractResultName + " USING btree (" +
                "  dt pg_catalog.date_ops ASC NULLS LAST," +
                "  tm pg_catalog.time_ops ASC NULLS LAST);";
        SqlSessionFactoryUtil.excuteUpdateSql(sql);
        System.out.println(extractResultName + " index created;");
    }

    /**
     * Drives the whole pipeline: reads each time step of the nc file,
     * appends it to a batch CSV, bulk-loads the CSV every 40 steps
     * (and once at the end), then builds the indexes.
     *
     * @param fileName    path of the source .nc file
     * @param outFilePath path of the intermediate CSV batch file
     * @param tableName   target table name
     */
    public static void processNC(String fileName, String outFilePath, String tableName) {

        File file = new File(outFilePath);
        if (file.exists()) {
            boolean flag = file.delete();
            System.out.println("delete outFile:" + flag);
        }
        createTable(tableName);

        NetcdfFile netcdfFile = null;
        try {
            netcdfFile = NetcdfFile.open(fileName);
            Variable lat = netcdfFile.findVariable("lat");
            Array latData = lat.read();
            // BUGFIX: previously the 'lat' variable was reused to read 'lon'.
            Variable lon = netcdfFile.findVariable("lon");
            Array lonData = lon.read();
            // BUGFIX: yyyy (calendar year), not YYYY (week-based year) —
            // YYYY yields the wrong year around the New Year boundary.
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHH");
            // The nc 'time' variable stores hours since this epoch.
            LocalDateTime startTime = LocalDateTime.of(1990, 1, 1, 0, 0, 0);
            Variable time = netcdfFile.findVariable("time");
            Variable u = netcdfFile.findVariable("u");
            Variable v = netcdfFile.findVariable("v");
            // Read one (time, level) slab covering the full lat/lon grid.
            int[] shp = {1, 1, (int) latData.getSize(), (int) lonData.getSize()};
            Array timeData = time.read();
            long start = System.currentTimeMillis();
            for (int i = 0; i < timeData.getSize(); i++) {
                int[] origin = {i, 0, 0, 0};
                LocalDateTime dtm = startTime.plusHours(timeData.getLong(i));
                String fileNamePath = "nwp_cur_" + formatter.format(dtm) + ".csv";
                float[][] uData = (float[][]) u.read(origin, shp).reduce(0).reduce(0).copyToNDJavaArray();
                float[][] vData = (float[][]) v.read(origin, shp).reduce(0).reduce(0).copyToNDJavaArray();
                writeData2File(dtm, outFilePath, uData, vData, latData, lonData);
                // Flush to the DB every 40 time steps to bound the CSV size.
                if (i % 40 == 0) {
                    dumpNCData(outFilePath, tableName);
                    file.delete();
                }
                System.out.println(i + ": " + fileNamePath + " finished");
            }
            long end = System.currentTimeMillis();
            double div = (end - start) / 1000.0;

            dumpNCData(outFilePath, tableName); // load the final partial batch
            file.delete();
            long end2 = System.currentTimeMillis();
            div = (end2 - end) / 1000.0;
            System.out.println("最后一次入库耗时:" + div + "秒");

            createTableIndex(tableName); // build indexes + thinned layer table
            long end3 = System.currentTimeMillis();
            div = (end3 - end2) / 1000.0;
            System.out.println("创建索引耗时:" + div + "秒");
            div = (System.currentTimeMillis() - start) / 1000.0;
            System.out.println("总耗时:" + div + "秒");

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUGFIX: the NetcdfFile handle was never closed before.
            if (netcdfFile != null) {
                try {
                    netcdfFile.close();
                } catch (Exception ignored) {
                    // best-effort close; original error (if any) already reported
                }
            }
        }
    }

    /**
     * Appends one time step to the batch CSV.
     * <p>
     * Each row is: dt, tm, x (lon index), y (lat index), direction, speed —
     * note that direction/speed are stored in the DB columns named u/v.
     * A header line is written whenever a fresh CSV file is started, because
     * the COPY command in {@link #dumpNCData} uses the HEADER option and
     * skips the first line (previously one data row per batch was lost).
     *
     * @param dtm      time step as local date-time
     * @param u_data   eastward component grid [lat][lon]
     * @param v_data   northward component grid [lat][lon]
     * @param lat_data currently unused; kept for interface stability
     * @param lon_data currently unused; kept for interface stability
     */
    private static void writeData2File(LocalDateTime dtm, String outFilePath, float[][] u_data, float[][] v_data, Array lat_data, Array lon_data) {
        ZoneId zoneId = ZoneId.systemDefault();
        ZonedDateTime zdt = dtm.atZone(zoneId); // interpret dtm in the system zone
        Date date = Date.from(zdt.toInstant());
        java.sql.Date dt = new java.sql.Date(date.getTime());
        Time tm = new Time(date.getTime());
        File outFile = new File(outFilePath);
        boolean needHeader = !outFile.exists() || outFile.length() == 0;
        // try-with-resources: the writer is closed even if an exception is thrown.
        try (FileWriter fileWriter = new FileWriter(outFilePath, true)) {
            if (needHeader) {
                // BUGFIX: COPY ... csv header skips the first input line.
                fileWriter.write("dt,tm,x,y,u,v" + System.lineSeparator());
            }
            for (int j = 0; j < u_data.length; j++) {
                for (int j2 = 0; j2 < u_data[0].length; j2++) {
                    float uu = u_data[j][j2];
                    float vv = v_data[j][j2];
                    // Values > 100 look like fill/land cells.
                    // NOTE(review): only u is checked — confirm v shares the same mask.
                    if (uu > 100) {
                        continue;
                    }
                    float direction = (float) Utils.getDirection(uu, vv);
                    float speed = (float) Math.sqrt(uu * uu + vv * vv);
                    StringBuilder sb = new StringBuilder();
                    sb.append(dt).append(',')
                      .append(tm).append(',')
                      .append(j2).append(',')  // x = lon index
                      .append(j).append(',')   // y = lat index
                      .append(direction).append(',')
                      .append(speed)
                      .append(System.lineSeparator());
                    fileWriter.write(sb.toString());
                }
            }
            fileWriter.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Bulk-loads the batch CSV into tableName with a server-side COPY.
     * <p>
     * NOTE(review): COPY FROM 'path' runs on the database host, so
     * outFilePath must be readable by the PostgreSQL server process.
     *
     * @param outFilePath CSV file produced by {@link #writeData2File}
     * @param tableName   target table
     */
    private static void dumpNCData(String outFilePath, String tableName) {
        SqlSession sqlSession = SqlSessionFactoryUtil.getSqlSessionFactory().openSession();
        try {
            Connection connection = sqlSession.getConnection();
            // try-with-resources: the Statement no longer leaks on error.
            try (Statement stm = connection.createStatement()) {
                String sql = "copy " + tableName + " from '" + outFilePath + "'  DELIMITER ',' csv header";
                System.out.println("执行入库:" + sql);
                stm.execute(sql);
                System.out.println("入库完成");
            }
            connection.commit();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Closing the session also releases its underlying connection.
            sqlSession.close();
        }
    }
}
