package com.chenjj.etl.datatrans.converter;


import com.chenjj.etl.datatrans.hdfs.HdfsClientException;
import com.chenjj.etl.datatrans.hdfs.HdfsClientUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;

public class DataConverter {

    private static final Logger logger = LoggerFactory.getLogger(DataConverter.class);

    /** Field delimiter written between columns in the output file (ASCII 0x01, Hive's default). */
    private static final char FIELD_DELIMITER = (char) 1;

    /**
     * Reads a gzip-compressed fixed-width file directly from HDFS, converts it to a
     * {@code \001}-delimited file, and writes the result to {@code outputFilePath}.
     *
     * @param gzFilePath     path of the compressed fixed-width data file
     * @param flgFilePath    path of the .flg metadata file describing the column layout
     * @param outputFilePath path of the delimited output file (written as UTF-8)
     * @param characterset   charset of the compressed data file
     * @throws RuntimeException if the flg file cannot be parsed, or the conversion fails
     */
    public static void convertGzFile(String gzFilePath, String flgFilePath, String outputFilePath, String characterset) {
        // 1. Parse the flg file to obtain the column layout.
        List<DataMetaInfo> meta = parseFlgFile(flgFilePath);

        // 2. Stream-decompress the gz file and convert it row by row.
        InputStream in = null;
        GZIPInputStream gzis = null;
        try {
            in = HdfsClientUtil.getInstance().open(gzFilePath);
            gzis = new GZIPInputStream(in);
            convert(gzis, outputFilePath, meta, characterset);
        } catch (Exception e) {
            throw new RuntimeException("转换并输出新文件失败", e);
        } finally {
            // Close the wrapper first, then the underlying stream.
            closeQuietly(gzis);
            closeQuietly(in);
        }
        logger.info("定长转定界成功,outputFilePath={}", outputFilePath);
    }

    /**
     * Decompresses {@code gzFilePath} to {@code uncompressFilePath} on HDFS, then converts the
     * decompressed fixed-width file to a {@code \001}-delimited file at {@code outputFilePath}.
     *
     * @param gzFilePath         path of the compressed fixed-width data file
     * @param uncompressFilePath path where the decompressed intermediate file is written
     * @param flgFilePath        path of the .flg metadata file describing the column layout
     * @param outputFilePath     path of the delimited output file (written as UTF-8)
     * @param characterset       charset of the data file
     * @param delete             if {@code true}, the intermediate decompressed file is removed
     *                           after a successful conversion
     * @throws RuntimeException if decompression, flg parsing, or the conversion fails
     */
    public static void convertGzFile(String gzFilePath, String uncompressFilePath, String flgFilePath,
                                     String outputFilePath, String characterset, boolean delete) {
        // 1. Decompress the gz file on HDFS.
        try {
            HdfsClientUtil.getInstance().uncompress(gzFilePath, uncompressFilePath);
        } catch (HdfsClientException e) {
            throw new RuntimeException("解压文件失败", e);
        }
        logger.info("解压文件成功,gziFilelPaht={},uncompressFilePath={}", gzFilePath, uncompressFilePath);

        // 2. Parse the flg file to obtain the column layout.
        List<DataMetaInfo> meta = parseFlgFile(flgFilePath);

        // 3. Convert the decompressed file row by row.
        InputStream in = null;
        BufferedInputStream bis = null;
        try {
            in = HdfsClientUtil.getInstance().open(uncompressFilePath);
            bis = new BufferedInputStream(in);
            convert(bis, outputFilePath, meta, characterset);
        } catch (Exception e) {
            throw new RuntimeException("转换并输出新文件失败", e);
        } finally {
            // Close the wrapper first, then the underlying stream.
            closeQuietly(bis);
            closeQuietly(in);
        }
        logger.info("定长转定界成功,outputFilePath={}", outputFilePath);

        // 4. Optionally remove the intermediate decompressed file.
        if (delete) {
            HdfsClientUtil.getInstance().rm(uncompressFilePath);
        }
    }

    /**
     * Parses the .flg metadata file: skips everything up to and including the line containing
     * {@code COLUMNDESCRIPTION=}, then builds one {@link DataMetaInfo} per non-blank line.
     *
     * @param flgFilePath path of the .flg file on HDFS
     * @return column metadata in file order
     * @throws RuntimeException if the file cannot be read or parsed
     */
    private static List<DataMetaInfo> parseFlgFile(String flgFilePath) {
        List<DataMetaInfo> meta = new ArrayList<DataMetaInfo>();
        BufferedReader reader = null;
        try {
            // NOTE(review): reads with the platform default charset, as the original code did —
            // confirm the flg files are always ASCII/platform-compatible.
            reader = new BufferedReader(new InputStreamReader(HdfsClientUtil.getInstance().open(flgFilePath)));
            String line;
            boolean inColumnSection = false;
            while ((line = reader.readLine()) != null) {
                if (line.contains("COLUMNDESCRIPTION=")) {
                    inColumnSection = true;
                    continue;
                }
                if (inColumnSection && !line.trim().isEmpty()) {
                    DataMetaInfo field = new DataMetaInfo(line);
                    logger.info("DataFiled add : " + field.toString());
                    meta.add(field);
                }
            }
        } catch (Exception e) {
            throw new RuntimeException("解析flg文件失败", e);
        } finally {
            closeQuietly(reader);
        }
        logger.info("解析flg文件成功,flgFilePath={}", flgFilePath);
        return meta;
    }

    /**
     * Reads fixed-width rows from {@code dataIn} and writes them as delimited UTF-8 lines to
     * {@code outputFilePath}. The row length is derived from the end index of the last column.
     *
     * @param dataIn         decompressed fixed-width byte stream (not closed by this method)
     * @param outputFilePath HDFS path of the delimited output file
     * @param meta           column layout (1-based start/end byte indices)
     * @param characterset   charset used to decode each field
     * @throws Exception on any read/write failure, or a RuntimeException when the stream ends
     *                   in the middle of a row (truncated data file)
     */
    private static void convert(InputStream dataIn, String outputFilePath,
                                List<DataMetaInfo> meta, String characterset) throws Exception {
        BufferedWriter bw = null;
        try {
            bw = new BufferedWriter(
                    new OutputStreamWriter(HdfsClientUtil.getInstance().create(outputFilePath), "UTF-8"));

            int rowLength = meta.get(meta.size() - 1).getEndIndex() + 1; // bytes per fixed-width row
            byte[] row = new byte[rowLength];
            int rowIndex = 0;
            int readCount;
            while ((readCount = dataIn.read(row)) != -1) {
                rowIndex++;
                // A single read() may return fewer bytes than a full row; keep reading into
                // the same buffer until the row is complete.
                while (readCount < rowLength) {
                    int n = dataIn.read(row, readCount, rowLength - readCount);
                    if (n == -1) {
                        // EOF mid-row: the data file is truncated or the layout is wrong.
                        throw new RuntimeException("数据文件异常,rownIndex=" + rowIndex + ",readCount=" + readCount);
                    }
                    readCount += n;
                }
                bw.write(formatRow(row, meta, characterset));
                bw.newLine();
            }
            bw.flush();
        } finally {
            // Closing the outermost writer flushes and closes the whole chain.
            closeQuietly(bw);
        }
    }

    /**
     * Decodes one fixed-width row into a delimited line: each field is extracted by its
     * 1-based [start, end] byte range, decoded, trimmed, and cleaned of embedded newlines
     * and delimiter characters, then joined with {@link #FIELD_DELIMITER}.
     */
    private static String formatRow(byte[] row, List<DataMetaInfo> meta, String characterset)
            throws UnsupportedEncodingException {
        StringBuilder line = new StringBuilder();
        for (int i = 0; i < meta.size(); i++) {
            DataMetaInfo df = meta.get(i);
            int start = df.getStartIndex() - 1;                         // metadata indices are 1-based
            int length = df.getEndIndex() - df.getStartIndex() + 1;
            String value = new String(row, start, length, characterset).trim();
            // Newlines would break the line-oriented output; the delimiter would shift columns.
            value = value.replace('\n', ' ').replace(FIELD_DELIMITER, ' ');
            if (i > 0) {
                line.append(FIELD_DELIMITER);
            }
            line.append(value);
        }
        return line.toString();
    }

    /**
     * Closes a resource, logging (never propagating) any {@link IOException} so that
     * cleanup failures cannot mask the primary exception.
     */
    private static void closeQuietly(Closeable c) {
        if (c != null) {
            try {
                c.close();
            } catch (IOException e) {
                logger.error("close failed", e);
            }
        }
    }

}
