package org.databandtech.mockmq;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.zip.GZIPInputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.archivers.zip.Zip64Mode;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipParameters;
import org.apache.commons.compress.compressors.gzip.GzipUtils;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.FileUtils;
import org.databandtech.mockmq.entity.EpgVod;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/*
 * 如果数据源是gz压缩文件，从压缩文件中读取记录发送到kafka
 */
/*
 * When the data source is a gz-compressed file, read records from the
 * decompressed files and forward them to Kafka. Decompression of the
 * original .gz archives is handled by a separate process; this class
 * only picks up the resulting .cdr files.
 */
public class GzFileRead {

	private static final Logger logger = LoggerFactory.getLogger(GzFileRead.class);

	/** Charset used when reading the decompressed CDR files. */
	public static final String CHARSET_UTF8 = "UTF-8";

	/**
	 * Scans the monitored directory for decompressed CDR files and sends every
	 * line of each new file to Kafka, then reports the elapsed time.
	 *
	 * @param args unused
	 * @throws IOException if listing or reading a file fails
	 */
	public static void main(String[] args) throws IOException {
		String monitoringDir = "D:/home/";

		// Files already forwarded to Kafka; guards against re-sending the same
		// file if the scan runs repeatedly within one process lifetime.
		List<File> fileProcessedList = new ArrayList<File>();

		// 1. Monitor for newly decompressed files (intended to run every N minutes).
		//    listFiles returns a Collection<File>; copy into an ArrayList instead
		//    of relying on an unchecked cast to List.
		List<File> fileList = new ArrayList<File>(
				FileUtils.listFiles(new File(monitoringDir), new String[] { "cdr", "CDR" }, true));
		fileList.forEach(FileUtil::showFilename);

		// 2. Read each new file into a list of lines and publish every record to Kafka.
		long starttime = System.currentTimeMillis();

		for (File file : fileList) {
			if (!fileProcessedList.contains(file)) { // process only files not seen before
				List<String> fileStrList = FileUtils.readLines(file, CHARSET_UTF8);
				for (String str : fileStrList) {
					KafkaUtils.send(str);
				}
				fileProcessedList.add(file);
			}
		}

		long endtime = System.currentTimeMillis();
		long duration = endtime - starttime;
		// NOTE(review): message text kept as-is (runtime output); routed through
		// the SLF4J logger instead of System.out for consistency with the field above.
		logger.info("完成。总计时长： {}", duration);

	}

}
