package com.platform.apex.transform;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
import com.datatorrent.common.util.BaseOperator;
import com.platform.apex.cma.db.Grib2DataInfoManager;
import com.platform.apex.cma.tools.Launcher;
import com.platform.apex.util.FlieNameAndContent;
import com.platform.apex.util.HadoopFileOperUtil;
import com.platform.apex.util.KafkaMessageBus;
import com.platform.apex.util.MetaDataUtil;

import ucar.nc2.NetcdfFile;

/**
 * Apex input operator that reads NetCDF (.nc) files from HDFS, runs them
 * through {@link Launcher#loadNCAndGenerateFile}, and emits the resulting
 * {@link FlieNameAndContent} tuples taken from the shared {@link KafkaMessageBus}
 * queue on the {@code outhgt} port.
 *
 * <p>Configuration is injected via {@link #setHdfs(String)} (HDFS base URI,
 * e.g. {@code hdfs://host:9000}) and {@link #setPathStr(String)} (comma-separated
 * list of HDFS file paths) before {@link #setup} runs.
 *
 * <p>NOTE(review): {@code isRunning}/{@code number} are static mutable state,
 * shared by every instance in the JVM — this presumably assumes a single
 * partition of this operator; confirm before scaling out.
 */
public class HDFSToJsonOperatorHgt extends BaseOperator implements InputOperator {

	// Base HDFS URI; must be set via setHdfs() before deployment.
	private String hdfs;
	// Kept for configuration/serialization compatibility; not read in this class.
	private String user = "root";
	// Transient: re-created in setup() on every (re)deploy, never serialized.
	protected transient FileSystem fs;
	// Paths parsed from pathStr in setup(); mutable copy, never a fixed-size view.
	private List<String> pathList = new ArrayList<>();

	private Launcher launcher;

	// Comma-separated list of input file paths; injected via setPathStr().
	private String pathStr;

	// Guards against re-entering the load while a previous one is in flight.
	private static boolean isRunning = false;

	// Index of the next entry of pathList to process; processing stops after 2.
	private static int number = 0;

	/**
	 * Initializes the HDFS client, the launcher, the metadata DB connector and
	 * the in-memory message bus, and parses {@code pathStr} into {@code pathList}.
	 *
	 * @param context operator context supplied by the Apex engine
	 * @throws RuntimeException if any initialization step fails — a half-initialized
	 *         operator (null fs/launcher) would only NPE later, so fail fast here
	 */
	@Override
	public void setup(OperatorContext context) {
		try {
			HadoopFileOperUtil hdfsUtil = new HadoopFileOperUtil(hdfs);
			fs = hdfsUtil.gethdfs();
			launcher = new Launcher();
			// Guard: a missing path configuration previously caused an NPE on split().
			if (pathStr != null && !pathStr.isEmpty()) {
				// Mutable copy — Arrays.asList alone returns a fixed-size view.
				pathList = new ArrayList<>(Arrays.asList(pathStr.split(",")));
			}
			MetaDataUtil.initDBConnector();
			Grib2DataInfoManager.init(MetaDataUtil.getMetadataConnector());
			KafkaMessageBus.init(100000);
		} catch (Exception e) {
			// Fail fast with the cause preserved instead of swallowing the error
			// and leaving fs/launcher null.
			throw new RuntimeException("HDFSToJsonOperatorHgt setup failed", e);
		}
	}

	/**
	 * Drains at most one tuple per call from the shared bus and emits it.
	 * Called repeatedly by the engine between beginWindow/endWindow.
	 */
	@Override
	public void emitTuples() {
		try {
			if (KafkaMessageBus.size() > 0) {
				FlieNameAndContent take = KafkaMessageBus.take();
				outhgt.emit(take);
			}
		} catch (Exception e) {
			System.out.println(e.getMessage());
		}
	}

	/**
	 * Once per window, kicks off processing of the next configured path when the
	 * bus is drained, no load is in flight, and fewer than 2 paths have been done.
	 *
	 * <p>Path naming convention (from the visible branches): a path containing
	 * "hgt" is a single file; a path containing "uwnd" is a space-separated pair
	 * "&lt;uwnd-file&gt; &lt;vwnd-file&gt;".
	 */
	@Override
	public void beginWindow(long windowId) {
		super.beginWindow(windowId);
		System.out.println(pathList.size() + " " + isRunning + " " + number + " " + KafkaMessageBus.size());
		if (pathList.size() > 0 && !isRunning && number < 2 && KafkaMessageBus.size() == 0) {
			isRunning = true;
			String hdfsPath = pathList.get(number);
			try {
				List<String> paths = new ArrayList<>();
				if (hdfsPath.contains("hgt")) {
					paths.add(hdfsPath);
					NetcdfFile ncfile = getNcfile(hdfsPath);
					launcher.loadNCAndGenerateFile(paths, ncfile, null);
				} else if (hdfsPath.contains("uwnd")) {
					String[] pair = hdfsPath.split(" ");
					String fileuwnd = pair[0];
					String filevwnd = pair[1];
					paths.add(fileuwnd);
					paths.add(filevwnd);
					NetcdfFile ncfile = getNcfile(fileuwnd);
					NetcdfFile ncfile1 = getNcfile(filevwnd);
					launcher.loadNCAndGenerateFile(paths, ncfile, ncfile1);
				}
				// Advance only on success so a failed path can be retried.
				number++;
			} catch (Exception e) {
				e.printStackTrace();
			} finally {
				// BUG FIX: previously isRunning was reset only on the success path,
				// so any exception left it true forever and permanently blocked
				// all further processing.
				isRunning = false;
			}
		}
	}

	@Override
	public void endWindow() {
		// No per-window bookkeeping required.
	}

	/** Output port emitting FlieNameAndContent tuples taken from the bus. */
	public final transient DefaultOutputPort<Object> outhgt = new DefaultOutputPort<Object>();

	/**
	 * Reads an HDFS file fully into memory and opens it as a NetCDF dataset.
	 *
	 * @param filePath absolute HDFS path of the .nc file
	 * @return the in-memory NetcdfFile, or {@code null} if reading/opening failed
	 */
	public NetcdfFile getNcfile(String filePath) {
		NetcdfFile ncfile = null;
		Path path = new Path(filePath);
		// try-with-resources: the original leaked the HDFS stream when an
		// exception was thrown mid-read (close() was only on the success path).
		try (InputStream is = fs.open(path);
				ByteArrayOutputStream bts = new ByteArrayOutputStream()) {
			byte[] buffer = new byte[8192];
			int size;
			while ((size = is.read(buffer)) > 0) {
				bts.write(buffer, 0, size);
			}
			ncfile = NetcdfFile.openInMemory(path.getName(), bts.toByteArray());
		} catch (Exception e) {
			e.printStackTrace();
			System.out.println(e.getMessage());
		}
		return ncfile;
	}

	public String getHdfs() {
		return hdfs;
	}

	public void setHdfs(String hdfs) {
		this.hdfs = hdfs;
	}

	public String getPathStr() {
		return pathStr;
	}

	public void setPathStr(String pathStr) {
		this.pathStr = pathStr;
	}

}
