package com.platform.apex.transform;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.Operator;
import com.datatorrent.common.util.BaseOperator;
import com.platform.apex.cma.db.Grib2DataInfoManager;
import com.platform.apex.cma.tools.Launcher;
import com.platform.apex.util.HadoopFileOperUtil;
import com.platform.apex.util.MetaDataUtil;

import ucar.nc2.NetcdfFile;

/**
 * Apex operator that receives a single tuple of two space-separated HDFS paths
 * ("&lt;uwnd-file&gt; &lt;vwnd-file&gt;") on {@link #inputPortuwndandvwnd}, reads both
 * NetCDF files from HDFS fully into memory, and hands them to {@link Launcher}
 * to generate the JSON output file. The pair is processed at most once per
 * operator lifetime (guarded by {@code uwndisread}).
 */
public class HDFSToJsonOperatorUwnd extends BaseOperator implements Operator {

	// HDFS name-node URI, e.g. "hdfs://10.0.3.179:9000"; injected via setHdfs().
	private String hdfs;
	// NOTE(review): never read anywhere in this class — presumably the intended
	// HDFS user name; confirm against deployment config before removing.
	private String user = "root";
	// Live HDFS handle; transient because it cannot be serialized at checkpoint.
	protected transient FileSystem fs;
	// Most recently received tuple: "<uwnd path> <vwnd path>".
	private String hdfsPath;

	private Launcher launcher;

	// True once the uwnd/vwnd pair has been processed; prevents reprocessing
	// the same tuple in subsequent windows.
	private boolean uwndisread = false;

	/**
	 * Opens the HDFS connection and initializes the launcher and metadata DB.
	 * <p>
	 * Fails fast instead of swallowing the exception (the original printed the
	 * stack trace and continued): a half-initialized operator would only NPE
	 * later in {@link #endWindow()} / {@link #getNcfile(String)}, far from the
	 * root cause.
	 *
	 * @param context operator context supplied by the platform
	 * @throws RuntimeException if any initialization step fails
	 */
	@Override
	public void setup(OperatorContext context) {
		try {
			HadoopFileOperUtil hdfsUtil = new HadoopFileOperUtil(hdfs);
			fs = hdfsUtil.gethdfs();
			launcher = new Launcher();
			launcher.initHdfsTool(hdfsUtil);

			MetaDataUtil.initDBConnector();
			Grib2DataInfoManager.init(MetaDataUtil.getMetadataConnector());
		} catch (Exception e) {
			// Propagate so the platform marks the operator as failed instead of
			// running with null fs/launcher.
			throw new RuntimeException(
					"HDFSToJsonOperatorUwnd setup failed for hdfs=" + hdfs, e);
		}
	}

	/**
	 * Processes the most recently received path pair exactly once. The tuple
	 * must contain the "uwnd" marker and hold two HDFS paths separated by a
	 * single space; malformed tuples are logged and skipped.
	 */
	@Override
	public void endWindow() {
		if (hdfsPath == null || uwndisread || hdfsPath.indexOf("uwnd") < 0) {
			return;
		}
		try {
			String[] split = hdfsPath.split(" ");
			if (split.length < 2) {
				// Guard: the original indexed split[1] unconditionally and relied
				// on the broad catch to absorb the AIOOBE.
				System.err.println("HDFSToJsonOperatorUwnd: malformed tuple, "
						+ "expected two space-separated paths: " + hdfsPath);
				return;
			}
			String fileuwnd = split[0];
			String filevwnd = split[1];

			List<String> paths = new ArrayList<>();
			paths.add(fileuwnd);
			paths.add(filevwnd);

			NetcdfFile ncfile = getNcfile(fileuwnd);
			NetcdfFile ncfile1 = getNcfile(filevwnd);

			launcher.loadNCAndGenerateFile(paths, ncfile, ncfile1);
			uwndisread = true;
		} catch (Exception e) {
			// Best-effort: keep the operator alive on a bad tuple, matching the
			// original behavior. TODO(review): route to an error port / logger.
			e.printStackTrace();
		}
	}

	/** Input port receiving the "<uwnd path> <vwnd path>" tuple. */
	public final transient DefaultInputPort<String> inputPortuwndandvwnd = new DefaultInputPort<String>() {
		@Override
		public void process(String tuple) {
			hdfsPath = tuple;
		}
	};

	/**
	 * Reads the HDFS file at {@code filePath} fully into memory and opens it
	 * as a NetCDF dataset.
	 *
	 * @param filePath absolute HDFS path of the NetCDF file
	 * @return the in-memory {@link NetcdfFile}, or {@code null} if reading or
	 *         parsing fails (preserving the original best-effort contract)
	 */
	public NetcdfFile getNcfile(String filePath) {
		Path path = new Path(filePath);
		// try-with-resources: the original leaked the HDFS stream whenever
		// read/open threw before the manual close() calls were reached.
		try (InputStream is = fs.open(path);
				ByteArrayOutputStream bts = new ByteArrayOutputStream()) {
			byte[] buffer = new byte[8192];
			int size;
			while ((size = is.read(buffer)) > 0) {
				bts.write(buffer, 0, size);
			}
			return NetcdfFile.openInMemory(path.getName(), bts.toByteArray());
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		}
	}

	/** @return the configured HDFS name-node URI */
	public String getHdfs() {
		return hdfs;
	}

	/** @param hdfs HDFS name-node URI, e.g. "hdfs://host:9000" */
	public void setHdfs(String hdfs) {
		this.hdfs = hdfs;
	}

}
