package com.platform.apex.output;

import org.apache.hadoop.fs.FileSystem;

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.Operator;
import com.platform.apex.util.FlieNameAndContent;
import com.platform.apex.util.HadoopFileOperUtil;

/**
 * Apex output operator that writes each incoming {@link FlieNameAndContent}
 * tuple to HDFS as an individual file (path and content taken from the tuple).
 *
 * <p>Configure the HDFS URI via {@link #setHdfs(String)} before launch.
 * Tuples of any other type are silently dropped.
 */
public class HDFSOutputOperator implements Operator {

	// HDFS URI (e.g. "hdfs://host:port"); injected as an operator property.
	private String hdfs;
	// HDFS user name; kept as a configurable property. NOTE(review): not read by
	// the visible code path — confirm whether HadoopFileOperUtil uses it.
	private String user = "root";
	// Helper performing the actual HDFS writes; created in setup(), hence transient.
	protected transient HadoopFileOperUtil hdfsUtil;

	@Override
	public void setup(OperatorContext context) {
		// Fail fast: swallowing the exception here would leave hdfsUtil null and
		// surface later as an NPE on the first tuple instead of a clear
		// deployment-time error with the real cause attached.
		try {
			hdfsUtil = new HadoopFileOperUtil(hdfs);
		} catch (Exception e) {
			throw new RuntimeException("Failed to initialize HDFS access for URI: " + hdfs, e);
		}
	}

	/**
	 * Input port. Each {@link FlieNameAndContent} tuple (class name typo is
	 * upstream and cannot be fixed here) is written to HDFS at its path name
	 * with its content; all other tuple types are intentionally ignored.
	 */
	public final transient DefaultInputPort<Object> inputPort = new DefaultInputPort<Object>() {
		@Override
		public void process(Object tuple) {
			if (tuple instanceof FlieNameAndContent) {
				FlieNameAndContent file = (FlieNameAndContent) tuple;
				hdfsUtil.createFile(file.getPathName(), file.getContent());
			}
			// Non-matching tuples (including null) are dropped without logging.
		}
	};

	@Override
	public void teardown() {
		// No resources are held directly by this operator. NOTE(review):
		// HadoopFileOperUtil exposes no visible close API here — confirm it
		// does not need explicit shutdown.
	}

	@Override
	public void beginWindow(long windowId) {
		// Stateless per window; nothing to do.
	}

	@Override
	public void endWindow() {
		// Files are written eagerly in process(); nothing to flush per window.
	}

	public String getHdfs() {
		return hdfs;
	}

	public void setHdfs(String hdfs) {
		this.hdfs = hdfs;
	}

	public String getUser() {
		return user;
	}

	public void setUser(String user) {
		this.user = user;
	}

}
