package com.platform.apex.input;

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.poi.ddf.EscherSerializationListener;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
import com.datatorrent.common.util.BaseOperator;
import com.platform.apex.cma.tools.ConvertUtil;
import com.platform.apex.util.HadoopFileOperUtil;

/**
 * Apex input operator that reads a single .xlsx spreadsheet from HDFS once,
 * converts each data row into a {@code Map<header, cellValue>} (the first
 * spreadsheet row supplies the header keys), and then emits those maps one
 * per {@link #emitTuples()} call on {@link #esOutPut}.
 *
 * <p>Configure via {@link #setHdfs(String)} (name-node URI, e.g.
 * {@code hdfs://host:9000}) and {@link #setHdfsPath(String)} (full path of
 * the .xlsx file) before launch.
 */
public class HDFSInoutOperator extends BaseOperator implements InputOperator {

	/** HDFS name-node URI; injected through the property setter. */
	private String hdfs;
	private String user = "root";
	protected transient FileSystem fs;
	/** Full HDFS path of the .xlsx file to load. */
	private String hdfsPath;

	private String pathStr;
	// NOTE(review): never read or written in this class — kept only so existing
	// property/serialization configs referencing it keep working.
	private List<String> pathList;

	protected int oneFileIndex = 0;
	protected int hourIndex = 0;

	/** Rows parsed from the spreadsheet; one map (header -> cell value) per data row. */
	List<Map<String, Object>> esJsonList = new ArrayList<Map<String, Object>>();
	public final transient DefaultOutputPort<Object> esOutPut = new DefaultOutputPort<Object>();

	/** Index of the next element of {@link #esJsonList} to emit. */
	protected int totalnumber;

	/**
	 * One-shot guard for the file load in {@link #beginWindow(long)}.
	 * Was a mutable {@code static} field, which wrongly shared the
	 * "already loaded" state across operator instances in the same JVM;
	 * now a plain instance field.
	 */
	private boolean fileLoaded = false;

	/**
	 * Connects to HDFS. Any failure is logged; {@link #beginWindow(long)}
	 * will then fail fast on the null {@link #fs}.
	 */
	@Override
	public void setup(OperatorContext context) {
		super.setup(context);
		try {
			HadoopFileOperUtil hdfsUtil = new HadoopFileOperUtil(hdfs);
			fs = hdfsUtil.gethdfs();
		} catch (Exception e) {
			// Best-effort: keep the original log-and-continue behavior.
			e.printStackTrace();
		}
	}

	/**
	 * On the first window only, opens {@link #hdfsPath} and parses the
	 * spreadsheet into {@link #esJsonList}. The stream is always closed
	 * (the original implementation leaked it).
	 */
	@Override
	public void beginWindow(long windowId) {
		if (fileLoaded) {
			return;
		}
		fileLoaded = true;
		FSDataInputStream fis = null;
		try {
			fis = fs.open(new Path(hdfsPath));
			loadRows(fis);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			if (fis != null) {
				try {
					fis.close();
				} catch (Exception ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}
	}

	/** Emits at most one parsed row per call until the list is exhausted. */
	@Override
	public void emitTuples() {
		if (totalnumber < esJsonList.size()) {
			esOutPut.emit(esJsonList.get(totalnumber));
			totalnumber++;
		}
	}

	public String getHdfs() {
		return hdfs;
	}

	public void setHdfs(String hdfs) {
		this.hdfs = hdfs;
	}

	public String getUser() {
		return user;
	}

	public void setUser(String user) {
		this.user = user;
	}

	public String getHdfsPath() {
		return hdfsPath;
	}

	public void setHdfsPath(String hdfsPath) {
		this.hdfsPath = hdfsPath;
	}

	public String getPathStr() {
		return pathStr;
	}

	public void setPathStr(String pathStr) {
		this.pathStr = pathStr;
	}

	/**
	 * Parses sheet 0 of the workbook: the first row provides the header keys,
	 * every following row becomes a {@code Map<header, value>} appended to
	 * {@link #esJsonList}. Numeric cells are stored as {@code double}, all
	 * others via {@code Cell.toString()}.
	 *
	 * <p>Fixes over the original {@code xlsxToCsv} (which also never produced
	 * any CSV — its StringBuilder was dead code): null data cells are skipped
	 * instead of dereferenced (NPE), and the column index is bounded by the
	 * header size (IndexOutOfBounds on over-long rows).
	 *
	 * <p>NOTE(review): a null cell in the HEADER row still shifts subsequent
	 * keys left, misaligning columns — preserved from the original; confirm
	 * whether header rows can be sparse.
	 *
	 * @param is open stream over the .xlsx content; caller closes it
	 * @throws Exception on any POI parse failure
	 */
	private void loadRows(InputStream is) throws Exception {
		Workbook wb = WorkbookFactory.create(is);
		Sheet sheet = wb.getSheetAt(0);

		List<String> keyList = new ArrayList<String>();
		boolean headerRead = false;
		for (org.apache.poi.ss.usermodel.Row row : sheet) {
			if (row == null) {
				break;
			}
			int cellnum = row.getLastCellNum();
			if (!headerRead) {
				for (int i = 0; i < cellnum; i++) {
					Cell cell = row.getCell(i);
					if (cell != null) {
						keyList.add(cell.toString());
					}
				}
				headerRead = true;
			} else if (!keyList.isEmpty()) {
				Map<String, Object> map = new HashMap<String, Object>();
				for (int i = 0; i < cellnum && i < keyList.size(); i++) {
					Cell cell = row.getCell(i);
					if (cell == null) {
						continue; // sparse row: skip instead of NPE
					}
					if (cell.getCellType() == Cell.CELL_TYPE_NUMERIC) {
						map.put(keyList.get(i), cell.getNumericCellValue());
					} else {
						map.put(keyList.get(i), cell.toString());
					}
				}
				esJsonList.add(map);
			}
		}
	}

}
