package com.surfilter.massdata.spark.input;

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.DataFrameReader;
import com.act.sparkanalyz.service.impl.SparkService.ServiceContext;
import com.act.sparkanalyz.input.ISparkInput;
public class OracleSparkInput implements ISparkInput {

	private static final long serialVersionUID = 1L;

	private static final Log log = LogFactory.getLog(OracleSparkInput.class);

	// Connection settings — presumably injected by the job configuration
	// framework (no setters visible in this file); verify against the caller.
	private String sql;    // optional query; when set it is wrapped as a dbtable sub-query
	private String driver; // JDBC driver class name
	private String url;    // JDBC connection URL
	private String user;   // database user
	private String passwd; // database password
	private String table;  // source table name, also used as the temp-table name

	/**
	 * Loads rows from Oracle via Spark's JDBC data source and registers the
	 * resulting DataFrame as a temporary table named {@code table}.
	 *
	 * @param serviceContext supplies the SQLContext used to build the reader
	 * @param commandMap     currently unused by this implementation
	 * @return a map from {@code table} to the loaded DataFrame; empty if the
	 *         load failed (errors are logged, not propagated)
	 * @throws Exception declared by the interface; this implementation catches
	 *         and logs internally instead of throwing
	 */
	@Override
	public Map<String, DataFrame> read(ServiceContext serviceContext, Map<String, String> commandMap) throws Exception {
		Map<String, DataFrame> result = new HashMap<String, DataFrame>();
		try {
			Map<String, String> options = new HashMap<String, String>();
			options.put("url", url);
			options.put("driver", driver);
			// BUGFIX: "dbtable" was hard-coded to "br2005", which disagreed with
			// the configured table name used below for registration. Use the
			// optional SQL (as a named sub-query, as required by the JDBC source)
			// when present, otherwise the configured table.
			if (StringUtils.isNotBlank(sql)) {
				options.put("dbtable", "(" + sql + ") t");
			} else {
				options.put("dbtable", table);
			}
			options.put("user", user);
			options.put("password", passwd);

			DataFrameReader reader = serviceContext.getSqlCtx().read().format("jdbc").options(options);
			DataFrame dataFrame = reader.load();
			// Expose the data to subsequent Spark SQL stages under the table name.
			dataFrame.registerTempTable(table);
			//dataFrame.persist(StorageLevel.MEMORY_AND_DISK_SER());
			result.put(table, dataFrame);
		} catch (Exception e) {
			// Best-effort: log and fall through to an empty result map so the
			// pipeline can decide how to handle a missing input.
			if (log.isErrorEnabled()) {
				log.error(e.getMessage(), e);
			}
		}
		return result;
	}

}
