package com.fudan.run.dataset;

import java.sql.Timestamp;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import com.fudan.cfg.base.DatasetDef;
import com.fudan.cfg.dataset.MysqlDataset;
import com.fudan.cfg.dataset.MysqlFDataset;
import com.fudan.cfg.dataset.OracleDataset;
import com.fudan.run.JobRunner;
import com.fudan.run.ctx.annotation.DatasetAdapter;

/**
 * Dataset adapter that reads a MySQL table through Spark's JDBC source and
 * exposes each row as a {@code Map<String, Object>} keyed by column name.
 *
 * <p>{@link Timestamp} column values are converted to {@code String} using the
 * pattern {@code yyyy-MM-dd HH:mm:ss}; all other values are passed through
 * unchanged.
 *
 * <p>NOTE(review): the adapter key is {@code "mysql"} but this handler binds
 * {@link MysqlFDataset} (not {@code MysqlDataset}, which is also imported) —
 * verify the key does not collide with another registered handler.
 */
@DatasetAdapter("mysql")
public class MysqlFRdd extends DatasetHandler<MysqlFDataset> {

	// DateTimeFormatter is immutable and thread-safe (see java.time docs);
	// cache it once instead of rebuilding it for every Timestamp value of
	// every row inside the map() lambda.
	private static final DateTimeFormatter TIMESTAMP_FORMAT =
			DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

	/**
	 * Loads the configured MySQL table as an RDD of column-name → value maps.
	 *
	 * @param runContext    job runner providing the active {@code SparkSession}
	 * @param mysqlFDataset connection/table configuration; if its name is
	 *                      unset it is defaulted to the table name (mutates
	 *                      the passed-in config object)
	 * @return one {@code Map<String, Object>} per table row, with Timestamp
	 *         values rendered as {@code yyyy-MM-dd HH:mm:ss} strings
	 */
	@Override
	public JavaRDD<Map<String, Object>> rdd(JobRunner runContext, MysqlFDataset mysqlFDataset) {
		// Default the dataset name to the table name when not configured.
		if (mysqlFDataset.getName() == null || mysqlFDataset.getName().isEmpty()) {
			mysqlFDataset.setName(mysqlFDataset.getTable());
		}
		Dataset<Row> ds = runContext.getSparkSession().read()
				.format("jdbc")
				.option("driver", "com.mysql.cj.jdbc.Driver")
				.option("url", mysqlFDataset.getSource())
				.option("dbtable", mysqlFDataset.getTable())
				.option("user", mysqlFDataset.getUsername())
				.option("password", mysqlFDataset.getPassword())
				.load();
		// Optional row filter expressed as a Spark SQL predicate.
		if (mysqlFDataset.getCondition() != null && !mysqlFDataset.getCondition().trim().isEmpty()) {
			ds = ds.where(mysqlFDataset.getCondition());
		}
		return ds.toJavaRDD().map(row -> {
			String[] columns = row.schema().fieldNames();
			Map<String, Object> record = new HashMap<>(columns.length);
			for (int i = 0; i < columns.length; i++) {
				Object value = row.get(i);
				// instanceof is already false for null, so no separate null
				// check is needed (the original "&& value != null" was dead code).
				if (value instanceof Timestamp) {
					value = ((Timestamp) value).toLocalDateTime().format(TIMESTAMP_FORMAT);
				}
				record.put(columns[i], value);
			}
			return record;
		});
	}

}
