package com.fudan.run.dataset;

import java.io.Serializable;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.rdd.JdbcRDD;

import com.fudan.cfg.base.DatasetDef;
import com.fudan.cfg.dataset.MysqlDataset;
import com.fudan.run.JobRunner;
import com.fudan.run.ctx.annotation.DatasetAdapter;

import scala.reflect.ClassManifestFactory$;
import scala.runtime.AbstractFunction0;
import scala.runtime.AbstractFunction1;

//@DatasetAdapter("mysql")
/**
 * Dataset adapter that reads a MySQL table into a Spark {@code JavaRDD} of
 * column-name → value maps, using Spark's {@link JdbcRDD} to partition the
 * read over a {@code limit ?,?} range taken from the dataset definition.
 */
public class MysqlRdd extends DatasetHandler<MysqlDataset>{

	/**
	 * Builds the RDD for the given MySQL dataset definition.
	 *
	 * @param context      job runner supplying the {@link JavaSparkContext}
	 * @param mysqlDataset configuration: table, optional condition, row range
	 *                     ({@code getFrom()}/{@code getSize()}) and partition count
	 * @return one {@code Map<String,Object>} per row, keyed by column name
	 * @throws RuntimeException if the MySQL JDBC driver is not on the classpath
	 */
	@Override
	public JavaRDD<Map<String, Object>> rdd(JobRunner context, MysqlDataset mysqlDataset) {
		try {
			Class.forName("com.mysql.jdbc.Driver");
		} catch (ClassNotFoundException e) {
			// Preserve the cause so the missing-driver classpath problem stays diagnosable.
			throw new RuntimeException("没有找到mysql的驱动", e);
		}
		String table = mysqlDataset.getTable();
		String condition = mysqlDataset.getCondition();
		if (condition == null || condition.isEmpty()) {
			condition = "1=1"; // no filter configured: match every row
		}
		// NOTE(review): table/condition are concatenated into the SQL text. JdbcRDD only
		// parameterizes the two "?" bounds of the limit clause, so these values must come
		// from trusted configuration, never from end-user input (SQL injection risk).
		String sql = "select * from " + table + " where " + condition + " limit ?,?";
		// JdbcRDD is a Scala API; the raw Map element type is forced by the ClassManifest
		// created from Map.class, hence the narrowly-scoped suppression.
		@SuppressWarnings({"unchecked", "rawtypes"})
		JdbcRDD<Map> jdbcRdd = new JdbcRDD<Map>(
				JavaSparkContext.toSparkContext(context.getJavaSparkContext()),
				new getConn(mysqlDataset),
				sql,
				mysqlDataset.getFrom(),
				mysqlDataset.getSize(),
				mysqlDataset.getPartition(),
				new MapResult(),
				ClassManifestFactory$.MODULE$.fromClass(Map.class));
		// Re-copy each raw Map into a typed HashMap so callers get Map<String,Object>.
		JavaRDD<Map<String, Object>> rdd = jdbcRdd.toJavaRDD().map(row -> {
			Map<String, Object> typed = new HashMap<>();
			typed.putAll(row);
			return typed;
		});
		return rdd;
	}

	/** Serializable connection factory executed on the Spark executors. */
	static class getConn extends AbstractFunction0<Connection> implements Serializable {

		private MysqlDataset datasetNode;

		public getConn(MysqlDataset datasetNode) {
			this.datasetNode = datasetNode;
		}

		/**
		 * Opens a new JDBC connection from the dataset's source URL and credentials.
		 *
		 * @throws RuntimeException if the connection cannot be established; failing
		 *         fast here replaces the original null return, which would otherwise
		 *         surface later as an opaque NullPointerException inside JdbcRDD
		 */
		@Override
		public Connection apply() {
			Properties properties = new Properties();
			properties.setProperty("user", datasetNode.getUsername());
			properties.setProperty("password", datasetNode.getPassword());
			try {
				return DriverManager.getConnection(datasetNode.getSource(), properties);
			} catch (SQLException e) {
				throw new RuntimeException("无法连接mysql: " + datasetNode.getSource(), e);
			}
		}
	}

	/** Maps one ResultSet row to a column-name → value map. */
	static class MapResult extends AbstractFunction1<ResultSet, Map> implements Serializable {

		/**
		 * Converts the current row of {@code row} into a {@code HashMap}.
		 *
		 * @throws RuntimeException on any JDBC error; the original code swallowed the
		 *         exception and silently returned a partial/empty row map
		 */
		@Override
		public Map apply(ResultSet row) {
			Map<String, Object> map = new HashMap<>();
			try {
				// Hoist the metadata lookup out of the loop — the original fetched it
				// again for every column of every row.
				ResultSetMetaData meta = row.getMetaData();
				int columnCount = meta.getColumnCount();
				for (int i = 1; i <= columnCount; i++) {
					map.put(meta.getColumnName(i), row.getObject(i));
				}
			} catch (SQLException e) {
				throw new RuntimeException("读取mysql结果集失败", e);
			}
			return map;
		}
	}
}
