package com.lj.rdbmsToHDFS;

import static org.testng.Assert.assertNotSame;
import static org.testng.AssertJUnit.assertEquals;

import java.util.Properties;

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MConfigList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MPersistableEntity;
import org.apache.sqoop.validation.Status;

import com.lj.conn.DatabaseProvider;
import com.lj.conn.DatabaseProviderFactory;
import com.lj.conn.IRDMSDatabaseProvider;
import com.lj.conn.MysqlConnection;

/**
 * Demonstrates transferring data from an RDBMS (MySQL) to HDFS using the
 * Sqoop 1.99.x client API: creates a JDBC link and an HDFS link, then builds
 * a transfer job between them.
 *
 * <p>NOTE(review): this flow is incomplete — the created {@link MJob} is never
 * configured (from/to job configs) nor saved; see the TODO in {@link #testMysql()}.
 */
public class RDBMSToHDFS {
	// NOTE(review): "duankou" is Chinese for "port" — this is a leftover
	// placeholder where a real port number belongs, so the default URL is
	// malformed. Override with -Dsqoop.server.url=... until a real value is set.
	private final static String serverUrl = System.getProperty(
			"sqoop.server.url", "http://172.16.135.160:duankou/sqoop/");
	// Shared provider supplying JDBC driver/URL/credentials; assigned in testMysql().
	protected static DatabaseProvider provider;

	/**
	 * End-to-end setup: creates and saves a MySQL link and an HDFS link,
	 * then creates a Sqoop job connecting the two.
	 *
	 * @throws ClassNotFoundException if the provider class cannot be loaded
	 * @throws IllegalAccessException if the provider cannot be accessed reflectively
	 * @throws InstantiationException if the provider cannot be instantiated
	 */
	public void testMysql() throws ClassNotFoundException,
			IllegalAccessException, InstantiationException {
		SqoopClient client = new SqoopClient(serverUrl);

		// Create a link originating from the relational database.
		MLink fromRDBMSLink = client.createLink("A_Test_RDBMS_LINK");
		// Since we are testing MySQL here, obtain the MySQL provider.
		Properties pro = new Properties();
		pro.setProperty("sqoop.provider.class", "MySQLProvider");
		provider = DatabaseProviderFactory.getProvider(pro);
		// Fill in the MySQL connection configuration.
		fillRdbmsLinkConfig(fromRDBMSLink);
		// Persist the link (asserts it saved correctly).
		saveLink(fromRDBMSLink, client);

		// Create and persist the HDFS-side link.
		MLink toHDFSLink = client.createLink("HDFS_LINK");
		fillHdfsLink(toHDFSLink);
		saveLink(toHDFSLink, client);

		// Create a Sqoop job from the RDBMS link to the HDFS link.
		MJob job = client.createJob(fromRDBMSLink.getPersistenceId(),
				toHDFSLink.getPersistenceId());
		// TODO(review): the job is created but never configured or saved —
		// fill the from/to job configs (table name, partition column, output
		// directory, ...) and then call saveJob(job, client).
	}

	/**
	 * Fill RDBMS link config (driver, URL, credentials) from the currently
	 * active {@link #provider}.
	 *
	 * @param link MLink object to fill; must expose the generic JDBC
	 *             connector's {@code linkConfig.*} inputs
	 */
	protected void fillRdbmsLinkConfig(MLink link) {
		MConfigList configs = link.getConnectorLinkConfig();
		configs.getStringInput("linkConfig.jdbcDriver").setValue(
				provider.getJdbcDriver());
		configs.getStringInput("linkConfig.connectionString").setValue(
				provider.getConnectionUrl());
		configs.getStringInput("linkConfig.username").setValue(
				provider.getConnectionUsername());
		configs.getStringInput("linkConfig.password").setValue(
				provider.getConnectionPassword());
	}

	/**
	 * Fill HDFS link config with the Hadoop configuration directory.
	 *
	 * @param link MLink object to fill; must expose the HDFS connector's
	 *             {@code linkConfig.confDir} input
	 */
	protected void fillHdfsLink(MLink link) {
		MConfigList configs = link.getConnectorLinkConfig();
		configs.getStringInput("linkConfig.confDir")
				.setValue("/test_rdbms/tmp");
	}

	/**
	 * Save a link on the server, asserting that the save succeeded and that
	 * the server assigned a real persistence id (i.e. not the default).
	 *
	 * @param link   link to persist
	 * @param client Sqoop client connected to the server
	 */
	protected void saveLink(MLink link, SqoopClient client) {
		assertEquals(Status.OK, client.saveLink(link));
		// PERSISTANCE_ID_DEFAULT is the actual (misspelled) Sqoop API constant.
		assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT,
				link.getPersistenceId());
	}

	/**
	 * Save a job on the server, asserting that the save succeeded and that
	 * the server assigned a real persistence id (i.e. not the default).
	 *
	 * @param job    job to persist
	 * @param client Sqoop client connected to the server
	 */
	protected void saveJob(MJob job, SqoopClient client) {
		assertEquals(Status.OK, client.saveJob(job));
		assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT,
				job.getPersistenceId());
	}

	/** Command-line entry point: runs the MySQL → HDFS setup flow. */
	public static void main(String[] args) throws ClassNotFoundException,
			IllegalAccessException, InstantiationException {
		RDBMSToHDFS test = new RDBMSToHDFS();
		test.testMysql();
	}

}
