package com.czy.kettle;

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.DatabaseMetaData;
import java.util.*;

import org.pentaho.big.data.kettle.plugins.hive.Hive2DatabaseMeta;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap;
import org.pentaho.di.core.hadoop.HadoopConfigurationInfo;
import org.pentaho.di.core.hadoop.HadoopConfigurationPrompter;
import org.pentaho.di.core.hadoop.HadoopSpoonPlugin;
import org.pentaho.di.core.lifecycle.LifecycleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.plugins.*;
import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta;

public class Example {

    /** Maximum number of lines retained by the central Kettle log buffer. */
    private static final int KETTLE_LOG_MAX_SIZE = 5000;

    /** Minutes before buffered Kettle log lines time out and are discarded. */
    private static final int KETTLE_MAX_LOG_TIMEOUT_MINUTES = 720;

    // NOTE(review): unused in this class — candidate for removal once confirmed
    // no external tooling reads it via reflection.
    private static final int DEFAULT_DATABASE_META_ID = 100;

    /** Name/description applied to the local file repository and the Hive database. */
    private static final String KETTLE_DEFAULT_NAME = "default";

    /** Hadoop configuration id the prompter selects; must exist under hadoop-configurations. */
    private static final String ACTIVE_HADOOP_CONFIGURATION = "hdp30";

    /**
     * Boots the Kettle environment, registers the HadoopSpoonPlugin lifecycle
     * plugin and the Hive2 database type, then attempts a test connection
     * against a local HiveServer2 instance.
     *
     * @param args unused
     * @throws KettleException       if Kettle init or plugin registration fails
     * @throws LifecycleException    if the Hadoop configuration bootstrap fails
     * @throws MalformedURLException if the rewritten plugin URL is invalid
     */
    public static void main(String[] args) throws KettleException, LifecycleException, MalformedURLException {
        KettleLogStore.init(KETTLE_LOG_MAX_SIZE, KETTLE_MAX_LOG_TIMEOUT_MINUTES);
        KettleEnvironment.init();

        configureFileRepository();
        registerHadoopSpoonPlugin();
        initHadoopConfiguration();
        registerHiveDatabaseType();
        listRegisteredDatabaseTypes();
        testHiveConnection();
    }

    /**
     * Builds the local file-repository metadata.
     * NOTE(review): the returned meta is never handed to a repository connector
     * in this class — presumably Kettle picks it up elsewhere; confirm it is
     * actually needed.
     */
    private static void configureFileRepository() {
        File path = new File("samples/repository");
        KettleFileRepositoryMeta meta = new KettleFileRepositoryMeta();
        meta.setBaseDirectory(path.getAbsolutePath());
        meta.setDescription(KETTLE_DEFAULT_NAME);
        meta.setName(KETTLE_DEFAULT_NAME);
        meta.setReadOnly(false);
        meta.setHidingHiddenFiles(true);
    }

    /**
     * Registers the HadoopSpoonPlugin lifecycle plugin, which reads its
     * plugin.properties at environment init time.
     */
    private static void registerHadoopSpoonPlugin() throws KettleException, MalformedURLException {
        String classname = "org.pentaho.di.core.hadoop.HadoopSpoonPlugin";
        PluginMainClassType mainClassType = LifecyclePluginType.class.getAnnotation(PluginMainClassType.class);

        Map<Class<?>, String> classMap = new HashMap<Class<?>, String>();
        classMap.put(mainClassType.value(), classname);

        // When running from a fat jar the classloader root URL contains
        // "classes!" which breaks resource resolution; rewrite it so the
        // plugin files can be located.
        URL resource = Example.class.getClassLoader().getResource("");
        if (resource == null) {
            // Fail loudly instead of the NPE the original code would throw here.
            throw new IllegalStateException(
                    "Cannot resolve classloader root resource; plugin registration would fail");
        }
        String tempUri = resource.toString().replace("classes!", "classes");
        URL url = new URL(tempUri);
        System.out.println("new url " + url);

        Plugin plugin = new Plugin(new String[] { HadoopSpoonPlugin.PLUGIN_ID },
                StepPluginType.class,
                mainClassType.value(),
                "", "", "", null, false, false,
                classMap, new ArrayList<String>(), null, url);
        PluginRegistry.getInstance().registerPlugin(LifecyclePluginType.class, plugin);
    }

    /**
     * Initializes the Hadoop environment: the bootstrap reads the configuration
     * named by the prompter from the hadoop-configurations directory.
     */
    private static void initHadoopConfiguration() throws KettleException, LifecycleException {
        HadoopConfigurationBootstrap bootstrap = HadoopConfigurationBootstrap.getInstance();

        // Headless prompter: always pick the configured distribution, never restart.
        bootstrap.setPrompter(new HadoopConfigurationPrompter() {
            @Override
            public String getConfigurationSelection(final List<HadoopConfigurationInfo> hadoopConfigurationInfos) {
                return ACTIVE_HADOOP_CONFIGURATION;
            }

            @Override
            public void promptForRestart() {
                // Intentionally empty: running headless, a restart prompt is meaningless.
            }
        });

        bootstrap.onEnvironmentInit();
    }

    /** Registers the Hive2 database type so DatabaseMeta can resolve "HIVE2". */
    private static void registerHiveDatabaseType() throws KettleException {
        DatabasePluginType.getInstance()
                .registerCustom(Hive2DatabaseMeta.class, "", "HIVE2", "HIVE2", "", "");
    }

    /** Prints every registered database type and whether Hive is among them. */
    private static void listRegisteredDatabaseTypes() {
        List<PluginInterface> plugins = PluginRegistry.getInstance().getPlugins(DatabasePluginType.class);

        boolean hiveFound = false;
        for (PluginInterface pluginIn : plugins) {
            String name = pluginIn.getName();
            if (name.toUpperCase().contains("HIVE")) {
                hiveFound = true;
            }
            System.out.println("当前数据库的name: " + name);
        }
        // NOTE(review): the original contained commented-out code that removed
        // unsupported database plugins from the registry here.

        System.out.println("总的识别数据库支持: " + plugins.size());
        System.out.println("是否包含了Hive：" + hiveFound);
    }

    /** Builds a Hive connection descriptor for localhost:10000 and tests it. */
    private static void testHiveConnection() {
        DatabaseMeta databaseMeta = new DatabaseMeta();
        databaseMeta.setName("hive");
        databaseMeta.setDisplayName("hive");
        databaseMeta.setDatabaseType("HIVE2");
        databaseMeta.setUsername(null);
        databaseMeta.setPassword(null);
        databaseMeta.setDBName(KETTLE_DEFAULT_NAME);
        databaseMeta.setDBPort("10000");
        databaseMeta.setHostname("localhost");

        String s = databaseMeta.testConnection();
        System.out.println("testConnection: " + s);
    }
}
