package org.databandtech.job.config;

import org.databandtech.job.entity.SaveMeta;
import org.databandtech.job.entity.ScheduledTaskJob;
import org.databandtech.job.jobs.CommandExecuteJob;
import org.databandtech.job.jobs.HdfsBackupJob;
import org.databandtech.job.jobs.HdfsToJdbcSqoop1Job;
import org.databandtech.job.jobs.HdfsToLocalFileJob;
import org.databandtech.job.jobs.HiveSqlExecuteJob;
import org.databandtech.job.jobs.HiveSqlQueryJob;
import org.databandtech.job.sink.HiveSqlQueryJob1MySQLSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class TaskConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger(TaskConfig.class);

    /**
     * Creates the scheduler thread pool used to run the scheduled task jobs.
     *
     * @return a configured {@link ThreadPoolTaskScheduler} with 8 worker threads,
     *         graceful shutdown (waits up to 30s for running tasks), and removal
     *         of cancelled tasks from the work queue
     */
    @Bean
    public ThreadPoolTaskScheduler threadPoolTaskScheduler() {
        LOGGER.info("创建定时任务调度线程池 start");
        ThreadPoolTaskScheduler threadPoolTaskScheduler = new ThreadPoolTaskScheduler();
        threadPoolTaskScheduler.setPoolSize(8);
        threadPoolTaskScheduler.setThreadNamePrefix("task-");
        // Drain in-flight tasks on shutdown instead of interrupting them outright.
        threadPoolTaskScheduler.setWaitForTasksToCompleteOnShutdown(true);
        // Evict cancelled tasks from the queue immediately so they don't hold memory.
        threadPoolTaskScheduler.setRemoveOnCancelPolicy(true);
        threadPoolTaskScheduler.setAwaitTerminationSeconds(30);
        LOGGER.info("创建定时任务调度线程池 end");
        return threadPoolTaskScheduler;
    }

    /**
     * Builds the registry of scheduled task jobs, keyed by each job's key.
     *
     * <p>Only {@code hiveSqlQueryJob1} is currently registered; the remaining job
     * instances are disabled examples kept for reference (their {@code map.put}
     * calls are commented out, matching the original configuration).
     *
     * @return a mutable map of job key to {@link ScheduledTaskJob}
     */
    @Bean(name = "scheduledTaskJobMap")
    public Map<String, ScheduledTaskJob> scheduledTaskJobMap() {
        Map<String, ScheduledTaskJob> map = new HashMap<>();

        // Disabled example: back up a local CSV file to HDFS every 20 seconds.
        HdfsBackupJob hdfsBackupJob1 = new HdfsBackupJob("hdfs_toLocal2020",
                "C:\\logs\\csv\\product\\PC-2020.csv",
                "hdfs://hadoop001:8020/user/csv/product/",
                "0/20 * * * * ?");
        //map.put(hdfsBackupJob1.getKey(), hdfsBackupJob1);

        // Disabled example: pull an HDFS directory down to a local CSV file.
        HdfsToLocalFileJob hdfsToLocalFileJob1 = new HdfsToLocalFileJob("hdfs_tolocal2020",
                "hdfs://hadoop001:8020/user/csv/product/", "C:\\logs\\csv\\product\\PC-2020.csv",
                "0/20 * * * * ?");
        //map.put(hdfsToLocalFileJob1.getKey(), hdfsToLocalFileJob1);

        HdfsToLocalFileJob hdfsToLocalFileJob2 = new HdfsToLocalFileJob("hdfs_toLocal2020_1",
                "hdfs://hadoop001:8020/user/csv/product/", "C:\\logs\\csv\\product\\PC-2020-1.csv",
                "0/10 * * * * ?");
        //map.put(hdfsToLocalFileJob2.getKey(), hdfsToLocalFileJob2);

        // Disabled examples: run OS shell commands on a schedule.
        CommandExecuteJob jobWindowsDir1 = new CommandExecuteJob("WindowsDir1", "cmd.exe /c dir c:\\", "", "0/15 * * * * ?");
        //map.put(jobWindowsDir1.getKey(), jobWindowsDir1);

        CommandExecuteJob jobWindowsIP1 = new CommandExecuteJob("WindowsIP1", "ipconfig /all", "", "0/35 * * * * ?");
        //map.put(jobWindowsIP1.getKey(), jobWindowsIP1);

        // ####### Hive query task START #######
        // NOTE(security): database credentials are hardcoded in source; consider
        // externalizing them to application properties / environment variables.
        String url = "jdbc:mysql://localhost:3307/databand?useUnicode=true&characterEncoding=utf-8&useSSL=false";
        String user = "root";
        String pass = "mysql";
        String sql = "INSERT INTO fromhive(buycount,citycode,saledatetime) VALUES (?,?,?);";
        String[] fieldNames = {"field1", "field2"};
        String[] keyField = {"username", "orderid"};
        // Primitive boolean is sufficient here; it autoboxes at the SaveMeta call site.
        boolean isInsertOrUpdate = false;
        SaveMeta meta = new SaveMeta(url, user, pass, sql, fieldNames, keyField, isInsertOrUpdate);

        // Query Hive every 35 seconds and sink the rows into MySQL.
        HiveSqlQueryJob1MySQLSink mysqlSink = new HiveSqlQueryJob1MySQLSink(meta);
        String urlHive = "jdbc:hive2://hadoop001:10000/default";
        String sqlHive = "select buycount,citycode,saledatetime from product limit 10";
        HiveSqlQueryJob hiveSqlQueryJob1 = new HiveSqlQueryJob("hiveSqlQueryJob1", "0/35 * * * * ?", urlHive, sqlHive, mysqlSink);
        map.put(hiveSqlQueryJob1.getKey(), hiveSqlQueryJob1);
        // ####### Hive query task END #######

        // Note: the Hive CLI accepts a trailing semicolon, but JDBC SQL must not have one.
        // Example statements:
        //CREATE TABLE table123(id int,abc  string) PARTITIONED BY (insertdate string)
        //DROP TABLE  table123
        //"load data local inpath '/home/data.txt' overwrite into table user_tb"
        String sqlHiveExecute = "CREATE TABLE table1234(id int,abc  string)";
        HiveSqlExecuteJob hiveSqlExecuteJob1 = new HiveSqlExecuteJob("hiveSqlExecuteJob1", urlHive, sqlHiveExecute, "0/35 * * * * ?");
        //map.put(hiveSqlExecuteJob1.getKey(), hiveSqlExecuteJob1);

        // Disabled example: Sqoop1 export from HDFS to a JDBC database.
        HdfsToJdbcSqoop1Job job1 = new HdfsToJdbcSqoop1Job();
        //map.put(job1.getClass().getSimpleName(), job1);
        return map;
    }

}
