package main;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.Properties;

import utils.ConnectionUtil;
import utils.GetJobUtils;

public class GetShell {
	// Configuration backing every path/SQL/credential lookup in this class.
	private Properties properties = new Properties();
	// Timestamp suffix for generated files; instance-confined, so the
	// (non-thread-safe) SimpleDateFormat is safe here.
	private SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss");

	private static Connection con = null;
	private static Statement stmt = null;

	/**
	 * Loads the default configuration bundled in the jar
	 * ({@code /config/config1.properties}).
	 *
	 * @throws IOException if the classpath resource is missing or unreadable
	 */
	public GetShell() throws IOException {
		// Null-check added: getResourceAsStream returns null (not an exception)
		// when the resource is absent; the original would NPE inside load().
		try (InputStream in = GetShell.class.getResourceAsStream("/config/config1.properties")) {
			if (in == null) {
				throw new IOException("classpath resource /config/config1.properties not found");
			}
			properties.load(in);
		}
	}

	/**
	 * Loads configuration from an explicit file path.
	 *
	 * @param path filesystem path of the properties file
	 * @throws IOException if the file cannot be read
	 */
	public GetShell(String path) throws IOException {
		// try-with-resources: the original leaked this stream.
		try (InputStream in = new FileInputStream(path)) {
			properties.load(in);
		}
	}

	/**
	 * Extracts distinct job_name values from the import log configured under
	 * {@code log_path}.
	 *
	 * @return de-duplicated job names, one per distinct log line
	 * @throws IOException if the log file cannot be read
	 */
	public ArrayList<String> getImportLog() throws IOException {
		HashSet<String> set = new HashSet<String>();
		// try-with-resources: the original only closed the reader on success.
		// NOTE(review): reader uses the platform default charset, as before —
		// confirm the log encoding and consider an explicit charset.
		try (BufferedReader br = new BufferedReader(new InputStreamReader(
				new FileInputStream(properties.getProperty("log_path"))))) {
			String line;
			while ((line = br.readLine()) != null) {
				if (line.isEmpty()) {
					continue; // skip blank lines
				}
				// NOTE(review): ". . imported " is a regex, so each '.' matches ANY
				// character — presumably meant as a literal marker; confirm against
				// real log lines before tightening.
				set.add(line.replaceAll(". . imported ", "")
						.split(":")[0].split(" ")[0].replaceAll("\"", ""));
			}
		}
		ArrayList<String> ryArray = new ArrayList<String>(set);
		System.out.println("抽取"+ryArray.size()+"条 job_name");
		return ryArray;
	}

	/**
	 * Resolves job_id values for the given job_name IN-list.
	 *
	 * @param inv   comma-separated, already-quoted job_name list spliced into
	 *              the SQL IN(...) clause
	 * @param stmts open JDBC statement to query with (caller owns its lifecycle)
	 * @return job_id values, one per matching jobinfo row
	 * @throws SQLException on query failure
	 */
	public ArrayList<String> getJobId(String inv, Statement stmts) throws SQLException {
		ArrayList<String> jobIdArray = new ArrayList<String>();
		// SECURITY: 'inv' is concatenated directly into the SQL text. Today it is
		// derived from a local log file, but this is injectable — prefer a
		// PreparedStatement with generated '?' placeholders for the IN-list.
		// Trailing ';' removed from the statement text: JDBC statements must not
		// end with a semicolon on Oracle, and updateJob() below already omits it.
		try (ResultSet rs = stmts.executeQuery(
				"select job_id,job_name from jobinfo where job_name in("
				+ inv
				+ ")")) {
			while (rs.next()) {
				jobIdArray.add(rs.getString(1));
			}
		}
		return jobIdArray;
	}

	public static void main(String[] args) throws Exception {
		GetShell shell = new GetShell();
		ArrayList<String> jobNameArray = shell.getImportLog();
		shell.getSrcConnection();
		try {
			GetJobUtils utils = new GetJobUtils();
			String jobNmaeInv = utils.getJobNameString(jobNameArray);
			ArrayList<String> jobIdArray = shell.getJobId(jobNmaeInv, stmt);
			shell.updateJob(jobNmaeInv, stmt);
			shell.createMianShell("0", jobIdArray, jobNameArray);
		} finally {
			// Release the JDBC resources the original leaked.
			if (stmt != null) {
				stmt.close();
			}
			if (con != null) {
				con.close();
			}
		}
	}

	/**
	 * Marks the given jobs as enabled (enable=1) by job_name.
	 *
	 * @param inv   comma-separated, already-quoted job_name list for IN(...)
	 * @param stmts open JDBC statement (caller owns its lifecycle)
	 * @throws SQLException on update failure
	 */
	public void updateJob(String inv, Statement stmts) throws SQLException {
		// SECURITY: same concatenation concern as getJobId — see comment there.
		stmts.executeUpdate("update jobinfo set enable=1 where job_name in("
				+ inv
				+ ")"
				);
	}

	/**
	 * Generates the main driver shell script that chains the HBase
	 * table-creation script, the Spark import script, and the newline-stripping
	 * utility. (Name kept as-is — "Mian" — for caller compatibility.)
	 *
	 * @param dseDBType    source DB type: "0" oracle, "1" mysql
	 * @param jobIdArray   job ids to process
	 * @param jobNameArray job names written to the replaceEnter config
	 * @throws IOException if any generated file cannot be written
	 */
	public void createMianShell(String dseDBType, ArrayList<String> jobIdArray, ArrayList<String> jobNameArray) throws IOException {
		final String cd = "cd ";
		final String n = "\n";
		final String sh = "sh ";
		String createHabseTableShell = createCreateHabseTableShell(jobIdArray);
		String sparkImportToHbaseShell = createSparkImportToHbaseShell(dseDBType, jobIdArray);
		createReplaceEnterConfig(jobNameArray);
		final String fileName = properties.getProperty("mainShell.path") + properties.getProperty("mainShell.name")
				+ format.format(new Date()) + ".sh";
		// try-with-resources: the original leaked the writer on any write failure.
		try (FileWriter fw = new FileWriter(fileName)) {
			fw.write(cd + properties.getProperty("createHbaseTableShell.path") + n);
			fw.write(sh + createHabseTableShell + n);
			fw.write(cd + properties.getProperty("sparkImportToHbaseShell.path") + n);
			fw.write(sh + sparkImportToHbaseShell + n);
			fw.write(cd + properties.getProperty("replaceEnter.shellPath") + n);
			// NOTE(review): writes "sh <path><name>" right after cd'ing into <path> —
			// the absolute prefix looks redundant but is preserved as-is.
			fw.write(sh + properties.getProperty("replaceEnter.shellPath")
					+ properties.getProperty("replaceEnter.shellName") + n);
		}
		System.out.println("mian脚本以生成 "+fileName);
	}

	/**
	 * Writes the config file for the newline-stripping ("replaceEnter") tool,
	 * first backing up any existing config with a timestamp suffix.
	 *
	 * @param jobNameArray job names, one per output line
	 * @throws IOException if the config file cannot be written
	 */
	public void createReplaceEnterConfig(ArrayList<String> jobNameArray) throws IOException {
		File file = new File(properties.getProperty("replaceEnter.path") + "replaceEnter");
		File filebkpath = new File(properties.getProperty("replaceEnter.path.bk"));
		if (!filebkpath.exists()) {
			filebkpath.mkdirs(); // create the backup directory if absent
		}
		File fileBk = new File(properties.getProperty("replaceEnter.path.bk")
				+ "replaceEnter." + format.format(new Date()));
		// NOTE(review): renameTo() result is ignored (as in the original) — a
		// failed backup is silently overwritten by the write below; confirm
		// whether that is acceptable.
		file.renameTo(fileBk);

		String fileName = properties.getProperty("replaceEnter.path") + "replaceEnter";
		// try-with-resources: the original leaked the writer on write failure.
		try (FileWriter writer = new FileWriter(fileName)) {
			for (int i = 0; i < jobNameArray.size(); i++) {
				writer.write(jobNameArray.get(i) + "\n");
			}
		}
	}

	/**
	 * Generates the HBase table-creation shell script, one invocation per job id.
	 *
	 * @param jobIdArray job ids (used as both table name arguments)
	 * @return path of the generated script
	 * @throws IOException if the script cannot be written
	 */
	public String createCreateHabseTableShell(ArrayList<String> jobIdArray) throws IOException {
		final String spr = " ";
		final String n = "\n";
		final String fileName = properties.getProperty("createHbaseTableShell.path")
				+ "create-hbase-tables-" + format.format(new Date()) + ".sh";
		final String shell = "sh " + properties.getProperty("createHbaseTableShell.name");
		final String tableSpase = properties.getProperty("createHbaseTableShell.tableSpace");
		// try-with-resources: the original leaked the writer on write failure.
		try (FileWriter fw = new FileWriter(fileName)) {
			for (String v : jobIdArray) {
				fw.write(shell + spr + v + spr + v + spr + tableSpase + n);
			}
		}
		return fileName;
	}

	/**
	 * Generates the Spark-import-to-HBase shell script, one invocation per job id.
	 *
	 * @param dseDBType  source DB type: "0" oracle, "1" mysql
	 * @param jobIdArray job ids to import
	 * @return path of the generated script
	 * @throws IOException if the script cannot be written
	 */
	public String createSparkImportToHbaseShell(String dseDBType, ArrayList<String> jobIdArray) throws IOException {
		final String spr = " ";
		final String n = "\n";
		final String fileName = properties.getProperty("sparkImportToHbaseShell.path")
				+ "spark-import-to-hbase-" + format.format(new Date()) + ".sh";
		final String shell = "sh " + properties.getProperty("sparkImportToHbaseShell.name");
		// try-with-resources: the original leaked the writer on write failure.
		try (FileWriter fw = new FileWriter(fileName)) {
			for (String v : jobIdArray) {
				fw.write(shell + spr + v + spr + v + spr + dseDBType + n);
			}
		}
		return fileName;
	}

	/**
	 * Opens the source DB connection from the {@code url}/{@code user}/
	 * {@code password} properties and caches a Statement in the static fields.
	 * Caller (main) is responsible for closing them.
	 *
	 * @throws Exception propagated from ConnectionUtil / JDBC
	 */
	private void getSrcConnection() throws Exception {
		con = ConnectionUtil.getConnent(properties.getProperty("url"), properties.getProperty("user"),
				properties.getProperty("password"));
		stmt = con.createStatement();
	}

}
