package com.searshc.hadoop.migration.util;

import java.io.InputStream;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Static configuration and pattern tables for the Hadoop migration analyzer.
 *
 * <p>On class load, merges {@code Application.properties} with the
 * environment-specific properties file selected by the {@code target_server}
 * property (INTEGRATION or PRODUCTION), then exposes connection values and
 * per-script-type regex-pattern-to-error-message tables.
 *
 * <p>Not thread-safe for writes: the public String fields are mutable and
 * populated once by the static initializer.
 */
public class PatternConstants {

	// Merged view of Application.properties + environment properties.
	private static Properties properties = null;

	/** HDFS namenode value (property "Namenode"); set by the static initializer. */
	public static String NAMENODE = null;
	/** Namenode without port (property "Namenode_no_node" — NOTE(review): key name looks like a typo for "Namenode_no_port"; confirm against the properties files). */
	public static String NAMENODE_NO_PORT = null;
	/** NOTE(review): never assigned anywhere in this class, so it stays null — confirm it is populated externally or remove it. */
	public static String APPPATH = null;
	/** JobTracker value (property "JobTracker"); set by the static initializer. */
	public static String JOBTRACKER = null;

	static {
		properties = new Properties();
		try {
			// Base configuration; decides which environment file to merge in.
			loadProperties("Application.properties");
			String targetServer = properties.getProperty("target_server");

			if (targetServer == null) {
				// Fail with a clear message instead of an NPE on equalsIgnoreCase.
				throw new Exception("Property 'target_server' is missing from Application.properties.");
			} else if (targetServer.equalsIgnoreCase("INTEGRATION")) {
				loadProperties("INTEGRATION.properties");
			} else if (targetServer.equalsIgnoreCase("PRODUCTION")) {
				loadProperties("PRODUCTION.properties");
			} else {
				throw new Exception("Target server value is invalid: " + targetServer);
			}
		} catch (Exception e) {
			// Chain the original exception so the real failure appears in the stack trace.
			ExceptionInInitializerError error = new ExceptionInInitializerError(
					"Exception occurred while initializing properties value: " + e.getMessage());
			error.initCause(e);
			throw error;
		}
		NAMENODE = properties.getProperty("Namenode");
		NAMENODE_NO_PORT = properties.getProperty("Namenode_no_node");
		JOBTRACKER = properties.getProperty("JobTracker");
	}

	/** Non-instantiable constants holder. */
	private PatternConstants() {
	}

	/**
	 * Loads the named classpath resource into {@link #properties}.
	 *
	 * @param fileName classpath-relative properties file name
	 * @throws Exception if the resource is absent (explicit message instead of
	 *                   an NPE from {@code Properties.load(null)}) or unreadable
	 */
	private static void loadProperties(String fileName) throws Exception {
		InputStream in = PatternConstants.class.getClassLoader().getResourceAsStream(fileName);
		if (in == null) {
			throw new Exception("Properties file not found on classpath: " + fileName);
		}
		try {
			properties.load(in);
		} finally {
			// Always release the stream; the original leaked it.
			in.close();
		}
	}

	/** Pig 0.9+ incompatibility patterns mapped to user-facing error messages. */
	public static final Map<String, String> pigScriptIssuePatternErrorMsgMap = buildPigScriptIssueMap();

	// Built via a plain local map rather than double-brace initialization
	// (anonymous subclass) to avoid the extra class and enclosing references.
	private static Map<String, String> buildPigScriptIssueMap() {
		Map<String, String> m = new HashMap<String, String>();
		m.put("(DATETIME|RANK|DENSE|CUBE|ROLLUP)", "Field names can't be DATETIME or RANK or DENSE or CUBE or ROLLUP.");
		m.put("(using(\\s+)\"skewed\")", "Replace double quotes with single quotes, in other words \"skewed\" with 'skewed'.");
		m.put("(using(\\s+)\"merge\")", "Replace double quotes with single quotes, in other words \"merge\" with 'merge'.");
		m.put("(using(\\s+)\"replicated\")", "Replace double quotes with single quotes, in other words \"replicated\" with 'replicated'.");
		m.put("(using(\\s+)\"collected\")", "Replace double quotes with single quotes, in other words \"collected\" with 'collected'.");
		m.put("=((\\s?|\\s+)\\(\\w+\\);)", "Meaningless syntax like B=(A); is not allowed anymore.");
		m.put("(filter(.+)by(.+)as)", "'as' clause after the 'filter by' operator is not allowed anymore. For details go <a href=\"https://cwiki.apache.org/confluence/display/PIG/Pig+0.9+Backward+Compatibility\">here</a>");
		m.put("(group(.+)by(.+)as)", "'as' clause after the 'group by' operator is not allowed anymore. For details go <a href =\"https://cwiki.apache.org/confluence/display/PIG/Pig+0.9+Backward+Compatibility\">here</a>");
		m.put("(filter(.+)by(.+)PARALLEL)", "WARNING: PARALLEL on operators that do not start a reducer is deprecated. For details go <a href =\"https://cwiki.apache.org/confluence/display/PIG/Pig+0.9+Backward+Compatibility\">here</a>");
		m.put("(\\(-\\d+\\))", "WARNING: (-number) is treated as a tuple with column value -number. Make sure it doesn't affect your expected results. For details go <a href =\"https://cwiki.apache.org/confluence/display/PIG/Pig+0.9+Backward+Compatibility\">here</a>");
		m.put("(generate(\\s+)flatten(\\s?|\\s+)\\((\\w+)\\))(\\s?|\\s+);", "Need to use a schema while flattening bags. For details go <a href=\"https://issues.apache.org/jira/browse/PIG-1627\">here</a>");
		m.put("store(\\s+)(\\w+)(\\s+)into(\\s+)('(\\w+)'|\"(\\w+)\");", "Need to use storage schema e.g. store B into 'output' using PigStorage('\t', '-schema');");
		m.put("stream(.+)through(.+)(\\-\\w+)(.+)\\3", "Each streaming command option can only be used only once. For details go <a href =\"https://cwiki.apache.org/confluence/display/PIG/Pig+0.9+Backward+Compatibility\">here</a>");
		m.put("load(.+)using(.+)BinStorage(.+)", "WARNING: Converting fields loaded from BinStorage will now result an error. For details go <a href =\"https://issues.apache.org/jira/i#browse/PIG-1745\">here</a>");
		m.put("(DEFINE CMD|STREAM(.+)THROUGH)(.+)(input|output|ship|cache|stderr)(.+)\\4", "Streaming command options like input output ship can only be used once. For details go <a href =\"https://issues.apache.org/jira/i#browse/PIG-1745\">here</a>");
		m.put("(.+)(SIZE|COUNT)\\(\\*\\)(.+)", "SIZE(*) and COUNT(*) no longer equivalent of SIZE($0) and COUNT($0), now SIZE(*) and COUNT(*) will fail at typechecking.");
		return Collections.unmodifiableMap(m);
	}

	/** Hive incompatibility patterns — currently none defined. */
	public static final Map<String, String> hiveScriptIssuePatternErrorMsgMap =
			Collections.unmodifiableMap(new HashMap<String, String>());

	/** Shell-script incompatibility patterns mapped to error messages. */
	public static final Map<String, String> shellScriptIssuePatternErrorMsgMap = buildShellScriptIssueMap();

	private static Map<String, String> buildShellScriptIssueMap() {
		Map<String, String> m = new HashMap<String, String>();
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-lsr(.+)", "lsr command is not valid anymore. Use 'ls -R' command");
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-dus(.+)", "dus command is not valid anymore. Use 'du -s' command");
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-rmr(.+)", "rmr command is not valid anymore. Use 'rm -r' command");
		m.put("(.+)\\-eq(\\s+)(?!0\\b)(?!1\\b)(?!\\-1\\b)", "Exit codes are standardised now. For details go<a href =\"https://issues.apache.org/jira/secure/attachment/12509950/Grid_FsShellImpact.pdf\">here</a>");
		m.put("dfs\\.block\\.size", "Use dfs.blocksize instead");
		return Collections.unmodifiableMap(m);
	}

	/**
	 * Shell "hadoop fs" command patterns mapped to the regex group number that
	 * captures the path argument (used for path extraction, not error text).
	 */
	public static final Map<String, Integer> shellScriptPathIssuePatternErrorMsgMap = buildShellScriptPathIssueMap();

	private static Map<String, Integer> buildShellScriptPathIssueMap() {
		Map<String, Integer> m = new HashMap<String, Integer>();
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-cat(\\s+)(.+)", 5);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-chgrp(\\s+)(\\-R(\\s+))?(.+)(\\s+)(.+)", 9);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-chmod(\\s+)(\\-R(\\s+))?(.+)(\\s+)(.+)", 9);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-chown(\\s+)(\\-R(\\s+))?(.+)(\\s+)(.+)", 9);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-copyFromLocal(\\s+)(.+)(\\s+)(.+)", 6);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-copyToLocal(\\s+)(\\-R(\\s+))?(.+)(\\s+)(.+)", 9);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-count(\\s+)(\\-q(\\s+))?(.+)", 7);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-cp(\\s+)(.+)", 5);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-du(\\s+)(.+)", 5);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-get(\\s+)(\\-(ignorecrc|crc)(\\s+))?(.+)(\\s+)(.+)", 8);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-getmerge(\\s+)(.+)(\\s+)(.+)", 4);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-ls(\\s+)(.+)", 5);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-moveFromLocal(\\s+)(.+)(\\s+)(.+)", 7);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-moveToLocal(\\s+)(\\-crc(\\s+))?(.+)(\\s+)(.+)", 7);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-put(\\s+)(.+)(\\s+)(.+)", 7);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-setrep(\\s+)(\\-R(\\s+))?(.+)", 7);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-stat(\\s+)(.+)", 5);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-tail(\\s+)(\\-f(\\s+))?(.+)", 7);
		m.put("hadoop(\\s+)(fs|dfs)(\\s+)\\-text(\\s+)(.+)", 5);
		return Collections.unmodifiableMap(m);
	}

	/** Java source incompatibility patterns mapped to error messages. */
	public static final Map<String, String> javaIssuePatternErrorMsgMap = buildJavaIssueMap();

	private static Map<String, String> buildJavaIssueMap() {
		Map<String, String> m = new HashMap<String, String>();
		m.put("(org.apache.pig.impl.logicalLayer.parser.ParseException|ParseException)", "Replace org.apache.pig.impl.logicalLayer.parser.ParseException with Exception thrown by method Utils.getSchemaFromString");
		m.put("(.+)jdbc\\:hive\\://(.+)", "Need to change JDBC driver URL from jdbc:hive://hostname:port to jdbc:hive2://hostname:port");
		m.put("(.+)org\\.apache\\.hadoop\\.hive\\.jdbc\\.HiveDriver(.+)", "Need to change driver name to org.apache.hive.jdbc.HiveDriver");
		m.put("org\\.apache\\.hadoop\\.mapred\\.Task\\$Counter", "Need use new group name org.apache.hadoop.mapreduce.TaskCounter instead.");
		m.put("org\\.apache\\.hadoop\\.mapred\\.JobInProgress\\$Counter", "Need use new group name org.apache.hadoop.mapreduce.JobCounter instead.");
		m.put("FileSystemCounters", "Need use new group name org.apache.hadoop.mapreduce.FileSystemCounter instead.");
		return Collections.unmodifiableMap(m);
	}

	// Script-type extension keys (names kept — including the "EXTENTION"
	// misspelling — for backward compatibility with existing callers).
	public static final String SCRIPT_EXTENTION_PIG = "pig";
	public static final String SCRIPT_EXTENTION_HIVE = "hive";
	public static final String SCRIPT_EXTENTION_SHELL = "sh";
	public static final String SCRIPT_EXTENTION_JAVA = "java";

	/** Script extension -> ordinal used elsewhere to dispatch by script type. */
	public static final Map<String, Integer> scripExtentioNumMap = buildExtensionNumMap();

	private static Map<String, Integer> buildExtensionNumMap() {
		Map<String, Integer> m = new HashMap<String, Integer>();
		m.put(SCRIPT_EXTENTION_PIG, 1);
		m.put(SCRIPT_EXTENTION_HIVE, 2);
		m.put(SCRIPT_EXTENTION_SHELL, 3);
		m.put(SCRIPT_EXTENTION_JAVA, 4);
		return Collections.unmodifiableMap(m);
	}

	/** Script extension -> the pattern/message table for that script type. */
	public static final Map<String, Map<String, String>> scripExtentionIssuePatternsMap = buildExtensionPatternsMap();

	private static Map<String, Map<String, String>> buildExtensionPatternsMap() {
		Map<String, Map<String, String>> m = new HashMap<String, Map<String, String>>();
		m.put(SCRIPT_EXTENTION_PIG, pigScriptIssuePatternErrorMsgMap);
		m.put(SCRIPT_EXTENTION_HIVE, hiveScriptIssuePatternErrorMsgMap);
		m.put(SCRIPT_EXTENTION_SHELL, shellScriptIssuePatternErrorMsgMap);
		m.put(SCRIPT_EXTENTION_JAVA, javaIssuePatternErrorMsgMap);
		return Collections.unmodifiableMap(m);
	}
}
