/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.linkis.engineplugin.spark.parse.hql;

import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * BaseSemanticAnalyzer.
 *
 */
public class BaseSemanticAnalyzer {
	private static final int[] multiplier = new int[] {1000, 100, 10, 1};

	private static final Map<Integer, String> TokenToTypeName = new HashMap<Integer, String>();
	static {
		TokenToTypeName.put(HiveParser.TOK_BOOLEAN, org.apache.hadoop.hive.serde.serdeConstants.BOOLEAN_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_TINYINT, org.apache.hadoop.hive.serde.serdeConstants.TINYINT_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_SMALLINT, org.apache.hadoop.hive.serde.serdeConstants.SMALLINT_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_INT, org.apache.hadoop.hive.serde.serdeConstants.INT_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_BIGINT, org.apache.hadoop.hive.serde.serdeConstants.BIGINT_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_FLOAT, org.apache.hadoop.hive.serde.serdeConstants.FLOAT_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_DOUBLE, org.apache.hadoop.hive.serde.serdeConstants.DOUBLE_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_STRING, org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_CHAR, org.apache.hadoop.hive.serde.serdeConstants.CHAR_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_VARCHAR, org.apache.hadoop.hive.serde.serdeConstants.VARCHAR_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_BINARY, org.apache.hadoop.hive.serde.serdeConstants.BINARY_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_DATE, org.apache.hadoop.hive.serde.serdeConstants.DATE_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_DATETIME, org.apache.hadoop.hive.serde.serdeConstants.DATETIME_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, org.apache.hadoop.hive.serde.serdeConstants.TIMESTAMP_TYPE_NAME);
		TokenToTypeName.put(HiveParser.TOK_DECIMAL, org.apache.hadoop.hive.serde.serdeConstants.DECIMAL_TYPE_NAME);
	}

	public static String getTypeStringFromAST(ASTNode typeNode)
			throws SemanticException {
		switch (typeNode.getType()) {
			case HiveParser.TOK_LIST:
				return SerdeConstants.LIST_TYPE_NAME + "<"
						+ getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ">";
			case HiveParser.TOK_MAP:
				return SerdeConstants.MAP_TYPE_NAME + "<"
						+ getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ","
						+ getTypeStringFromAST((ASTNode) typeNode.getChild(1)) + ">";
			case HiveParser.TOK_STRUCT:
				return getStructTypeStringFromAST(typeNode);
			case HiveParser.TOK_UNIONTYPE:
				return getUnionTypeStringFromAST(typeNode);
			default:
				return getTypeName(typeNode);
		}
	}

	public static String getTypeSuffixStringFromAST(ASTNode typeNode)
			throws SemanticException {
		switch (typeNode.getType()) {
			case HiveParser.TOK_LIST:
				return "<"
						+ getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ">";
			case HiveParser.TOK_MAP:
				return "<"
						+ getTypeStringFromAST((ASTNode) typeNode.getChild(0)) + ","
						+ getTypeStringFromAST((ASTNode) typeNode.getChild(1)) + ">";
			case HiveParser.TOK_STRUCT:
				return getStructTypeSuffixStringFromAST(typeNode);
			case HiveParser.TOK_UNIONTYPE:
				return getUnionTypeSuffixStringFromAST(typeNode);
			default:
				return "";
		}
	}

	public static String getTypeName(ASTNode node) throws SemanticException {
		int token = node.getType();
		String typeName;

		// datetime type isn't currently supported
		if (token == HiveParser.TOK_DATETIME) {
			throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
		}

		switch (token) {
			case HiveParser.TOK_CHAR:
				CharTypeInfo charTypeInfo = getCharTypeInfo(node);
				typeName = charTypeInfo.getQualifiedName();
				break;
			case HiveParser.TOK_VARCHAR:
				VarcharTypeInfo varcharTypeInfo = getVarcharTypeInfo(node);
				typeName = varcharTypeInfo.getQualifiedName();
				break;
			case HiveParser.TOK_DECIMAL:
				DecimalTypeInfo decTypeInfo = getDecimalTypeTypeInfo(node);
				typeName = decTypeInfo.getQualifiedName();
				break;
			default:
				typeName = TokenToTypeName.get(token);
		}
		return typeName;
	}

	public static DecimalTypeInfo getDecimalTypeTypeInfo(ASTNode node)
			throws SemanticException {
		if (node.getChildCount() > 2) {
			throw new SemanticException("Bad params for type decimal");
		}

		int precision = HiveDecimal.USER_DEFAULT_PRECISION;
		int scale = HiveDecimal.USER_DEFAULT_SCALE;

		if (node.getChildCount() >= 1) {
			String precStr = node.getChild(0).getText();
			precision = Integer.valueOf(precStr);
		}

		if (node.getChildCount() == 2) {
			String scaleStr = node.getChild(1).getText();
			scale = Integer.valueOf(scaleStr);
		}

		return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
	}

	public static CharTypeInfo getCharTypeInfo(ASTNode node)
			throws SemanticException {
		if (node.getChildCount() != 1) {
			throw new SemanticException("Bad params for type char");
		}

		String lengthStr = node.getChild(0).getText();
		return TypeInfoFactory.getCharTypeInfo(Integer.valueOf(lengthStr));
	}

	public static VarcharTypeInfo getVarcharTypeInfo(ASTNode node)
			throws SemanticException {
		if (node.getChildCount() != 1) {
			throw new SemanticException("Bad params for type varchar");
		}

		String lengthStr = node.getChild(0).getText();
		return TypeInfoFactory.getVarcharTypeInfo(Integer.valueOf(lengthStr));
	}

	private static String getStructTypeSuffixStringFromAST(ASTNode typeNode)
			throws SemanticException {
		String typeStr = "<";
		typeNode = (ASTNode) typeNode.getChild(0);
		int children = typeNode.getChildCount();
		if (children <= 0) {
			throw new SemanticException("empty struct not allowed.");
		}
		StringBuilder buffer = new StringBuilder(typeStr);
		for (int i = 0; i < children; i++) {
			ASTNode child = (ASTNode) typeNode.getChild(i);
			buffer.append(unescapeIdentifier(child.getChild(0).getText())).append(":");
			buffer.append(getTypeStringFromAST((ASTNode) child.getChild(1)));
			if (i < children - 1) {
				buffer.append(",");
			}
		}

		buffer.append(">");
		return buffer.toString();
	}


	private static String getStructTypeStringFromAST(ASTNode typeNode)
			throws SemanticException {
		String typeStr = SerdeConstants.STRUCT_TYPE_NAME + "<";
		typeNode = (ASTNode) typeNode.getChild(0);
		int children = typeNode.getChildCount();
		if (children <= 0) {
			throw new SemanticException("empty struct not allowed.");
		}
		StringBuilder buffer = new StringBuilder(typeStr);
		for (int i = 0; i < children; i++) {
			ASTNode child = (ASTNode) typeNode.getChild(i);
			buffer.append(unescapeIdentifier(child.getChild(0).getText())).append(":");
			buffer.append(getTypeStringFromAST((ASTNode) child.getChild(1)));
			if (i < children - 1) {
				buffer.append(",");
			}
		}

		buffer.append(">");
		return buffer.toString();
	}

	private static String getUnionTypeSuffixStringFromAST(ASTNode typeNode)
			throws SemanticException {
		String typeStr = "<";
		typeNode = (ASTNode) typeNode.getChild(0);
		int children = typeNode.getChildCount();
		if (children <= 0) {
			throw new SemanticException("empty union not allowed.");
		}
		StringBuilder buffer = new StringBuilder(typeStr);
		for (int i = 0; i < children; i++) {
			buffer.append(getTypeStringFromAST((ASTNode) typeNode.getChild(i)));
			if (i < children - 1) {
				buffer.append(",");
			}
		}
		buffer.append(">");
		typeStr = buffer.toString();
		return typeStr;
	}

	private static String getUnionTypeStringFromAST(ASTNode typeNode)
			throws SemanticException {
		String typeStr = SerdeConstants.UNION_TYPE_NAME + "<";
		typeNode = (ASTNode) typeNode.getChild(0);
		int children = typeNode.getChildCount();
		if (children <= 0) {
			throw new SemanticException("empty union not allowed.");
		}
		StringBuilder buffer = new StringBuilder(typeStr);
		for (int i = 0; i < children; i++) {
			buffer.append(getTypeStringFromAST((ASTNode) typeNode.getChild(i)));
			if (i < children - 1) {
				buffer.append(",");
			}
		}
		buffer.append(">");
		typeStr = buffer.toString();
		return typeStr;
	}

	/**
	 * Converts parsed key/value properties pairs into a map.
	 *
	 * @param prop ASTNode parent of the key/value pairs
	 *
	 * @param mapProp property map which receives the mappings
	 */
	public static void readProps(
			ASTNode prop, Map<String, String> mapProp) {

		for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
			String key = unescapeSQLString(prop.getChild(propChild).getChild(0)
					.getText());
			String value = null;
			if (prop.getChild(propChild).getChild(1) != null) {
				value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText());
			}
			mapProp.put(key, value);
		}
	}


	/**
	 * Get the unqualified name from a table node.
	 *
	 * This method works for table names qualified with their schema (e.g., "db.table")
	 * and table names without schema qualification. In both cases, it returns
	 * the table name without the schema.
	 *
	 * @param node the table node
	 * @return the table name without schema qualification
	 *         (i.e., if name is "db.table" or "table", returns "table")
	 */
	public static String getUnescapedUnqualifiedTableName(ASTNode node) {
		assert node.getChildCount() <= 2;

		if (node.getChildCount() == 2) {
			node = (ASTNode) node.getChild(1);
		}

		return getUnescapedName(node);
	}


	public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) {
		assert(tableNameNode.getToken().getType() == HiveParser.TOK_TABNAME);
		if (tableNameNode.getChildCount() == 2) {
			String dbName = unescapeIdentifier(tableNameNode.getChild(0).getText());
			String tableName = unescapeIdentifier(tableNameNode.getChild(1).getText());
			return Pair.of(dbName, tableName);
		} else {
			String str = tableNameNode.getChild(0).getText();
			if(str.contains(".")){
				String[] pair = str.split("\\.");
				String dbName = pair[0].replace("`", "");
				String tableName = pair[1].replace("`", "");
				return Pair.of(dbName, tableName);
			}else {
				String tableName = unescapeIdentifier(str);
				return Pair.of(null,tableName);
			}
		}
	}

	public static String unescapeIdentifier(String val) {
	    if (val == null) {
	      return null;
	    }
	    if (val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`') {
	      val = val.substring(1, val.length() - 1);
	    }
	    return val;
	  }

	public static String escapeIdentifier(String val) {
		if (val == null) {
			return null;
		}
		return '`' + val + '`';
	}
	
	public static String getUnescapedName(ASTNode tableOrColumnNode) {
	    return getUnescapedName(tableOrColumnNode, null);
	  }
	
	public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) {
	    int tokenType = tableOrColumnNode.getToken().getType();
	    if (tokenType == HiveParser.TOK_TABNAME) {
	      // table node
	      if (tableOrColumnNode.getChildCount() == 2) {
	        String dbName = unescapeIdentifier(tableOrColumnNode.getChild(0).getText());
	        String tableName = unescapeIdentifier(tableOrColumnNode.getChild(1).getText());
	        return dbName + "." + tableName;
	      }
	      String tableName = unescapeIdentifier(tableOrColumnNode.getChild(0).getText());
	      if (currentDatabase != null) {
	        return currentDatabase + "." + tableName;
	      }
	      return tableName;
	    } else if (tokenType == HiveParser.StringLiteral) {
	      return unescapeSQLString(tableOrColumnNode.getText());
	    }
	    // column node
	    return unescapeIdentifier(tableOrColumnNode.getText());
	  }
	
	
	public static String unescapeSQLString(String b) {
	    Character enclosure = null;

	    // Some of the strings can be passed in as unicode. For example, the
	    // delimiter can be passed in as \002 - So, we first check if the
	    // string is a unicode number, else go back to the old behavior
	    StringBuilder sb = new StringBuilder(b.length());
	    for (int i = 0; i < b.length(); i++) {

	      char currentChar = b.charAt(i);
	      if (enclosure == null) {
	        if (currentChar == '\'' || b.charAt(i) == '\"') {
	          enclosure = currentChar;
	        }
	        // ignore all other chars outside the enclosure
	        continue;
	      }

	      if (enclosure.equals(currentChar)) {
	        enclosure = null;
	        continue;
	      }

	      if (currentChar == '\\' && (i + 6 < b.length()) && b.charAt(i + 1) == 'u') {
	        int code = 0;
	        int base = i + 2;
	        for (int j = 0; j < 4; j++) {
	          int digit = Character.digit(b.charAt(j + base), 16);
	          code += digit * multiplier[j];
	        }
	        sb.append((char)code);
	        i += 5;
	        continue;
	      }

	      if (currentChar == '\\' && (i + 4 < b.length())) {
	        char i1 = b.charAt(i + 1);
	        char i2 = b.charAt(i + 2);
	        char i3 = b.charAt(i + 3);
	        if ((i1 >= '0' && i1 <= '1') && (i2 >= '0' && i2 <= '7')
	            && (i3 >= '0' && i3 <= '7')) {
	          byte bVal = (byte) ((i3 - '0') + ((i2 - '0') * 8) + ((i1 - '0') * 8 * 8));
	          byte[] bValArr = new byte[1];
	          bValArr[0] = bVal;
	          String tmp = new String(bValArr);
	          sb.append(tmp);
	          i += 3;
	          continue;
	        }
	      }

	      if (currentChar == '\\' && (i + 2 < b.length())) {
	        char n = b.charAt(i + 1);
	        switch (n) {
	        case '0':
	          sb.append("\0");
	          break;
	        case '\'':
	          sb.append("'");
	          break;
	        case '"':
	          sb.append("\"");
	          break;
	        case 'b':
	          sb.append("\b");
	          break;
	        case 'n':
	          sb.append("\n");
	          break;
	        case 'r':
	          sb.append("\r");
	          break;
	        case 't':
	          sb.append("\t");
	          break;
	        case 'Z':
	          sb.append("\u001A");
	          break;
	        case '\\':
	          sb.append("\\");
	          break;
	        // The following 2 lines are exactly what MySQL does TODO: why do we do this?
	        case '%':
	          sb.append("\\%");
	          break;
	        case '_':
	          sb.append("\\_");
	          break;
	        default:
	          sb.append(n);
	        }
	        i++;
	      } else {
	        sb.append(currentChar);
	      }
	    }
	    return sb.toString();
	  }

	/**
	 * 获取nil的下一个节点
	 * @return node at which descent stopped
	 */
	public static ASTNode findRootNonNullToken(ASTNode tree) {
		while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
			tree = (ASTNode) tree.getChild(0);
		}
		return tree;
	}

	/**
	 * 判断是否是DDL类型的sql：（create，alter，drop）
	 * @param input
	 * @throws SemanticException
	 */
	public static Boolean analyzeIsDDL(ASTNode input) throws SemanticException {
		Boolean result = false;
		ASTNode ast = findRootNonNullToken(input);
		switch (ast.getType()) {
			case HiveParser.TOK_CREATETABLE:
				result = true;
				break;
			case HiveParser.TOK_ALTERTABLE:
				// 修改表的操作
				switch (ast.getChild(1).getType()) {
					case HiveParser.TOK_ALTERTABLE_ADDPARTS:
					case HiveParser.TOK_ALTERTABLE_ADDCOLS:
					case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
					case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
					case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
					case HiveParser.TOK_ALTERTABLE_DROPPARTS:
					case HiveParser.TOK_ALTERTABLE_PROPERTIES:
					case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
					case HiveParser.TOK_ALTERTABLE_RENAME:
					case HiveParser.TOK_ALTERTABLE_RENAMECOL:
					case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
					case HiveParser.TOK_ALTERTABLE_SERIALIZER:
					case HiveParser.TOK_ALTERTABLE_TOUCH:
					case HiveParser.TOK_ALTERTABLE_ARCHIVE:
					case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
					case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
					case HiveParser.TOK_ALTERTABLE_SKEWED:
					case HiveParser.TOK_ALTERTABLE_FILEFORMAT:
					case HiveParser.TOK_ALTERTABLE_LOCATION:
					case HiveParser.TOK_ALTERTABLE_MERGEFILES:
					case HiveParser.TOK_ALTERTABLE_RENAMEPART:
					case HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION:
					case HiveParser.TOK_ALTERTABLE_BUCKETS:
					case HiveParser.TOK_ALTERTABLE_COMPACT:
						result = true;
						break;
				}
				break;
			case HiveParser.TOK_DROPTABLE:
				result = true;
				break;
			case HiveParser.TOK_TRUNCATETABLE:
				result = true;
				break;
			case HiveParser.TOK_MSCK:
				result = true;
				break;
			case HiveParser.TOK_DROPVIEW:
				result = true;
				break;
			case HiveParser.TOK_ALTERVIEW: {
				result = true;
				break;
			}
			case HiveParser.TOK_LOCKTABLE:
				result = true;
				break;
			case HiveParser.TOK_UNLOCKTABLE:
				result = true;
				break;
			case HiveParser.TOK_LOCKDB:
				result = true;
				break;
			case HiveParser.TOK_UNLOCKDB:
				result = true;
				break;
			case HiveParser.TOK_CREATEDATABASE:
				result = true;
				break;
			case HiveParser.TOK_DROPDATABASE:
				result = true;
				break;
			case HiveParser.TOK_SWITCHDATABASE:
				break;
			case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
				result = true;
				break;
			case HiveParser.TOK_ALTERDATABASE_OWNER:
				result = true;
				break;
			case HiveParser.TOK_CREATEROLE:
				result = true;
				break;
			case HiveParser.TOK_DROPROLE:
				result = true;
				break;
			case HiveParser.TOK_GRANT_ROLE:
				result = true;
				break;
			case HiveParser.TOK_REVOKE_ROLE:
				result = true;
				break;
			case HiveParser.TOK_GRANT:
				result = true;
				break;
			case HiveParser.TOK_REVOKE:
				result = true;
				break;
			default:
				result = false;
				// throw new SemanticException("Unsupported command: " + ast);
		}
		return result;
	}


	public static class RowFormatParams {
		String fieldDelim = null;
		String fieldEscape = null;
		String collItemDelim = null;
		String mapKeyDelim = null;
		String lineDelim = null;
		String nullFormat = null;

		public void analyzeRowFormat(ASTNode child) throws SemanticException {
			child = (ASTNode) child.getChild(0);
			int numChildRowFormat = child.getChildCount();
			for (int numC = 0; numC < numChildRowFormat; numC++) {
				ASTNode rowChild = (ASTNode) child.getChild(numC);
				switch (rowChild.getToken().getType()) {
					case HiveParser.TOK_TABLEROWFORMATFIELD:
						fieldDelim = unescapeSQLString(rowChild.getChild(0)
								.getText());
						if (rowChild.getChildCount() >= 2) {
							fieldEscape = unescapeSQLString(rowChild
									.getChild(1).getText());
						}
						break;
					case HiveParser.TOK_TABLEROWFORMATCOLLITEMS:
						collItemDelim = unescapeSQLString(rowChild
								.getChild(0).getText());
						break;
					case HiveParser.TOK_TABLEROWFORMATMAPKEYS:
						mapKeyDelim = unescapeSQLString(rowChild.getChild(0)
								.getText());
						break;
					case HiveParser.TOK_TABLEROWFORMATLINES:
						lineDelim = unescapeSQLString(rowChild.getChild(0)
								.getText());
						if (!lineDelim.equals("\n")
								&& !lineDelim.equals("10")) {
							throw new SemanticException(
									ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg());
						}
						break;
					case HiveParser.TOK_TABLEROWFORMATNULL:
						nullFormat = unescapeSQLString(rowChild.getChild(0)
								.getText());
						break;
					default:
						throw new AssertionError("Unkown Token: " + rowChild);
				}
			}
		}

		public String getFieldDelim() {
			return fieldDelim;
		}

		public String getFieldEscape() {
			return fieldEscape;
		}

		public String getCollItemDelim() {
			return collItemDelim;
		}

		public String getMapKeyDelim() {
			return mapKeyDelim;
		}

		public String getLineDelim() {
			return lineDelim;
		}

		public String getNullFormat() {
			return nullFormat;
		}
	}
}
