/*
 * Copyright 2016-2018 shardingsphere.io.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * </p>
 */

package com.swak.parser.lexer;

import java.util.Set;

import com.swak.parser.exception.SqlParseException;
import com.swak.parser.lexer.dialect.h2.H2Lexer;
import com.swak.parser.lexer.dialect.mysql.MySQLLexer;
import com.swak.parser.lexer.dialect.oracle.OracleLexer;
import com.swak.parser.lexer.dialect.postgresql.PostgreSQLLexer;
import com.swak.parser.lexer.dialect.sqlserver.SQLServerLexer;
import com.swak.parser.lexer.token.Assist;
import com.swak.parser.lexer.token.Token;
import com.swak.parser.lexer.token.TokenType;
import com.swak.utils.JDBCDrivers;
import com.swak.utils.Sets;

import lombok.RequiredArgsConstructor;

/**
 * Lexical analysis engine.
 *
 * @author zhangliang
 */
@RequiredArgsConstructor
public final class LexerEngine {

	/** Underlying dialect-specific lexer; injected via the Lombok-generated constructor. */
	private final Lexer lexer;

	/**
	 * Get the input SQL string being analyzed.
	 *
	 * @return inputted string
	 */
	public String getInput() {
		return lexer.getInput();
	}

	/**
	 * Advance the lexer to the next token.
	 */
	public void nextToken() {
		lexer.nextToken();
	}

	/**
	 * Judge whether the lexer has reached the end of input.
	 *
	 * @return true if the current token is the end token
	 */
	public boolean isEnd() {
		return Assist.END == lexer.getCurrentToken().getType();
	}

	/**
	 * Get the current token.
	 *
	 * @return current token
	 */
	public Token getCurrentToken() {
		return lexer.getCurrentToken();
	}

	/**
	 * Assert that the current token has the expected type and advance to the next token.
	 *
	 * @param tokenType expected token type
	 * @throws SqlParseException if the current token type does not match {@code tokenType}
	 */
	public void accept(final TokenType tokenType) {
		if (lexer.getCurrentToken().getType() != tokenType) {
			throw new SqlParseException(lexer, tokenType);
		}
		lexer.nextToken();
	}

	/**
	 * Judge whether the current token type equals any of the given token types.
	 * Does not advance the lexer.
	 *
	 * @param tokenTypes token types to compare against
	 * @return true if the current token type equals one of {@code tokenTypes}
	 */
	public boolean equalAny(final TokenType... tokenTypes) {
		for (TokenType each : tokenTypes) {
			if (each == lexer.getCurrentToken().getType()) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Skip the current token if its type equals any of the given token types.
	 *
	 * @param tokenTypes token types that may be skipped
	 * @return true if the current token was skipped
	 */
	public boolean skipIfEqual(final TokenType... tokenTypes) {
		if (equalAny(tokenTypes)) {
			lexer.nextToken();
			return true;
		}
		return false;
	}

	/**
	 * Skip consecutive tokens as long as their types are among the given token types.
	 *
	 * @param tokenTypes token types to be skipped
	 */
	public void skipAll(final TokenType... tokenTypes) {
		Set<TokenType> tokenTypeSet = Sets.newHashSet(tokenTypes);
		while (tokenTypeSet.contains(lexer.getCurrentToken().getType())) {
			lexer.nextToken();
		}
	}

	/**
	 * Skip tokens until the current token type is one of the given token types
	 * or the end of input is reached.
	 *
	 * @param tokenTypes token types to stop at
	 */
	public void skipUntil(final TokenType... tokenTypes) {
		Set<TokenType> tokenTypeSet = Sets.newHashSet(tokenTypes);
		// Always stop at end-of-input to guarantee termination.
		tokenTypeSet.add(Assist.END);
		while (!tokenTypeSet.contains(lexer.getCurrentToken().getType())) {
			lexer.nextToken();
		}
	}

	/**
	 * Throw an unsupported exception if the current token type equals any of the given token types.
	 *
	 * @param tokenTypes unsupported token types
	 * @throws SqlParseException if the current token type is among {@code tokenTypes}
	 */
	public void unsupportedIfEqual(final TokenType... tokenTypes) {
		if (equalAny(tokenTypes)) {
			throw new SqlParseException(lexer.getCurrentToken().getType());
		}
	}

	/**
	 * Throw an unsupported exception if the current token type does not equal any of the
	 * given token types; otherwise skip the matched token.
	 *
	 * @param tokenTypes expected token types
	 * @throws SqlParseException if the current token type is not among {@code tokenTypes}
	 */
	public void unsupportedIfNotSkip(final TokenType... tokenTypes) {
		if (!skipIfEqual(tokenTypes)) {
			throw new SqlParseException(lexer.getCurrentToken().getType());
		}
	}

	/**
	 * Create a lexical analysis engine instance for the given database type.
	 *
	 * @param type database type (one of the {@link JDBCDrivers} constants)
	 * @param sql  SQL to analyze
	 * @return lexical analysis engine instance
	 * @throws SqlParseException if {@code type} is not a supported database type
	 */
	public static LexerEngine newInstance(final String type, final String sql) {
		if (JDBCDrivers.H2.equals(type)) {
			return new LexerEngine(new H2Lexer(sql));
		}
		if (JDBCDrivers.MYSQL.equals(type)) {
			return new LexerEngine(new MySQLLexer(sql));
		}
		if (JDBCDrivers.SQL_SERVER.equals(type)) {
			return new LexerEngine(new SQLServerLexer(sql));
		}
		if (JDBCDrivers.ORACLE.equals(type)) {
			return new LexerEngine(new OracleLexer(sql));
		}
		if (JDBCDrivers.POSTGRESQL.equals(type)) {
			return new LexerEngine(new PostgreSQLLexer(sql));
		}
		throw new SqlParseException(String.format("Cannot support database [%s].", type));
	}
}
