package com.gitee.l0km.jsonvisitor.pairmatcher;

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.Stack;

import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.Interval;

import com.gitee.l0km.jsonvisitor.AggregateFunction;
import com.gitee.l0km.jsonvisitor.Function;
import com.gitee.l0km.jsonvisitor.Predicate;
import com.gitee.l0km.jsonvisitor.exception.ParseException;
import com.gitee.l0km.jsonvisitor.parser.JsonPairLexer;
import com.gitee.l0km.jsonvisitor.parser.JsonPairParser;
import com.gitee.l0km.jsonvisitor.parser.JsonPairParser.ArrContext;
import com.gitee.l0km.jsonvisitor.parser.JsonPairParser.ObjContext;

import static com.gitee.l0km.jsonvisitor.utils.CharStreamSupport.asCharStream;
import static com.gitee.l0km.jsonvisitor.utils.CharStreamSupport.lexerOf;
import static com.gitee.l0km.jsonvisitor.utils.CharStreamSupport.parserOf;
import static com.gitee.l0km.jsonvisitor.utils.InterpreterUtils.quote;
import static com.gitee.l0km.jsonvisitor.utils.InterpreterUtils.unquote;

public class JsonPairMatcher {
//	JsonPairLexer lexer;
//	Predicate<String>keyFilter;
//	ValueFunction valueFunction;
//	AggregateFunction<String> resultAggreate;
//	public JsonPairMatcher(JsonPairLexer lexer, Predicate<String> keyFilter,
//			ValueFunction valueFunction, AggregateFunction<String> resultAggreate) {
//		this.lexer = lexer;
//		this.keyFilter = keyFilter;
//		this.valueFunction = valueFunction;
//		this.resultAggreate = resultAggreate == null ? new Agg<String>() : resultAggreate;
//	}
//	public JsonPairMatcher(Object input, Predicate<String> keyFilter,
//			ValueFunction valueFunction) {
//		this(lexerOf(asCharStream(input),JsonPairLexer.class), keyFilter, valueFunction, null);
//	}
//	public JsonPairMatcher(Object input, String key,
//			int ttype) {
//		this(lexerOf(asCharStream(input),JsonPairLexer.class), 
//				new KeyFilter(key), 
//				new TokenFunctionImpl(JsonPairLexer.STRING), 
//				null);
//	}
//
//
//	public String matchPair() {
//		return matchPair(lexer,keyFilter, null);
//	}
	/**
	 * 创建ANTLR4生成的解析器对象{@link JsonPairParser}
	 * 
	 * @param input 待解析的数据
	 */
	public static JsonPairLexer jsonPairLexerOf(Object input) {
		return lexerOf(asCharStream(input), JsonPairLexer.class);
	}

	/**
	 * 跳过当前值对
	 * 
	 * @param lexer
	 * @param stack
	 */
	private static void skipPair(JsonPairLexer lexer, Stack<Integer> stack) {
		Token t;
		while ((t = lexer.nextToken()).getType() != Token.EOF) {
			// log("{}:{}",JsonPairLexer.VOCABULARY.getSymbolicName(t.getType()),t.getText());
			if (stack.isEmpty()) {
				if (t.getType() == JsonPairLexer.COMMA || t.getType() == JsonPairLexer.RBRACE) {
					return;
				}
			}
			switch (t.getType()) {
			case JsonPairLexer.LBRACE:
			case JsonPairLexer.LBRACKET:
				stack.push(t.getType());
				break;
			case JsonPairLexer.RBRACE:
				if (stack.pop() != JsonPairLexer.LBRACE) {
					throw new ParseException(String.format("MISMATCH TOKEN,%s REUIRED",
							JsonPairLexer.VOCABULARY.getSymbolicName(JsonPairLexer.LBRACE)));
				}
				break;
			case JsonPairLexer.RBRACKET:
				if (stack.pop() != JsonPairLexer.LBRACKET) {
					throw new ParseException(String.format("MISMATCH TOKEN,%s REUIRED",
							JsonPairLexer.VOCABULARY.getSymbolicName(JsonPairLexer.LBRACKET)));
				}
				break;
			default:
				break;
			}
		}
	}
	/**
	 * 跳到 {@code tagetType} 指定的令牌位置当前值对
	 * @param lexer
	 * @param tagetToken
	 * @param stack
	 */
	private static Token skipTo(JsonPairLexer lexer, int tagetToken, Stack<Integer> stack) {
		Token t;
		while ((t = lexer.nextToken()).getType() != Token.EOF) {
			// log("{}:{}",JsonPairLexer.VOCABULARY.getSymbolicName(t.getType()),t.getText());
			if (stack.isEmpty()) {
				if (t.getType() == tagetToken) {
					return t;
				}
			}
			switch (t.getType()) {
			case JsonPairLexer.LBRACE:
			case JsonPairLexer.LBRACKET:
				stack.push(t.getType());
				break;
			case JsonPairLexer.RBRACE:
				if (stack.pop() != JsonPairLexer.LBRACE) {
					throw new ParseException(String.format("MISMATCH TOKEN,%s REUIRED",
							JsonPairLexer.VOCABULARY.getSymbolicName(JsonPairLexer.LBRACE)));
				}
				break;
			case JsonPairLexer.RBRACKET:
				if (stack.pop() != JsonPairLexer.LBRACKET) {
					throw new ParseException(String.format("MISMATCH TOKEN,%s REUIRED",
							JsonPairLexer.VOCABULARY.getSymbolicName(JsonPairLexer.LBRACKET)));
				}
				break;
			default:
				break;
			}
		}
		throw new ParseException("UNEXPECT EOF");
	}
	public static String matchPair(JsonPairLexer lexer,Predicate<String>keyFilter, ValueFunction valueFunction) {
		Token t = lexer.nextToken();
		if (JsonPairLexer.LBRACE == t.getType()) {
			while ((t = lexer.nextToken()).getType() != Token.EOF && t.getType() != JsonPairLexer.RBRACE) {
				// log("{}:{}",JsonPairLexer.VOCABULARY.getSymbolicName(t.getType()),t.getText());
				Token key = t;
				if (key.getType() != JsonPairLexer.STRING) {
					throw new ParseException(String.format("INVALID token,%s REQUIRED,%s",
							JsonPairLexer.VOCABULARY.getSymbolicName(JsonPairLexer.STRING), key));
				}
				if (keyFilter.test(key.getText())) {
					Token colon = lexer.nextToken();
					if (colon.getType() != JsonPairLexer.COLON) {
						throw new ParseException(String.format("INVALID token,%s REQUIRED,%s",
								JsonPairLexer.VOCABULARY.getSymbolicName(JsonPairLexer.COLON), colon));
					}
					Token value = lexer.nextToken();
					if (value.getType() == Token.EOF) {
						throw new ParseException("UNEXPECT EOF");
					}
					if(valueFunction instanceof TokenFunction) {
						return ((TokenFunction)valueFunction).apply(value);
					}else if(valueFunction instanceof ObjectFunction && value.getType() == JsonPairLexer.LBRACE) {
						Token right = skipTo(lexer,JsonPairLexer.RBRACE,new Stack<Integer>());
						String text = lexer.getInputStream().getText(Interval.of(value.getStartIndex(), right.getStopIndex()));
						ObjContext ctx = parserOf(text,JsonPairLexer.class , JsonPairParser.class).obj();
						return ((ObjectFunction)valueFunction).apply(ctx);
					}else if(valueFunction instanceof ArrayFunction && value.getType() == JsonPairLexer.LBRACKET) {
						Token right = skipTo(lexer,JsonPairLexer.RBRACKET,new Stack<Integer>());
						String text = lexer.getInputStream().getText(Interval.of(value.getStartIndex(), right.getStopIndex()));
						ArrContext ctx = parserOf(text,JsonPairLexer.class , JsonPairParser.class).arr();
						return ((ArrayFunction)valueFunction).apply(ctx);
					}else {
						return null;
					}
//					if (value.getType() == JsonPairLexer.STRING) {
//						return InterpreterUtils.unquote(value.getText());
//					}
				} else {
					skipPair(lexer, new Stack<Integer>());
				}
			}
		}
		return null;
	}

	public static String matchPair(Object input, Predicate<String>keyFilter,ValueFunction valueFunction) {
		return matchPair(jsonPairLexerOf(input),keyFilter, valueFunction);
	}
	public static String matchPair(Object input, String key) {
		return matchPair(jsonPairLexerOf(input),new KeyFilter(key), new TokenFunctionImpl(JsonPairLexer.STRING));
	}
	public static class KeyFilter implements Predicate<String>{
		private final String key;
		public KeyFilter(String key) {
			this.key = quote(key);
		}
		@Override
		public boolean test(String input) {
			return Objects.equals(key, input);
		}
		
	}
	private static final  Set<Integer> VALUE_TOKENS;
	private static final  Set<String> VALUE_TOKEN_NAMES;
	private static final  Set<Integer> VALUE_RULES;
	private static final  Set<String> VALUE_RULE_NAMES;
	static {
		
		List<Integer> tokens = Arrays.asList(
				JsonPairLexer.STRING,
				JsonPairLexer.BOOL,
				JsonPairLexer.NULL,
				JsonPairLexer.NUMBER);
		VALUE_TOKENS = Collections.unmodifiableSet(new HashSet<>(tokens));
		Set<String> tokenNames = new HashSet<>();
		for(Integer t:tokens) {
			tokenNames.add(JsonPairLexer.VOCABULARY.getSymbolicName(t));
		}
		VALUE_TOKEN_NAMES = Collections.unmodifiableSet(tokenNames);
		
		List<Integer> rules = Arrays.asList(JsonPairParser.RULE_obj,JsonPairParser.RULE_arr);
		VALUE_RULES = Collections.unmodifiableSet(new HashSet<>(rules));
		Set<String> ruleNames = new HashSet<>();
		for(Integer r:rules) {
			ruleNames.add(JsonPairLexer.ruleNames[r]);
		}
		VALUE_RULE_NAMES = Collections.unmodifiableSet(ruleNames);
	}
	public static class ObjectFunctionImpl implements ObjectFunction {
		public ObjectFunctionImpl() {
		}
		@Override
		public String apply(ObjContext ctx) {
			return ctx.getText();
		}		
	}
	public static class ArrayFunctionImpl implements ArrayFunction {
		public ArrayFunctionImpl() {
		}
		@Override
		public String apply(ArrContext ctx) {
			return ctx.getText();
		}		
	}
	public static class TokenFunctionImpl implements TokenFunction{
		private final Integer ttype;
		public TokenFunctionImpl(String type) {
			if (VALUE_RULE_NAMES.contains(type)) {
				int c = 0;
				for (String n : JsonPairLexer.ruleNames) {
					if (n.equals( type)) {
						break;
					}
					c++;
				}
				 this.ttype = c;
			} else if (null==type) {
				this.ttype = null;
			} else {
				throw new IllegalArgumentException("INVALID TYPE " + type);
			}
		}
		public TokenFunctionImpl(int ttype) {
			if(VALUE_TOKENS.contains(ttype)) {
				this.ttype = ttype;
			}else {
				throw new IllegalArgumentException(
						String.format( "INVALID TOKEN TYPE %d,%s required", ttype,VALUE_TOKENS));
			}
		}
		@Override
		public String apply(Token t) {
			if (null == ttype || t.getType() == ttype) {
				if (t.getType() == JsonPairLexer.STRING) {
					return unquote(t.getText());
				}
				return t.getText();
			}
			return null;
		}
		
	}
	public static class Agg<T> implements AggregateFunction<T>{
		@Override
		public T reduce(T l, T r) {
			if(l != null) {
				return l;
			}
			return r;
		}
	}
	public static interface ObjectFunction extends Function<ObjContext, String>,ValueFunction{
		
	}
	public static interface ArrayFunction extends Function<ArrContext, String>,ValueFunction{
		
	}
	public static interface TokenFunction extends Function<Token, String>,ValueFunction{
		
	}
	public static interface ValueFunction{
		
	}
}
