/*
 * http://code.google.com/p/simple-lexing-parsers-4-scala/
 * 
 * Copyright (c) 2011, Sanjay Dasgupta
 * All rights reserved.
 * 
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * 
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * The name of the author may not be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 * 
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package slp

import scala.collection._
import scala.util.matching.Regex
import scala.util.parsing.combinator.{RegexParsers, Parsers}
import scala.util.parsing.input.CharSequenceReader

/**
 * Mixes a single, shared, longest-match lexer into `RegexParsers`.
 *
 * Every `literal(...)` / `regex(...)` call registers a token; on the first
 * actual parse the registered tokens are compiled into one `Lexer` (a
 * project-declared class, built in `setupLexer`). Each token's parser then
 * asks that shared lexer what token appears at the current offset, and
 * succeeds only when it is its own. Lexing results are memoized per offset
 * so alternatives retried at the same position do not re-lex.
 *
 * Tokens must all be declared before the first use of any parser; late
 * declarations raise `IllegalStateException`.
 */
trait SimpleLexingParsers extends RegexParsers {

  /** Shorthand for the string-yielding parsers this trait hands out. */
  type P = Parser[String]

  /**
   * Escapes regex metacharacters in `es` so a literal token can be embedded
   * verbatim in the lexer's combined pattern.
   *
   * Fix: the backslash itself is now escaped too — previously a literal
   * containing '\' leaked it into the pattern as a regex escape, producing a
   * malformed or wrongly-matching token.
   */
  private def escapeRegexMetachars(es: String) = {
    val sb = new StringBuilder()
    for (c <- es) {
      c match {
        case '\\' | '|' | '*' | '+' | '?' | '-' => sb.append('\\').append(c)
        case '(' | ')' | '[' | ']' | '{' | '}' => sb.append('\\').append(c)
        case ',' | '^' | '.' | '$' | '\"' => sb.append('\\').append(c)
        case _ => sb.append(c)
      }
    }
    sb.toString
  }

  /** Escape hatches to the unmodified `RegexParsers` implementations,
   *  bypassing this trait's token registration entirely. */
  def literal$(s: String) = super.literal(s)
  def regex$(r: Regex) = super.regex(r)

//  def back[T](p: Parser[T]): Parser[T] = Parser(in => p(new CharSequenceReader(in.source, backOffsets(in.offset))))

  /**
   * Registers `lit` as a lexer token (on first use) and returns its parser.
   * Equal literals always map to the same token id and parser instance.
   *
   * @throws IllegalArgumentException if `lit` is null or empty
   * @throws IllegalStateException if a new literal is introduced after the
   *         lexer has been built (i.e. after parsing has started)
   */
  implicit override def literal(lit: String): P = {
    if ((lit eq null) || lit.isEmpty)
      throw new IllegalArgumentException("Null or empty literal string")
    val escaped = escapeRegexMetachars(lit)
    if (tokenMap.contains(escaped))
      tokenMap(escaped)
    else {
      if (lexer ne null)
        throw new IllegalStateException("Define literal(%s) before use".format(lit))
      val id = tokens.size
      tokens.append(Left(escaped))
      val parser = parserById(id)
      tokenMap(escaped) = parser
      parser
    }
  }

  /** Convenience overloads: register 2..10 literals at once and return
   *  their parsers as a tuple, preserving argument order. */
  def literal(lit1: String, lit2: String, lit3: String, lit4: String, lit5: String, lit6: String, 
       lit7: String, lit8: String, lit9: String, lit10: String): Tuple10[P, P, P, P, P, P, P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4), literal(lit5), literal(lit6), 
       literal(lit7), literal(lit8), literal(lit9), literal(lit10))
  def literal(lit1: String, lit2: String, lit3: String, lit4: String, lit5: String, lit6: String, 
       lit7: String, lit8: String, lit9: String): Tuple9[P, P, P, P, P, P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4), literal(lit5), literal(lit6), 
       literal(lit7), literal(lit8), literal(lit9))
  def literal(lit1: String, lit2: String, lit3: String, lit4: String, lit5: String, lit6: String, 
       lit7: String, lit8: String): Tuple8[P, P, P, P, P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4), literal(lit5), literal(lit6), 
       literal(lit7), literal(lit8))
  def literal(lit1: String, lit2: String, lit3: String, lit4: String, lit5: String, lit6: String, 
       lit7: String): Tuple7[P, P, P, P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4), literal(lit5), literal(lit6), 
       literal(lit7))
  def literal(lit1: String, lit2: String, lit3: String, lit4: String, lit5: String, lit6: String
        ): Tuple6[P, P, P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4), literal(lit5), literal(lit6))
  def literal(lit1: String, lit2: String, lit3: String, lit4: String, lit5: String): Tuple5[P, P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4), literal(lit5))
  def literal(lit1: String, lit2: String, lit3: String, lit4: String): Tuple4[P, P, P, P] = 
       (literal(lit1), literal(lit2), literal(lit3), literal(lit4))
  def literal(lit1: String, lit2: String, lit3: String): Tuple3[P, P, P] = (literal(lit1), literal(lit2), literal(lit3))
  def literal(lit1: String, lit2: String): Tuple2[P, P] = (literal(lit1), literal(lit2))

  /**
   * Registers `reg` as a lexer token (on first use) and returns its parser.
   * Regexes are deduplicated by their string form.
   *
   * @throws IllegalArgumentException if `reg` is null or its pattern is empty
   * @throws IllegalStateException if a new regex is introduced after the
   *         lexer has been built
   */
  implicit override def regex(reg: Regex): P = {
    if (reg eq null)
      throw new IllegalArgumentException("Null regex value")
    val regAsString = reg.toString
    if (regAsString.isEmpty)
      throw new IllegalArgumentException("Empty regex string")
    if (tokenMap.contains(regAsString))
      tokenMap(regAsString)
    else {
      if (lexer ne null)
        throw new IllegalStateException("Define regex(%s) before use".format(reg))
      val id = tokens.size
      tokens.append(Right(regAsString))
      val parser = parserById(id)
      tokenMap(regAsString) = parser
      parser
    }
  }

  /** Convenience overloads: register 2..10 regexes at once and return
   *  their parsers as a tuple, preserving argument order. */
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex, reg5: Regex, reg6: Regex, 
       reg7: Regex, reg8: Regex, reg9: Regex, reg10: Regex): Tuple10[P, P, P, P, P, P, P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4), regex(reg5), regex(reg6), 
       regex(reg7), regex(reg8), regex(reg9), regex(reg10))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex, reg5: Regex, reg6: Regex, 
       reg7: Regex, reg8: Regex, reg9: Regex): Tuple9[P, P, P, P, P, P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4), regex(reg5), regex(reg6), 
       regex(reg7), regex(reg8), regex(reg9))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex, reg5: Regex, reg6: Regex, 
       reg7: Regex, reg8: Regex): Tuple8[P, P, P, P, P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4), regex(reg5), regex(reg6), 
       regex(reg7), regex(reg8))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex, reg5: Regex, reg6: Regex, 
       reg7: Regex): Tuple7[P, P, P, P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4), regex(reg5), regex(reg6), 
       regex(reg7))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex, reg5: Regex, reg6: Regex 
       ): Tuple6[P, P, P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4), regex(reg5), regex(reg6))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex, reg5: Regex): Tuple5[P, P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4), regex(reg5))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex, reg4: Regex): Tuple4[P, P, P, P] = 
       (regex(reg1), regex(reg2), regex(reg3), regex(reg4))
  def regex(reg1: Regex, reg2: Regex, reg3: Regex): Tuple3[P, P, P] = (regex(reg1), regex(reg2), regex(reg3))
  def regex(reg1: Regex, reg2: Regex): Tuple2[P, P] = (regex(reg1), regex(reg2))

  /**
   * Builds the parser for token `id`: it runs the shared lexer at the
   * current offset (building the lexer lazily on first use) and succeeds
   * only when the token found there is this one.
   */
  private def parserById(id: Int): P = {
    def formatToken(tid: Int) = tokens(tid) match {
      case Left(lit) => "literal(%s)".format(lit)
      case Right(reg) => "regex(%s)".format(reg)
    }
    Parser(in => {
        if (lexer eq null)
          setupLexer()          // first parser invocation freezes the token set
        if (traceTokens)
          printf("Trying %s @ (%d,%d)%n", formatToken(id), in.pos.line, in.pos.column)
        if (in.atEnd)
          Failure("End-of-input found", in)
        // NOTE(review): assumes the reader was built over a String (as
        // CharSequenceReader over a String is) — confirm for other readers.
        else globalTokenLexer(in.source.asInstanceOf[String], in.offset) match {
          case (null, _, charsConsumed) =>
            // Nothing matched: distinguish trailing whitespace-to-EOF from
            // genuinely unrecognizable input.
            val next = in.drop(charsConsumed)
            if (next.atEnd) {
              if (traceTokens)
                printf("Failure %s @ (%d,%d)%n", "End-of-input found", in.pos.line, in.pos.column)
              Failure("End-of-input found", in)
            } else {
              if (traceTokens)
                printf("Failure %s @ (%d,%d)%n", formatToken(id), in.pos.line, in.pos.column)
              Failure("Unknown-inputs found", in)
            }
          case (matchString, matchId, charsConsumed) => 
            if (matchId == id) {
              val next = in.drop(charsConsumed)
              if (traceTokens)
                printf("Success %s @ (%d,%d)%n", formatToken(id), next.pos.line, next.pos.column)
              Success(matchString, next)
            } else {
              // Some other token matched here; this parser fails without
              // consuming input so alternatives can try the same offset.
              if (traceTokens)
                printf("Failure %s @ (%d,%d)%n", formatToken(id), in.pos.line, in.pos.column)
              Failure("expected #%d (%s), found #%d (%s)".format(id, formatToken(id), matchId, formatToken(matchId)), in)
            }
        }
      })
  }
  
  /**
   * Lexes `inStr` at `offset`, memoizing the result so that alternative
   * parsers retrying the same offset do not re-lex. Skips whitespace first
   * via `RegexParsers.handleWhiteSpace`; the returned chars-consumed count
   * includes that leading whitespace.
   *
   * NOTE(review): the cache-hit branch does a raw map lookup — it relies on
   * every offset <= maxOffset having been lexed before, which holds as long
   * as parsers only reach offsets by advancing through cached results.
   */
  private def globalTokenLexer(inStr: String, offset: Int): LexResults = {
    if (offset > maxOffset) {
      val postSpaces = handleWhiteSpace(inStr, offset)
      val res = lexer.lex(inStr, postSpaces)
      val consumed =
        if (res._1 eq null) postSpaces - offset
        else postSpaces - offset + res._1.length
      val lexResult = (res._1, res._2, consumed)
      lexResultsCache(offset) = lexResult
      maxOffset = offset
//      backOffsets(offset) = lastOffset
//      lastOffset = offset
      lexResult
    } else {
      lexResultsCache(offset)
    }
  }
  
  /** Resets the per-parse lexer cache before delegating to
   *  `RegexParsers.phrase`, so each top-level parse starts fresh. */
  override def phrase[T](p: Parser[T]): Parser[T] = {
    lexResultsCache.clear()
//    backOffsets.clear()
//    lastOffset = -1
    maxOffset = -1
    super.phrase(p)
  }

  /** Compiles all registered tokens into the shared lexer. Called lazily on
   *  the first parse attempt; afterwards no new tokens may be declared. */
  private def setupLexer(): Unit = {
    // Pre-seed the end-of-input pattern so a later regex("\\z".r) resolves
    // to the plain RegexParsers parser instead of becoming a token.
    tokenMap("\\z") = super.regex("\\z".r)
    lastCallForTokens()   // final chance for subclasses to register tokens
    lexer = new Lexer(tokens.map {
      case Left(lit) => lit
      case Right(reg) => reg
    }.toArray)
  }

  /** Prints one description line per registered token, for debugging.
   *  Fix: the original built the description strings but discarded them
   *  (foreach ignores its function's result), so nothing was ever shown. */
  def dumpTokens(): Unit = {
    lastCallForTokens()
    tokens.foreach {
      case Left(lit) => println("literal(%s)".format(lit))
      case Right(reg) => println("regex(%s)".format(reg))
    }
  }

  /** Subclass hook: invoked once, just before the lexer is built, as a last
   *  opportunity to register tokens. Default implementation does nothing. */
  protected def lastCallForTokens(): Unit = {
  }

  // Registered tokens in declaration order: Left = escaped literal, Right = regex source.
  private val tokens = mutable.Buffer[Either[String, String]]()
  // Shared lexer; null until setupLexer() runs, after which tokens are frozen.
  private var lexer: Lexer = null
  // Dedup map from escaped-literal / regex string to its parser.
  private val tokenMap = mutable.HashMap[String, P]()
  // When true, every token attempt/success/failure is logged to stdout.
  var traceTokens = false
//  private val backOffsets = mutable.HashMap[Int, Int]()
//  private var lastOffset = -1
  // Highest offset lexed so far during the current phrase(...) parse.
  private var maxOffset = -1
  // matched-string (null if no match), token-id, chars-consumed (incl. whitespace)
  private type LexResults = (String, Int, Int)
  private val lexResultsCache = mutable.HashMap[Int, LexResults]()
}
