package fst.parsing

/** Parsers over a token stream produced on the fly from a character-level
  * lexer. The lexer is abstract (`lexical`); this trait adapts the lexer's
  * raw `Input` into the parser framework's `ParseInput` one token at a time.
  */
trait TokenParsers extends Parsers {
  // Concrete lexer type, refined by subtraits (e.g. StdTokenParsers below).
  type Lexical <: LexicalCore
  val lexical: Lexical
  // The parser consumes the lexer's raw input directly; element type is a token.
  type Input = lexical.Input
  type Elem = lexical.Token

  /** Implicit adapter: views a raw lexer input position as a `ParseInput`,
    * scanning exactly one token (plus any preceding whitespace) eagerly on
    * construction. NOTE(review): a fresh adapter (and hence a fresh `scan`)
    * is built on every implicit conversion — presumably acceptable cost, but
    * there is no memoization across conversions; verify against callers.
    */
  implicit def InputIsParseInput(in: Input) = new ParseInput { val self = in
    import lexical.{errorToken, token, whitespace, Success => TokSuccess, NoSuccess => TokNoSuccess, InputIsParseInput => LexIn }
    
    // tok:   the token scanned at this position (or an error token)
    // rest1: input just after whitespace, i.e. where the token itself starts
    // rest2: input just after the token — the position of the next token
    private val (tok, rest1, rest2) = scan(in)
    
    /** Skips whitespace, then reads one token. On a whitespace or token
      * failure, yields an error token and advances the input by a single
      * element (via `skip`) so that repeated scanning cannot loop forever
      * on the same bad character.
      */
    def scan(in : Input) = {
      // Advance one element, but never past end-of-input.
      def skip(in: Input) = if (LexIn(in).atEnd) in else LexIn(in).rest
      
      whitespace(in) match {
        case TokSuccess(_, in1) => 
          token(in1) match {
            case TokSuccess(tok, in2) => (tok, in1, in2)
            // Token-level failure: report an error token at the failure point.
            case ns: TokNoSuccess => (errorToken(ns.errorInfo), ns.info, skip(ns.info))
          }
        // Whitespace-level failure: same error-recovery strategy.
        case ns: TokNoSuccess => (errorToken(ns.errorInfo), ns.info, skip(ns.info))
      }
    }
    

    def first = tok
    def rest = rest2
    // NOTE(review): position reporting is disabled; rest1 (token start) is
    // otherwise unused — presumably kept for a future `pos` implementation.
//    def pos = rest1.pos
    // At end when the raw input is exhausted, or when only whitespace remains.
    def atEnd = LexIn(in).atEnd || (whitespace(in) match { case TokSuccess(_, in1) => LexIn(in1).atEnd case _ => false })
    // Ordering of input positions delegates to the lexer-level adapter.
    def laterThan(o: Input): Boolean = LexIn(in).laterThan(o)
  }
  
}


/** Token parsers specialized to a standard lexer: adds convenience parsers
  * for keyword and identifier tokens.
  */
trait StdTokenParsers extends TokenParsers {
  type Lexical <: StdLexical
  import lexical._
  
  /** Implicitly lifts a string literal to a parser accepting the matching
    * `Keyword` token, so grammars can write `"if" ~ expr` directly.
    */
  implicit def keyword(s: String): Parser[Token] = accept(Keyword(s))
  /** Accepts any `Identifier` token and yields its name. The `^^` pattern
    * match is total here because `acceptIf` has already checked the type.
    */
  def ident: Parser[String] = acceptIf(_.isInstanceOf[Identifier]) ^^ { case Identifier(x) => x}
}