// scaup - Scala up!
// (c) 2009, Normen Müller, normen.mueller@gmail.com  
// $Id: Lexer.scala 86 2010-05-11 21:32:13Z normen.mueller $
package locutor.g.parsing

import scala.util.parsing.combinator._
import scala.util.parsing.combinator.lexical._
import scala.util.parsing.input.CharArrayReader.EofCh

/** A lexer for dependency graphs.
 * 
 *  @author <a href="mailto:normen.mueller@googlemail.com">Normen M&#xFC;ller</a>
 *  @version 0.0.4
 */
/** A lexer for dependency graphs.
 *
 *  Combines `StdLexical` (token types, `reserved` keyword set, `delim`) with
 *  `RegexParsers` (for the regex-based `number` rule). `Elem` is fixed to
 *  `Char` so both mixins agree on the input element type.
 */
private [g] class Lexer extends StdLexical with RegexParsers {
  override type Elem = Char

  /** Produces the next token.
   *
   *  Alternative order matters: `number` must precede `id` so digit runs
   *  become numeric literals, and `EofCh`/`failure` must be reachable once
   *  the productive alternatives fail (see `id` below).
   */
  override def token =
    ( string ^^ StringLit
    | number ^^ NumericLit
    | delim
    | id     ^^ {case i if reserved contains i => Keyword(i) case i => Identifier(i)}
    | EofCh ^^^ EOF
    | '\"' ~> failure("Unterminated string")
    | failure("Illegal character")
    )

  /** Whitespace: blanks, a bare backslash (line continuation), and
   *  `#`-to-end-of-line comments.
   */
  override def whitespace = rep(whitespaceChar | "\\" | '#' ~ rep(chrExcept(EofCh, '\n')))

  /** Single- or double-quoted string body (quotes stripped, escapes decoded).
   *  Newlines and EOF are not allowed inside a literal.
   */
  lazy val string =
    ( '\"' ~> rep(charSeq | chrExcept('\"', '\n', EofCh)) <~ '\"' ^^ { _ mkString "" }
    | '\'' ~> rep(charSeq | chrExcept('\'', '\n', EofCh)) <~ '\'' ^^ { _ mkString "" }
    )

  /** Optionally signed decimal literal with optional exponent and
   *  float/double suffix, e.g. `-3.14e-2f`.
   */
  lazy val number = """-?(\d+(\.\d*)?|\d*\.\d+)([eE][+-]?\d+)?[fFdD]?""".r

  /** Identifier: one or more letters, digits, or underscores.
   *
   *  NOTE: this was `rep(...)`, which also succeeds on the EMPTY string, so
   *  the `EofCh` and `failure` alternatives in `token` were unreachable: at
   *  end of input the scanner yielded `Identifier("")` forever instead of
   *  `EOF`, and illegal characters looped the same way. `rep1` requires at
   *  least one character, restoring EOF/error reporting.
   */
  lazy val id = rep1(letter | digit | elem("underscore", _=='_')) ^^ { _ mkString "" }

  /** Escape sequences inside string literals (JSON-style). */
  lazy val charSeq =
    ( '\\' ~ '\"' ^^^ "\"" | '\\' ~ '\\' ^^^ "\\" | '\\' ~ '/'  ^^^ "/" | '\\' ~ 'b'  ^^^ "\b" | '\\' ~ 'f'  ^^^ "\f"
    | '\\' ~ 'n'  ^^^ "\n" |'\\' ~ 'r'  ^^^ "\r" |'\\' ~ 't'  ^^^ "\t" |'\\' ~ 'u' ~> unicodeBlock
    )

  /** Four hex digits after `\u`, decoded to the corresponding character.
   *
   *  Uses `Character.toChars` instead of the removed `scala.io.UTF8Codec`:
   *  the old `new String(encode(cp))` decoded UTF-8 BYTES with the platform
   *  default charset, garbling non-ASCII escapes on non-UTF-8 platforms.
   */
  lazy val unicodeBlock = hexDigit ~ hexDigit ~ hexDigit ~ hexDigit ^^ {
    case a ~ b ~ c ~ d => new String(Character.toChars(Integer.parseInt(List(a, b, c, d) mkString "", 16)))
  }

  /** The characters accepted as hexadecimal digits (both cases). */
  lazy val hexDigits = Set[Char]() ++ "0123456789abcdefABCDEF".toArray

  lazy val hexDigit = elem("hex digit", hexDigits.contains(_))
}