{
module LPParser(Rule(..), Token(..), parse, lexer) where
import Char(isSpace, isAlpha, isDigit, isAlphaNum)
}

-- Happy parser declarations: exported entry point, token type, and the
-- function invoked on a parse failure.
%name parse
%tokentype { Token }
%error { parseError }

-- Terminal symbols: each line maps a grammar terminal to the lexer 'Token'
-- constructor that produces it. '$$' extracts the payload carried by the
-- constructor (the identifier or string-literal text).
%token 
      '\n'             { TokenNL }
      '{'              { TokenCurlyOpen }
      '}'              { TokenCurlyClose }
      token            { TokenToken }
      ident            { TokenIdentifier $$ }
      string           { TokenStringLiteral $$ }
      char             { TokenChar }
      keyword          { TokenKeyword }

%%

-- A file is a (possibly empty) list of rules, built right-to-left.
Rules : Rule Rules              { $1 : $2 }
      | {- empty -}             { [] }

-- Every rule form is terminated by a newline token (the lexer guarantees a
-- trailing TokenNL at end of input). A bare newline parses as 'NoRule'.
Rule  : token ident '{' ident '}' '{' ident '}' '\n'    { RuleToken $2 $4 $7 }
      | char string '{' ident '}' '\n'                  { RuleChar $2 $4 }
      | keyword string '{' ident '}' '{' ident '}' '\n' { RuleKeyword $2 $4 $7 }
      | '\n'                                            { NoRule }

{

-- | Abort parsing with a diagnostic. Called by Happy (via the %error
-- directive) with the remaining token stream; never returns.
parseError :: [Token] -> a
parseError []      = error "Parse error: unexpected end of input"
parseError (tok:_) = error ("Parse error on token " ++ show tok)

-- | Abstract syntax for one line of the rule file, as produced by 'parse'.
data Rule 
      = RuleToken String String String    -- Name, Type, toString function
      | RuleChar String String            -- Char, Token
      | RuleKeyword String String String  -- Keyword, Token, Value
      | NoRule                            -- A blank line (bare newline)
      deriving Show

-- | Lexical tokens emitted by 'lexer' and consumed by the generated parser.
data Token
      = TokenNL                     -- newline (also emitted once at EOF)
      | TokenToken                  -- the keyword "Token"
      | TokenCurlyOpen              -- '{'
      | TokenCurlyClose             -- '}'
      | TokenIdentifier String      -- any other alphanumeric word
      | TokenStringLiteral String   -- contents of a double-quoted string
      | TokenChar                   -- the keyword "Char"
      | TokenKeyword                -- the keyword "Keyword"
 deriving Show

-- | Tokenize the rule-file source. A final 'TokenNL' is always appended so
-- the grammar's newline-terminated productions can match at end of input.
--
-- Fixes over the previous version: specific characters are matched before
-- the catch-all guards (no reliance on guard fall-through), and an
-- unrecognized character now raises a descriptive lexical error instead of
-- a bare pattern-match failure.
lexer :: String -> [Token]
lexer []         = [TokenNL] -- Make sure we always end with a newline
lexer ('"':cs)   = buildString cs ""
lexer ('#':cs)   = lexer (consumeComment cs)
lexer ('{':cs)   = TokenCurlyOpen : lexer cs
lexer ('}':cs)   = TokenCurlyClose : lexer cs
lexer ('\n':cs)  = TokenNL : lexer cs
lexer (c:cs)
      | isSpace c = lexer cs          -- '\n' already handled above
      | isAlpha c = lexVar (c:cs)     -- words/keywords start with a letter
      | otherwise = error ("Lexical error at character " ++ show c)

-- | Skip a '#' comment: drop everything up to and including the first
-- newline, returning the remainder. If no newline follows (comment at end
-- of input), the whole rest of the input is consumed.
consumeComment :: String -> String
consumeComment input =
      case dropWhile (/= '\n') input of
            ('\n':rest) -> rest
            _           -> []

-- | Accumulate the body of a double-quoted string literal. A doubled quote
-- ("") inside the literal is kept as an escaped quote (backslash-quote) in
-- the accumulated text; a single quote terminates the literal.
--
-- Fixes over the previous version: the doubled-quote case is pattern-matched
-- directly instead of using partial 'head'/'tail' (which crashed when the
-- closing quote was the last character of input), and an unterminated
-- literal now reports a clear error instead of a pattern-match failure.
buildString :: String -> String -> [Token]
buildString ('"':'"':xs) s = buildString xs (s ++ "\\\"")
buildString ('"':xs)     s = TokenStringLiteral s : lexer xs
buildString []           _ = error "Lexical error: unterminated string literal"
buildString (x:xs)       s = buildString xs (s ++ [x])

-- | Lex a word starting with a letter: the reserved words "Token", "Char"
-- and "Keyword" become their dedicated tokens; anything else is an
-- identifier.
--
-- Fixes over the previous version: a type signature is added, and the word
-- is taken with 'isAlphaNum' rather than 'isAlpha', so identifiers may
-- contain digits after the first character (previously "tok2" lexed as the
-- identifier "tok" followed by a stray "2", which looped the lexer).
lexVar :: String -> [Token]
lexVar cs =
   case span isAlphaNum cs of
      ("Token",rest)    -> TokenToken : lexer rest
      ("Char",rest)     -> TokenChar : lexer rest
      ("Keyword",rest)  -> TokenKeyword : lexer rest
      (var,rest)        -> TokenIdentifier var : lexer rest

}



