{
  (** Lexer specification for the WHILE language (ocamllex input).
      Turns raw character input into the token constructors declared in
      [Parser] (Token_While, Token_Num, ...). *)
  open Parser
}

(* A single whitespace character; the lexer rule skips runs of these. *)
let BLANK = [' ' '\t' '\n']

(* Rest of a line, up to and including the terminating newline. *)
let LINE = [^ '\n']* '\n'

(* A '#' comment runs to end of line but deliberately does NOT consume
   the newline.  The previous form ('#' LINE) required a trailing '\n',
   so a comment on the last line of a file with no final newline failed
   to match: the '#' fell through to the bad-character rule and the
   comment text was tokenized as code.  The newline, when present, is
   skipped afterwards as BLANK. *)
let COMMENT = '#' [^ '\n']*

(* A single decimal digit (also used for the tail characters of WORD). *)
let NUM = ['0'-'9']
(* Boolean literals. *)
let BOOL = "true"|"false"
(* Characters allowed to start an identifier. *)
let ALPHA =  ['a'-'z' 'A'-'Z' '_' ]
(* Identifiers: a letter or '_' followed by letters, digits, or '_'.
   Keywords also match this shape; they win because their rules appear
   earlier in make_token (ocamllex breaks length ties by rule order). *)
let WORD = ALPHA (ALPHA | NUM)*
(* Integer literals with an optional leading minus sign.
   NOTE(review): by longest-match, input such as "x-1" lexes as
   WORD then Num(-1) — not WORD MINUS Num(1).  Confirm the parser
   accounts for this, or drop the '-'? and handle unary minus there. *)
let NUMBER = '-'? ['0'-'9']+

(* Wildcard: any single character (fallback for unrecognized input). *)
let ANY = _

(* Parentheses. *)
let LPAR = '('
let RPAR = ')'

(* Statement keywords and separators. *)
let SKIP = "skip"
let SEQ = ';'
let WHILE = "while"
let DO = "do"
let IF = "if"
let THEN = "then"
let ELSE = "else"
let ASSIGN = ":="

(* Arithmetic operators. *)
let PLUS = '+'
let MINUS = '-'
let TIMES = '*'
let DIV = '/'
let POW = '^'

(* Boolean operators; negation accepts either '!' or '~'. *)
let NOT = ['!' '~']
let AND = "&&"
let OR  = "||"

(* Comparison operators; NEQ accepts both spellings. *)
let EQUAL = '='
let NEQ   = "<>" | "!="
let LESS  = '<'
let LESSE = "<="
let GREAT = '>'
let GREATE= ">="

(** [make_token lexbuf] returns the next WHILE token from [lexbuf].

    Whitespace and '#' comments are skipped by recursing (tail calls, so
    no stack growth).  Keyword rules are listed before WORD: ocamllex
    prefers the longest match and, on equal length, the earliest rule,
    so "while" yields Token_While rather than Token_Var "while".
    Unrecognized characters are reported on stderr and skipped. *)
rule make_token = parse

  | BLANK+              {make_token lexbuf}     (* Skip whitespace runs *)
  | COMMENT             {make_token lexbuf}     (* Skip '#' comments *)

  | eof                 {Token_EOF}     (* Give up on end of file *)

  | LPAR                {Token_LPar}
  | RPAR                {Token_RPar}

  | SKIP                {Token_Skip}
  | SEQ                 {Token_Seq}
  | WHILE               {Token_While}
  | DO                  {Token_Do}
  | IF                  {Token_If}
  | THEN                {Token_Then}
  | ELSE                {Token_Else}
  | ASSIGN              {Token_Assign}

  | PLUS                {Token_Plus}
  | MINUS               {Token_Minus}
  | TIMES               {Token_Mult}
  | DIV                 {Token_Div}
  | POW                 {Token_Pow}
  | NOT                 {Token_Not}
  | AND                 {Token_And}
  | OR                  {Token_Or}
  | EQUAL               {Token_Equal}
  | NEQ                 {Token_NotEqual}
  | LESS                {Token_Less}
  | LESSE               {Token_LessEqual}
  | GREAT               {Token_Greater}
  | GREATE              {Token_GreaterEqual}

  | NUMBER
    {
      (* Optional '-' plus digits: int_of_string accepts both forms. *)
      let s = (Lexing.lexeme lexbuf)
      in Token_Num(int_of_string(s))
    }
  | BOOL 
    {
        let s = (Lexing.lexeme lexbuf)
        in Token_Bool(bool_of_string(s))
    }

  | WORD
      {
        let s = (Lexing.lexeme lexbuf)
        in Token_Var(s)
      }

  | ANY (* Default case: report the bad character, then skip it *)
      {
        let s = (Lexing.lexeme lexbuf)
        (* Diagnostics go to stderr (prerr_string), not stdout as before,
           so piped program output is not polluted by lexer warnings. *)
        in (prerr_string ("Bad char: " ^ s ^ ". Continuing...\n");
            make_token lexbuf)
      }
