(* 
Author: Esther Kundin
This module performs a simple tokenization of the question
string, keeping track of plain string pieces and references
to variables.
*)

{
  open Strast
  (*
   Accumulator for the tokens produced by [tokenize] below — each
   element is a [Variable] or a [Str] (constructors from Strast).
   A ref cell is used because the lexer actions build the list
   imperatively; new tokens are prepended, so after a full run the
   list holds the tokens in reverse lexing order.
   *)
  let toklist = ref []
}


(* characters allowed in a variable identifier (after the leading '$') *)
let identchar = ['A'-'Z' 'a'-'z' '_' '0'-'9']
(* a variable reference: a '$' followed by one or more identifier chars *)
let tok = '$' identchar+
(*
 Main entry point: scans [lexbuf] to the end, consing each token
 onto [toklist] (so the list ends up in reverse source order).
 Returns unit; the caller reads the result out of [toklist].
 *)
rule tokenize = parse
  (* an escaped dollar sign becomes a literal "$" string token *)
  | '\\' '$'
      { toklist := Str "$" :: !toklist;
        tokenize lexbuf }
  (* an escaped double quote becomes a literal quote string token *)
  | '\\' '"'
      { toklist := Str "\"" :: !toklist;
        tokenize lexbuf }
  (* any other backslash is kept as a doubled backslash
     (NOTE(review): this re-escapes the character — presumably for
     later output; confirm against the consumer of toklist) *)
  | '\\'
      { toklist := Str "\\\\" :: !toklist;
        tokenize lexbuf }
  (* bare double quotes are dropped *)
  | '"'
      { tokenize lexbuf }
  (* literal newlines inside the string are dropped as well *)
  | '\n'
      { tokenize lexbuf }
  (* a $name variable reference, kept with its leading '$' *)
  | tok
      { toklist := Variable (Lexing.lexeme lexbuf) :: !toklist;
        tokenize lexbuf }
  (* every other character becomes its own one-character Str token *)
  | _
      { toklist := Str (Lexing.lexeme lexbuf) :: !toklist;
        tokenize lexbuf }
  (* end of input: stop the recursion *)
  | eof
      { () }
  
  
  
(**** 
the following was used for testing just this module 
{
let string_of_printstring = function
   Variable(s) -> print_string ("variable: "); print_endline(s)
 | Str(s) -> print_endline(s)

  let main () =
    let lexbuf = Lexing.from_string "this is a $token test \\$token2 " in
    tokenize lexbuf;
    Printf.printf "tokens: \n" ;
    List.iter string_of_printstring toklist.contents

  let _ = Printexc.print main ()
}
****)
