#!/usr/bin/env ruby
#
#  Created by Dzema Dmitri on 2006-10-22.
#  Copyright (c) 2006. All rights reserved.
require "Tokens"
require "Error"
require "Messages"

  # Lexical alphabet of the language.  Frozen so the token tables cannot be
  # mutated at runtime (the Tokenizer builds its regexps from these).
  OPERATIONS  = ['=','+','-','*','/','.','<','>'].freeze            # single-character operators
  DOPERATIONS = ['==','<=','>=', '&&', '||', '!='].freeze           # two-character operators
  KEYWORDS    = ['for','if','else','end','while','def','return',   'int', 'float', 'string', 'array'].freeze
  DELIMETERS  = [';','(',')','[',']','{','}'].freeze                # statement/grouping delimiters
  QMARKS      = ['"',"'"].freeze                                    # string-literal quote marks
  
class Tokenizer
  # Streaming lexer: reads the source file one character at a time and
  # produces one token per #next_token call.  Token classes (StartToken,
  # EndFileToken, IntegerToken, ...) plus Error and Messages come from the
  # required project files.
  #
  # Public interface:
  #   line / pos    -- current 1-based position in the file
  #   next_token    -- advance and return the next token
  #   get_token     -- return the current token without advancing
  #   require_token -- consume the current token iff its value matches
  attr_reader :line, :pos

  # filePath -- path of the source file to tokenize
  # log      -- logger responding to #info / #debug
  def initialize(filePath, log)
    @log  = log
    @file = File.new(filePath,"r")
    @log.info("Tokenizer started normaly")
    @line = 1
    @pos  = 1
    @currentToken = StartToken.new("",@line,@pos)
    # Regexp.union escapes each entry, so '+', '*', '(' etc. match literally.
    @opsRegEx     = Regexp.union(OPERATIONS)
    # First characters of the two-character operators ('=', '<', '>', '&',
    # '|', '!').  BUG FIX: the previous hand-built pattern started with '|',
    # which introduced an empty alternative and made the regexp match ANY
    # character, so unrecognized symbols became operator tokens instead of
    # reaching the lexical-error branch in #next_token.
    @dopsRegEx    = Regexp.union(DOPERATIONS.map { |op| op[0, 1] }.uniq)
    @delimRegEx   = Regexp.union(DELIMETERS)
    @qMarksRegEx  = Regexp.union(QMARKS)
  end

  # Advances past the current token and returns the new one.  Once the end
  # of the file is reached, an EndFileToken is returned on every call.
  def next_token
    @log.debug("[Tokenizer] next_token")

    return @currentToken if @currentToken.kind_of?(EndFileToken)

    skip_spaces
    symbolCode = get_char

    if symbolCode.nil?
      replace_current_token EndFileToken.new("",@line,@pos)
      return @currentToken
    end

    error  = nil
    symbol = symbolCode.chr
    # Dispatch on the first character; each parse_* helper consumes the rest
    # of the token and installs it via replace_current_token.
    case symbol
    when /\d/
      parse_number(symbol)
    when /[a-zA-Z_]/
      parse_identifier(symbol)
    when @qMarksRegEx
      parse_quote_marks(symbol)
    when @delimRegEx
      parse_delimeter(symbol)
    when @dopsRegEx
      parse_doperation(symbol)
    when @opsRegEx
      parse_operation(symbol)
    else
      error = Error.new(Messages[:other][:un_rec_symbol], @line, @pos-1)
    end
    error && replace_current_token(ErrToken.new(error))
    @currentToken
  end

  # Returns the current token without advancing.
  def get_token
    @log.debug("[Tokenizer] get_token")
    return @currentToken
  end

  def to_s
    "[Tokenizer] operating <#{@file.path}> | current line #{@line} | current symbol #{@pos}"
  end

  # Consumes the current token and returns the following one when the
  # current token's value equals token_s; otherwise returns false and
  # leaves the stream untouched.
  def require_token(token_s)
    t = get_token
    return (t.value == token_s) && next_token || false
  end

  private

  # Installs newToken as the current token.  Tokens carrying a lexical
  # error are reported to stdout and skipped by fetching the next token.
  def replace_current_token(newToken)
    if !newToken.error
      @currentToken = newToken
    else
      puts("Lexical error: #{newToken.error}")
      next_token
    end
  end

  # Skips whitespace and '#' line comments while tracking @line/@pos.
  # BUG FIX: the old version guarded unget_char with !@file.eof?, so a
  # token that ended exactly at end-of-file lost its first character.
  def skip_spaces
    @log.debug("[Tokenizer] skip_spaces")

    while (char = get_char)
      case char.chr
      when "\n"
        @line = @line.next
        @pos = 1
      when "#"
        @file.gets # discard the rest of the comment line
        @line = @line.next
        @pos = 1
      when /\s/
        # plain whitespace: nothing to record
      else
        unget_char(char) # first character of the next token
        return
      end
    end
  end

  # TODO: fix comment handling.  Currently unused -- skip_spaces already
  # consumes '#' comments.
  def skip_comments
    @log.debug("[Tokenizer] skip_comments")

    char = get_char || return
    if char.chr == "#"
      # BUG FIX: nil-guard the loop -- get_char returns nil at EOF and the
      # old code then crashed on nil.chr.
      while char && char.chr == "#"
        @file.gets
        @line = @line.next
        @pos = 1
        char = get_char
      end
      return
    end
    unget_char(char)
  end

  # Reads one character, advancing @pos.  Returns nil at end of file.
  def get_char
    @pos = @pos.next
    char = @file.getc
    @log.debug("[Tokenizer] get_char -> #{char ? char.chr : 'nil'}")
    return char
  end

  # Pushes symbol back onto the stream and rewinds @pos by one.
  def unget_char(symbol)
    @log.debug("[Tokenizer] unget_char -> #{symbol ? symbol.chr : 'nil'}")

    @pos = @pos - 1
    @file.ungetc(symbol)
  end

  # Lexes an integer literal whose first digit is already in string.
  # Hands off to parse_float when a '.' is found.
  def parse_number(string)
    @log.debug("[Tokenizer] parse_number")

    floatTokenFound = false
    tokenPosition = @pos - 1
    while (char = get_char)
      case char.chr
      when /\d/
        string << char
      when /\./
        string << char
        parse_float(string, tokenPosition)
        floatTokenFound = true
        break
      else
        # BUG FIX: always push the terminator back; the old !@file.eof?
        # guard swallowed the final character of the file.
        unget_char(char)
        break
      end
    end
    if !floatTokenFound
      replace_current_token IntegerToken.new(string.to_i,@line,tokenPosition)
    end
  end

  # Continues lexing a float literal (digits after the '.', optional
  # exponent such as 1.5e-3).  A second 'e' is recorded as a format error
  # but lexing continues so the whole malformed literal is consumed.
  def parse_float(string, tokenPosition)
    @log.debug("[Tokenizer] parse_float")

    eFound = false
    errHappen = nil
    while (char = get_char)
      case char.chr
      when /\d/
        string << char
      when /e/
        if !eFound
          string << char
          eFound = true
        else
          errHappen = Error.new(Messages[:float][:wrong_format], @line, tokenPosition)
          string << char
        end
      when /-|\+/
        # A sign is only legal immediately after the exponent marker.
        if string[string.length-1].chr == "e"
          string << char
        else
          unget_char char
          break
        end
      else
        unget_char char
        break
      end
    end
    replace_current_token FloatToken.new(string.to_f,@line,tokenPosition,errHappen)
  end

  # Lexes an identifier or keyword starting with the given first character.
  def parse_identifier(symbol)
    @log.debug("[Tokenizer] parse_identifier")

    string = symbol
    until @file.eof?
      char = get_char
      if /[0-9a-zA-Z_]/.match(char.chr)
        string << char
      else
        unget_char char
        break
      end
    end
    if KEYWORDS.include?(string)
      replace_current_token KeywordToken.new(string, @line, @pos - 1)
    else
      replace_current_token IdentifierToken.new(string, @line, @pos - 1)
    end
  end

  # Emits a single-character operator token.
  def parse_operation(string)
    @log.debug("[Tokenizer] parse_operation")

    replace_current_token OperationToken.new(string, @line, @pos - 1)
  end

  # Tries to extend string with the next character into a two-character
  # operator ('==', '<=', ...); falls back to a single-character operator.
  def parse_doperation(string)
    @log.debug("[Tokenizer] parse_doperation")

    char = get_char
    # BUG FIX: at end of file there is no second character; the old code
    # appended nil to the string and raised a TypeError.
    if char.nil?
      parse_operation string
      return
    end
    t_string = "" << string << char
    if DOPERATIONS.include?(t_string)
      replace_current_token OperationToken.new(t_string, @line, @pos - 1)
    else
      unget_char char
      parse_operation string
    end
  end

  # Emits a delimiter token (';', parens, brackets, braces).
  def parse_delimeter(string)
    @log.debug("[Tokenizer] parse_delimeter")

    replace_current_token DelimeterToken.new(string, @line, @pos - 1)
  end

  # Lexes a quoted string literal delimited by startMark (' or ").
  # Recognized escapes: \\ -> \ and \<startMark> -> <startMark>; any other
  # backslash sequence is kept verbatim.
  # BUG FIX: the old &&-chained one-liners mis-parsed due to operator
  # precedence ('a && b = false && c = true' assigns only b = false and
  # never sets c), so the backslash flag was immediately re-armed after an
  # escaped backslash and input like "a\\" was lexed incorrectly.
  def parse_quote_marks(startMark)
    string = String.new()
    bSlashFound = false
    closed = false
    curPos = @pos; curLine = @line;
    while !closed && (char = get_char)
      c = char.chr
      if bSlashFound
        # Character following a backslash.
        if c == "\\" || c == startMark
          string << char          # \\ -> \ ; \" -> " (or \' -> ')
        else
          string << "\\"          # unknown escape: keep it verbatim
          string << char
        end
        bSlashFound = false
      elsif c == "\\"
        bSlashFound = true
      elsif c == startMark
        closed = true             # unescaped closing quote
      else
        string << char
      end
    end
    if closed
      replace_current_token StringToken.new(string, curLine, curPos - 1, startMark)
    else
      # NOTE(review): string-keyed lookup is inconsistent with the
      # symbol-keyed Messages[:float][:wrong_format] used elsewhere --
      # verify against the Messages definition before changing.
      err = Error.new(Messages['String']['NoEnd'], curLine, curPos - 1)
      replace_current_token ErrToken.new(err)
    end
  end

end

