class Tokenizer:
  """Whitespace/comment-aware tokenizer over the contents of a file.

  A token is either one of the single-character symbols ``( ) [ ] , = " '``
  or a maximal run of characters containing neither whitespace nor a
  symbol.  ``#`` starts a comment that extends to the end of the line and
  is skipped entirely.
  """

  # Characters that separate tokens and are never part of one.
  _SEPARATORS = frozenset(' \t\r\n')
  # Characters always emitted as their own one-character token.
  _SINGLE_CHARS = frozenset('()[],="\'')

  def __init__(self, filename):
    # Read the whole file up front; the 'with' closes the handle promptly
    # (the original left the file object open, relying on GC to close it).
    with open(filename) as f:
      self.__content = f.read()
    self.__p = 0
    self.__end = len(self.__content)

  def advance(self):
    """Consume and return the next token, or None at end of input."""
    content, end = self.__content, self.__end
    p = self.__p
    # Iteratively skip whitespace runs and '#' comments.  The original
    # recursed here, so many alternating comment/blank lines could
    # exhaust the Python call stack.
    while p != end:
      ch = content[p]
      if ch in self._SEPARATORS:
        p += 1
      elif ch == '#':
        while p != end and content[p] != '\n':
          p += 1
      else:
        break
    if p == end:
      self.__p = p
      return None
    start = p
    if content[start] in self._SINGLE_CHARS:
      p = start + 1
    else:
      # Maximal run of ordinary characters.
      while (p != end and
             content[p] not in self._SEPARATORS and
             content[p] not in self._SINGLE_CHARS):
        p += 1
    self.__p = p
    return content[start:p]

  def peek(self):
    """Return the next token without consuming it (None at end).

    The saved position is a local, and try/finally guarantees the cursor
    is restored even if advance() raises.
    """
    saved = self.__p
    try:
      return self.advance()
    finally:
      self.__p = saved

