// Copyright 2012 Jay Young. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview
 *
 * The Tokenizer class.
 * 
 * @author Jay Young (jayyoung9909@gmail.com)
 * @see Relief!
 */

goog.provide('rdoc.Tokenizer');

goog.require('goog.events');



/**
 * A stream reader interface.  Implementations produce tokens one at a
 * time; consumers (such as {@code rdoc.Tokenizer}) pull tokens until the
 * stream is exhausted.
 *
 * @interface
 */
rdoc.Stream = function() {};


/**
 * Reads and returns the next token from the stream.  Abstract; concrete
 * implementations must override this method.
 *
 * @return {string} The next token.
 */
rdoc.Stream.prototype.nextToken = goog.abstractMethod;



/**
 * The Tokenizer class.  Responsible for tokenizing an input stream.
 *
 * @deprecated Use something else.
 *
 * @param {string} filename The name of the file to tokenize.
 * @param {boolean} warnings Display parser warnings.
 * @constructor
 */
rdoc.Tokenizer = function(filename, warnings) {
  /**
   * The file to tokenize.
   *
   * @type {string}
   * @private
   */
  this.filename_ = filename;

  /**
   * Whether to display parser warnings.  Previously this parameter was
   * accepted but silently discarded.
   *
   * @type {boolean}
   * @private
   */
  this.warnings_ = warnings;
};


/**
 * Print the current representation.
 */
rdoc.Tokenizer.prototype.print = function() {
  /** @private {boolean} Set once printing has occurred at least once. */
  this.printed_ = true;
  console.log('Printing.');
};


rdoc.Tokenizer.prototype.defaultLength = 0;

/**
 * Prints an error message, optionally capitalized for emphasis, and
 * returns the tokenizer so calls can be chained.
 *
 * The previous annotations declared {@code error} as an object map and
 * {@code caps} as an array, but the body calls {@code error.toUpperCase()}
 * and treats {@code caps} as a flag, so the types are corrected here.
 *
 * @param {string} error The error message to print.
 * @param {boolean} caps True if you want to capitalize for emphasis.
 * @return {!rdoc.Tokenizer} Returns this for chaining.
 */
rdoc.Tokenizer.prototype.printError = function(error, caps) {
  console.log(caps ? error.toUpperCase() : error);
  return this;
};


/**
 * An enum of token types.
 *
 * @see JavaDoc for com.google.javascript.rhino.Token
 *
 * @enum {string}
 * @deprecated Use something else.
 */
rdoc.Tokenizer.Type = {
  /** A string token. */
  STRING: 'string',

  /** A number token. */
  NUMBER: 'number',

  /** An object literal token. */
  OBJECTLIT: 'object literal',

  /** A uniqueified name, generated via goog.events.getUniqueId. */
  EVENT: goog.events.getUniqueId('event')
};


/**
 * An enum of HTTP codes - because I'm running out of lexer-related analogies.
 *
 * @enum {number}
 * @private
 */
rdoc.Tokenizer.HTTPResponseCode_ = {
  /** All good! */
  '\\o/': 200,

  /** Look over there! */
  '=>': 303,

  /** Nothing here. */
  '???': 404,

  /** OCRAP */
  '/o\\': 500
};
