/*
 * This file is part of din.
 *
 * din is copyright (c) 2006 - 2012 S Jagannathan <jag@dinisnoise.org>
 * For more information, please visit http://dinisnoise.org
 *
 * din is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * din is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with din.  If not, see <http://www.gnu.org/licenses/>.
 *
*/
// string tokenizer
// based on initial code by Song Ho Ahn (song.ahn@gmail.com)

#include "tokenizer.h"
#include <cstdlib>

#include <iostream>
using namespace std;

// default construction: empty input buffer, stock delimiter set
tokenizer::tokenizer () {
  str (std::string ());
  del (DEFAULT_DELIMITER);
}

// construct with an input string and a delimiter set
tokenizer::tokenizer (const std::string& s, const std::string& d) {
  set (s, d); // set () simply forwards to str () and del ()
}

// construct from an already-split token list (token-vector mode)
tokenizer::tokenizer (const std::vector<std::string>& toks) {
  vec (toks);
}

// install a new input string and delimiter set in one call
void tokenizer::set (const std::string& s, const std::string& d) {
  del (d); // order does not matter: str () and del () touch disjoint state
  str (s);
}

// load a fresh string to tokenize: caches its length, rewinds the
// cursor, and switches off token-vector mode
void tokenizer::str (const std::string& s) {
  tokens_available = 0; // back to string-scanning mode
  buffer = s;
  blen = buffer.length ();
  init_cur ();
}

// install the set of delimiter characters and cache its length
void tokenizer::del (const std::string& d) {
  delimiter = d;
  dlen = d.length ();
}

// switch to token-vector mode: tokens are handed out from t in order
void tokenizer::vec (const std::vector<std::string>& t) {
  tokid = 0; // start dispensing from the first token
  tokens = t;
  tokens_available = 1;
}

// rewind the scan cursor; -1 flags an empty buffer
void tokenizer::init_cur () {
  cur = blen ? 0 : -1;
}

// extract the next token into s.
// In token-vector mode, tokens are dispensed from the stored vector;
// once exhausted, s becomes "" and the string cursor is marked spent.
// In string mode, leading delimiters are skipped and characters are
// gathered until the next delimiter (or end of buffer).
tokenizer& tokenizer::operator>> (std::string& s) {
  if (tokens_available) {
    if (tokid < tokens.size ()) {
      s = tokens[tokid++];
    } else {
      s = "";
      cur = blen; // exhausted: also mark the string cursor at the end
    }
    return *this;
  }

  token = "";
  if (blen < 1) { // nothing to scan
    s = token;
    return *this;
  }

  skip_ws ();
  while (cur < blen) {
    char c = buffer[cur];
    if (isdelim (c)) break;
    token += c;
    ++cur;
  }
  s = token;
  return *this;
}

// extract the next token and convert it to float (0 if not numeric)
tokenizer& tokenizer::operator>> (float& f) {
  std::string tok;
  *this >> tok;
  f = static_cast<float> (atof (tok.c_str ()));
  return *this;
}

// extract the next token and convert it to int (0 if not numeric)
tokenizer& tokenizer::operator>> (int& i) {
  std::string tok;
  *this >> tok;
  i = atoi (tok.c_str ());
  return *this;
}

// extract the next token and take its first character.
// c is left untouched when the token is empty.
tokenizer& tokenizer::operator>> (char& c) {
  std::string tok;
  *this >> tok;
  if (!tok.empty ()) c = tok[0];
  return *this;
}

// extract the next token and convert it to double (0 if not numeric)
tokenizer& tokenizer::operator>> (double& d) {
  std::string tok;
  *this >> tok;
  d = atof (tok.c_str ());
  return *this;
}

// advance the cursor past any run of delimiter characters
void tokenizer::skip_ws () {
  while ((cur < blen) && isdelim (buffer[cur])) ++cur;
}

// is c one of the delimiter characters?
// Uses std::string::find instead of the previous hand-rolled scan
// (same result, and drops the signed loop-index comparison).
bool tokenizer::isdelim (char c) {
  return delimiter.find (c) != std::string::npos;
}

// return everything not yet consumed.
// Token-vector mode: the remaining tokens joined with a trailing space
// after each (matching the original formatting).
// String mode: the rest of the buffer from the cursor, or "" when the
// buffer was empty (cur == -1).
//
// Bug fix: the old code called buffer.substr (cur, blen - 1), but
// substr's second argument is a COUNT, not an end position — when
// cur == 0 that silently dropped the final character of the buffer.
// substr (cur) returns the full tail.
string tokenizer::cur2end () {
  if (tokens_available) {
    string result;
    for (std::size_t i = tokid, j = tokens.size (); i < j; ++i) {
      result += tokens[i];
      result += ' ';
    }
    return result;
  }
  if (cur > -1) return buffer.substr (cur); // cur never exceeds blen, so this is safe
  return "";
}
