
#include "stdafx.h"

using namespace boost::python;
using namespace std;

struct PY_TOKENIZER_CALLBACK : ds::TOKENIZER_ERROR_CALLBACK {
   PY_TOKENIZER_CALLBACK(object cb)
      : cb(cb)
   {}

   object cb;

   virtual void Error(ds::TOKENIZER_ERROR_TYPE err, int line, char ch = ' ') {
      switch (err) {
         case ds::TOKEN_ERR_EOF_IN_COMMENT:
            cb.attr("eofInComment")(line);
            break;
         case ds::TOKEN_ERR_EOLN_IN_LITERAL:
            cb.attr("eolnInLiteral")(line);
            break;
         case ds::TOKEN_ERR_UNEXPECTED_SYMBOL:
            cb.attr("unexpectedSymbol")(line, ch);
            break;
         default:
            STRONG_ASSERT(!"Unknown tokenizer error");
            break;
      }
   }
};

// Python-visible token record: source line, type tag and raw text value.
struct PY_TOKEN {
   int line;    // source line the token was declared on
   string type; // 'int', 'float', 'string', 'word' or 'symbol'
   string value;

   // BUG FIX: the original initializer list had line(0), which silently
   // discarded the 'line' argument — the exported init<string, string, int>
   // constructor never actually set the line number.
   PY_TOKEN(string type = string(), string value = string(), int line = 0)
      : line(line)
      , type(type)
      , value(value)
   {}

   // Boost.Python class registration (project CLASS_WRAPPER machinery).
   CLASS_WRAPPER_BEGIN(PY_TOKEN)
      class_<PY_TOKEN> ("Token")
         .def(init<string>())
         .def(init<string, string>())
         .def(init<string, string, int>())
         .def_readwrite("line",      &PY_TOKEN::line,     "token declaration line")
         .def_readwrite("type",      &PY_TOKEN::type,     "token type (could be 'int', 'float', 'string', 'word' or 'symbol')")
         .def_readwrite("value",     &PY_TOKEN::value,    "token value")
      ;
   CLASS_WRAPPER_END()
};

// Tokenizes 'text' with the C++-like tokenizer, reporting any errors
// through the Python callback object 'cb'.  Returns a Python list of
// Token objects on success, or None when tokenization fails.
object Tokenize(string text, object cb)
{
   ds::CPP_LIKE_TOKENIZER tokenizer;
   tokenizer.RegisterCppSymbols(ds::TOKEN_USER);

   PY_TOKENIZER_CALLBACK errorHandler(cb);
   dsVECTOR<ds::TOKEN> tokens;

   bool ok = tokenizer.Tokenize(text.c_str(), tokens, &errorHandler);
   if (!ok) {
      return object(); // None — errors were already delivered via the callback
   }

   list result;

   for (int idx = 0; idx < tokens.Length(); ++idx) {
      PY_TOKEN * dst = NULL;
      object wrapped = PY_TOKEN::New(&dst);

      dst->line  = tokens[idx].line;
      dst->value = tokens[idx].value.CStr();

      // Map the native token kind to its Python-facing type tag.
      const char * typeName;
      switch (tokens[idx].token) {
         case ds::TOKEN_INT:     typeName = "int";    break;
         case ds::TOKEN_FLOAT:   typeName = "float";  break;
         case ds::TOKEN_WORD:    typeName = "word";   break;
         case ds::TOKEN_STRING:  typeName = "string"; break;
         case ds::TOKEN_USER:    typeName = "symbol"; break;
         default:
            STRONG_ASSERT(!"Unknown token type");
            typeName = "string";
            break;
      }
      dst->type = typeName;

      result.append(wrapped);
   }

   return result;
}

// Python module entry point: exposes the tokenize() function and
// registers the Token class wrapper.
BOOST_PYTHON_MODULE(Tokenizer)
{
   def("tokenize", Tokenize);
   PY_TOKEN::Type(); // triggers class_<PY_TOKEN> registration (CLASS_WRAPPER machinery)
}