//tokens.cpp
//Authors: Matthew Lorenzo and Daniel Diaz

//History:
// 2/22/12 - Created cpp file for tokenizing
// 2/24/12 - Edited cpp file implementing tokenizing by reading line by line, and character by character
// 2/28/12 - Edited Tokenize function to most recent version, also still needs testing 

#include "tokens.hh"
#include <fstream>
#include <iostream>

using namespace std; 

// Separate the input line by line; look for '#' to find comments and cut the string apart,
// tokenizing the comment, then split the remaining string on whitespace and analyze each
// piece that is separated out





//Defining Tokenize function
//This function takes a list of tokens from a previous file(if 2+ files)
//and adds new tokens from the file to the existing vector of tokens
//
// Reads `filename` line by line.  A non-empty line whose LAST character is
// ':' is treated as a label: the colon is stripped and the remainder becomes
// a single token.  Every other line is split on spaces and tabs, with '(',
// ')' and ',' each emitted as their own one-character token.  Each line's
// tokens are handed to TH->AppendToken together with the 1-based line
// number, the filename, and a running group counter `position`.
//
// NOTE(review): AppendToken's exact contract lives in tokens.hh — not
// visible here; assumed to copy the vector it is given.
void Tokenize(TokensHeader * TH, string filename)
{
    ifstream myfile;
    myfile.open (filename.c_str());
    vector<string> T;            // tokens collected for the current line
    string line;
    string tok;                  // token currently being accumulated
    unsigned int linenumber=0;   // 1-based source line counter
    unsigned int position=0;     // index of this token group within the file
    while(getline(myfile,line))
    {
        linenumber++;
        if(line.length() != 0)
        {
            // BUG FIX: the original tested line[line.length()] — one past
            // the end of the string — so this branch could never be taken
            // (and the matching erase(line.length()) removed nothing).
            // The last valid character is at index length()-1.
            if(line[line.length()-1] == ':')
            {
                line.erase(line.length()-1);   // drop the trailing ':'
                if(!line.empty())
                    T.push_back(line);         // label name is one token
            }
            else
            {
                for(unsigned int i=0; i<line.length(); i++)
                {
                    char c = line[i];
                    // whitespace terminates the token in progress
                    // (tabs added: the stated intent is whitespace splitting)
                    if(c==' ' || c=='\t')
                    {
                        if(!tok.empty())
                        {
                            T.push_back(tok);
                            tok.clear();
                        }
                    }
                    // punctuation both ends the current token and is
                    // emitted as its own one-character token
                    else if(c=='(' || c==')' || c==',')
                    {
                        if(!tok.empty())
                        {
                            T.push_back(tok);
                            tok.clear();
                        }
                        T.push_back(string(1, c));
                    }
                    else
                    {
                        tok += c;   // ordinary character: extend current token
                    }
                }
            }
            // flush a token still pending at end of line
            if(!tok.empty())
            {
                T.push_back(tok);
                tok.clear();
            }
        }
        if(!T.empty())
        {
            TH->AppendToken(T,linenumber,filename,position);
            position++;
        }
        T.clear();
    }
    myfile.close();
}
    
    /* NOTE: earlier, non-compiling draft of Tokenize (goto-based, misuses
       string as a stream and deletes non-pointers); kept for reference only
       and superseded by the implementation above.

    
    ifstream inFile;
    inFile.open(filename, ios::in);
    
    string line; 
    unsigned int x;
    unsigned int size;
    
begin:
    while(getline(filename, line))
    {
        
        th.IncLine();
        x=0;
        while(!line[x]==0)
        {
            if(line[x]=='#') //comment tokenizing
            {
                size=line.length()-x+1; //gets the length of the line from # and beyond
                comment=string(line,x-1,size); //constructs a string starting at "x-1" position and ending after "size" characters
                th.AppendToken(comment,th.GetLinenum(),filename,'c');
                delete comment;
                goto begin; //jumps to next line since all text after # is commented
            }
            
            else if(line[x]=='.') //directive tokenizing
            {
                while(!(line[x]==' ' || line[x]=='"'))
                {
                    directive=new string;
                    directive+=line[x]; //tokenizes the directive by appending character by character until a whitepace is reached
                    x++;
                }
                th.AppendToken(directive,th.GetLinenum(),filename,'d');
                delete directive;
            }
            
            else if(line[x]==':')
            {
                x=0;
                while(!line[x]==':')
                {
                    label=new string;
                    label+=line[x];
                    x++;
                }
                label+=":";
                th.AppendToken(label,th.GetLinenum(),filename,'l');
                delete label;
            }
            

            else if(line[x]=='$')
            {
                while(!line[x]==',' || line[x]==' ' || line[x]='/n')
                {
                    reg= new string;
                    reg+=line[x];
                    x++;
                }
                th.AppendToken(reg,th.GetLinenum(),filename,'r');
                delete reg;
            }
            
            else if(line[x]=='(')
            {
                th.AppendToken("(",th.GetLinenum(),filename,'a');
            }
            
            else if(line[x]==')')
            {
                th.AppendToken(")",th.GetLinenum(),filename,'b');
            }
            
            else if(line[x]==',')
            {
                th.AppendToken(",",th.GetLinenum(),filename,'z');
            }
            
            else if(line[x]!=' ')
            {
                while(line[x]!=' ' || line[x]!=',' || line[x]!='(' || line[x]!='/n')
                {
                    s=new string;
                    s+=line[x];
                    x++;
                }
                th.AppendToken(s,th.GetLinenum(),filename,'u');
                delete s;
            }
            else{}
            x++;
            
        }
    }
    ~th;
}
*/
