#!/usr/bin/env python
# -*- coding: utf8 -*-
encoding='utf8'
'''утф'''

import os, sys, re, codecs
from Source import Source, readFile

## Start concatenation ##

class Token(object):
    def __init__(self, word, parent = None):
        self.src = word
        self.parent = parent
        self.re_ident = re.compile(ur'(?iu)([a-z_][a-z0-9_]*)')
        self.re_operand = re.compile(ur'(?iu)(["\'!@#$%\^&*\(\)_'+\
                                     ur'+=\-<>?;:\[\]{}\\|/`~,.])')
        self.re_dec = re.compile(ur'(?iu)(\b[1-9][0-9]*\b)')
        self.re_oct = re.compile(ur'(?iu)(\b0[0-7]*\b)')
        self.re_hex = re.compile(ur'(?iu)(\b0x[0-9a-f]+\b)')
        self.type = None
        self.node_type = 'leaf'
        self.origin = None
        self.line_id = -1
        self.classify()

    def setOrigin(self, origin):
        self.origin = origin

    def position(self):
        return u'%s[%d]' % (self.origin.filename, self.origin.line_id)

    def text(self):
        return self.src

    def classify(self):
        if self.re_ident.match(self.src):
            self.type = 'ident'
        elif (len(self.src) >= 2) and (self.src[0] == self.src[-1] in u'"\''):
            self.type = 'string'
        elif self.re_operand.match(self.src):
            self.type = 'oper'
        elif self.re_dec.match(self.src):
            self.type = 'decimal'
        elif self.re_oct.match(self.src):
            self.type = 'octuple'
        elif self.re_hex.match(self.src):
            self.type = 'hexadecimal'
        elif self.src in u'\r\n\t ':
            self.type = 'ws'
        else:
            print 'Error: Unclassified: "%s"' % (u', '.join(\
                    [u"'%s'" % (s) for s in self.src]))

class Tokenize(object):
    def __init__(self, source):
        self.source = source
        self.tokens = []
        self.Parse()

    def Parse(self):
        for line in self.source.byLine():
            if len(line):
                parts = self.splitTokens(line)
                if parts:
                    for part in parts:
                        t = Token(part, self.source)
                        t.setOrigin(self.source.getOrigin())
                        self.tokens.append(t)
                else:
                    print 'Tokenize.Parse can not split:'
                    print self.source.origin()
                    print u'`'.join(list(line))
            #t = Token(u'\n', self.source)
            #t.setOrigin(self.source.getOrigin())
            #self.tokens.append(t)

    def splitTokens(self, line):
        pat_ident_start = u'abcdefghijklmnopqrstuvwxyz_' + u'0123456789'
        pat_ident_cont  = pat_ident_start# + u'0123456789'
        pat_whitespace = u' \t\r\n'
        pat_operand = u'"\'!@#$%^&*()_+=-<>?;:[]{}\\|/`~,.'
        t, current = [], u''
        f_ident, f_white, f_string = False, False, False
        for oc in line:
            # oc -- original char
            c = oc.lower()
            do_repeat = True
            while do_repeat:
                do_repeat = False
                if f_ident:
                    if c in pat_ident_cont:
                        current+= oc
                    else:
                        t.append(current)
                        f_ident = False
                        do_repeat = True
                elif f_white:
                    if c in pat_whitespace:
                        pass
                    else:
                        current = oc
                        f_white = False
                        do_repeat = True
                elif f_string:
                    current+= oc
                    if c == f_string:
                        f_string = False
                        t.append(current)
                else:
                    current = u''
                    if c in pat_ident_start:
                        f_ident = True
                        current+= oc
                    elif c in pat_whitespace:
                        f_white = True
                        current+= oc
                    elif c in pat_operand:
                        if c in u'"\'':
                            f_string = c
                            current+= oc
                        else:
                            t.append(oc)
                    else:
                        print u"Error: '%s' undefined." % (oc)
                        return
        if len(current):
            t.append(current)
        return filter(len, t)

## Stop concatenation ##

def main():
    filename = 'tests\source1.cpp'
    s = readFile(filename)
    t = Tokenize(s)
    t.Parse()

if __name__ == '__main__':
    main()
