from pygments import lexers
from pygments.lexers import get_lexer_by_name
from pygments import lex

# Module-wide pygments lexer shared by every lbuffer instance;
# stripall=True strips leading/trailing whitespace from the lexed text.
lexer = lexers.get_lexer_by_name("python", stripall=True)

class lbuffer:
    """Lexed view of a source file.

    On construction the file named by *source* is read and these
    attributes are derived:

      lines       -- line count of the file
      flatbuffer  -- entire file contents as a single string
      linebuffer  -- file contents as a list of lines
      tokenbuffer -- flat list alternating str(index) and (tokentype, value)
                     tuples produced by the module-level pygments ``lexer``

    I/O failures are reported to stdout instead of being raised
    (the original best-effort behaviour is preserved).
    """

    def __init__(self, source):
        """Read *source* and build all derived buffers."""
        self.source = source
        try:
            self.lines = self.__file_len()
            self.flatbuffer = self.__lbufferit()
            self.linebuffer = self.__llinebufferit()
            self.tokenbuffer = self.__lstacktokens()
        except IOError as e:
            # Report-and-continue: a half-built instance mirrors the
            # original behaviour.
            print("Problem with file parameter: " + self.source +
                  " Errno/Errstr " + str(e.errno) + " / " + str(e.strerror))

    def __file_len(self):
        """Return the number of lines in the source file.

        Fixed: the original raised NameError on an empty file (the loop
        variable was never bound before ``return i + 1``); an empty file
        now yields 0.  Returns None after reporting an IOError, as before.
        """
        try:
            with open(self.source) as f:
                return sum(1 for _ in f)
        except IOError as e:
            print("Problem with file parameter: " + self.source +
                  " Errno/Errstr " + str(e.errno) + " / " + str(e.strerror))

    def __ltok_cnt(self, tokens):
        """Return how many tokens the lexer produces for *tokens*.

        Fixed: the original raised NameError when the lexer yielded
        nothing; an empty token stream now counts as 0.
        """
        return sum(1 for _ in lex(tokens, lexer))

    def __lbufferit(self):
        """Return the whole source file as one string."""
        with open(self.source, "r") as f:
            return f.read()

    def __llinebufferit(self, idx='False'):
        """Return the source file as a list of lines.

        idx -- legacy string flag; 'True' (or boolean True) interleaves
               each line with its stringified index: [str(i), line, ...].
               Any other value yields the plain line list (the original
               silently returned [] for unrecognized flag values).
        """
        with open(self.source, "r") as f:
            raw = f.readlines()
        if idx == 'True' or idx is True:
            indexed = []
            for i, line in enumerate(raw):
                indexed.append(str(i))
                indexed.append(line)
            return indexed
        return raw

    def __lstacktokens(self):
        """Return a flat [str(index), (tokentype, value), ...] token list.

        Lexing errors are still swallowed (best-effort, as before), but the
        bare ``except`` is narrowed to ``Exception`` so SystemExit and
        KeyboardInterrupt propagate.
        """
        tokbuffer = []
        try:
            for i, tok in enumerate(lex(self.flatbuffer, lexer)):
                tokbuffer.append(str(i))
                tokbuffer.append(tok)
        except Exception:
            pass
        return tokbuffer

    def __lbuffer_updatesrc(self):
        """Rewrite the source file from the line buffer, then rebuild buffers.

        BUG FIX: the original iterated ``self.llbuffer``, an attribute that
        is never created; the list built in __init__ is ``self.linebuffer``.
        The file handle is now also closed via a context manager (the
        explicit seek(0) was redundant: mode "w" truncates).
        """
        with open(self.source, "w") as f:
            for line in self.linebuffer:
                f.write(line)
        self.__init__(self.source)

    def ldryline(self, iline, itoken, istring):
        """Format a dry-run description of a line edit.

        iline is the stringified line index; the lookup requires linebuffer
        in indexed form -- NOTE(review): __init__ builds it UN-indexed
        (idx defaults to 'False'); confirm callers rebuild it with
        __llinebufferit(idx='True') first.
        """
        pos = self.linebuffer.index(str(iline)) + 1  # value follows its index
        return (iline + ',' + str(pos) + ',' + itoken + ',' + istring +
                ', :: ' + self.linebuffer[pos])

    def ldrytoken(self, itoken, istring):
        """Return the encoded value of token *itoken* from the token buffer.

        FIXED off-by-one: tokenbuffer alternates [str(index), (type, value)],
        so a token's tuple sits one slot AFTER its index entry (exactly the
        ``+ 1`` step ldryline takes), but the original stepped one slot BACK,
        returning the previous token's value -- and, for index 0, the very
        last tuple via negative indexing.  *istring* is unused but kept for
        interface compatibility.
        """
        pos = self.tokenbuffer.index(str(itoken)) + 1
        return self.tokenbuffer[pos][1].encode()

    def __tokenbuffer_update(self, tokidx, newtoken):
        """Splice the tokens of *newtoken* into tokenbuffer at index *tokidx*.

        The replacement text is lexed, its tokens are inserted starting at
        the slot currently holding str(tokidx), and every index entry after
        the insertion is shifted up by the number of new tokens.  Returns
        the updated tokenbuffer.
        """
        insertidx = self.tokenbuffer.index(str(tokidx))
        # newtoken is lexed twice on purpose: once to count (the lexer
        # returns a one-shot generator), once to insert.
        shift = self.__ltok_cnt(newtoken)
        for tokentuple in lex(newtoken, lexer):
            self.tokenbuffer.insert(insertidx, str(tokidx))
            self.tokenbuffer.insert(insertidx + 1, tokentuple)
            insertidx += 2
            tokidx += 1
        # Renumber the remaining index entries (every other slot).
        last = len(self.tokenbuffer) - 2
        while insertidx <= last:
            self.tokenbuffer[insertidx] = str(int(self.tokenbuffer[insertidx]) + shift)
            insertidx += 2
        return self.tokenbuffer
