import process_efw
import window_coding
import cantor
import feedforward
import net

# Hyper-parameters for the direction setup and the encoder/decoder nets.
OFFSET = 0.9            # passed to cantor.setup_directions; also fills DEFAULT_ENCODING below
DEFAULT = 0.9           # 'default' argument to cantor.setup_directions -- semantics defined there
FILENAME =  "my_efw"    # corpus file consumed by process_efw.process_file
CODESIZE = 20           # dimensionality of the learned word code (encoder output size)
ENLAYERS = [100]        # hidden-layer sizes for the encoder net
DELAYERS = [190]        # hidden-layer sizes for the decoder net

def setup_big_nets(input_dim, en_dim, en_hidden, de_hidden):
    """Build encoder/decoder nets plus matching zero-initialised momentum nets.

    The encoder maps 3 character inputs + the running code (input_dim*3 + en_dim)
    down to a code of size en_dim; the decoder maps a code + 2 character inputs
    back to one character + a code.  Returns the 4-tuple
    (encoder, decoder, en_mom, de_mom).
    """
    encoder_sizes = [3 * input_dim + en_dim] + en_hidden + [en_dim]
    decoder_sizes = [en_dim + 2 * input_dim] + de_hidden + [input_dim + en_dim]

    # Weight nets first (same generate_net call order as always: encoder then
    # decoder with gaussian init), then the momentum nets, all-zero.
    nets = []
    for sizes in (encoder_sizes, decoder_sizes):
        nets.append(feedforward.generate_net(sizes, net.gaussianGen))
    for sizes in (encoder_sizes, decoder_sizes):
        nets.append(feedforward.generate_net(sizes, lambda: 0.0))

    return tuple(nets)



# Character inventory; index 0 ('#') acts as the padding / default character.
alphabet = ['#','a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z','-', "'", '.']

# Per-character direction vectors and their dimensionality.
# NOTE(review): exact shape of each entry is defined by cantor.setup_directions -- not visible here.
(directions, dim) = cantor.setup_directions(alphabet, offset = OFFSET, default = DEFAULT)

# Configure the window_coding module's defaults before any adapt() calls.
window_coding.DEFAULT_ENCODING = [OFFSET for c in xrange(CODESIZE)]
window_coding.DEFAULT_INPUT = directions[0]     # '#' is default character
# End-of-word code: the first component of '#'s direction, repeated CODESIZE times.
window_coding.ENDWORD_CODING = [directions[0][0] for n in xrange(CODESIZE)]

# N-gram generator built from the corpus file (presumably samples word index sequences;
# confirm against process_efw).
gram = process_efw.WordCorpusGram(process_efw.process_file(FILENAME), alphabet)

# Module-level nets and momentum buffers used (and rebound) by train() below.
(encoder, decoder, en_mom, de_mom) = setup_big_nets(dim, CODESIZE, ENLAYERS, DELAYERS)

def checknan(mom):
    """Raise ValueError naming the first NaN found in a momentum net.

    `mom` is a nested sequence (layer -> node -> weight) of floats.
    Returns None when no NaN is present.

    Bug fix: the original did `raise "mom[...]"` -- string exceptions were
    removed in Python 2.6, so that line itself raised TypeError instead of
    reporting the NaN.  Raise a proper ValueError instead.
    """
    for layer_idx, layer in enumerate(mom):
        for node_idx, node in enumerate(layer):
            for weight_idx, value in enumerate(node):
                # NaN is the only float value that compares unequal to itself;
                # this avoids relying on the platform's str() spelling of NaN.
                if value != value:
                    raise ValueError("mom[%d][%d][%d] nan"
                                     % (layer_idx, node_idx, weight_idx))


def train(n):
    """Run n adaptation passes over freshly sampled word sequences.

    Rebinds the module-level momentum nets after every step and prints the
    per-step error.  Returns the final (en_mom, de_mom, error) triple.
    NOTE(review): with n == 0, `error` is never bound and the return line
    raises UnboundLocalError -- identical to the original behaviour.
    """
    global en_mom, de_mom
    step = 0
    while step < n:
        sampled = gram.generate()
        seq = map(lambda idx: directions[idx], sampled)
        (en_mom, de_mom, error) = window_coding.adapt(seq, encoder, decoder, en_mom, de_mom)
        print error
        step += 1

    return (en_mom, de_mom, error)

#train(100000)
