import coding
import cantor_net
import process_efw

# Tunable constants for the training/generation script.
# NOTE(review): OFFSET and DEFAULT are not read in this file — presumably
# consumed by the imported modules or other scripts; verify before removing.
OFFSET = 0.9
DEFAULT = 0.9
FILENAME = "my_efw"
HIDLAYERS = [100]

# Symbol inventory: '#' sentinel, the 26 lowercase letters, then punctuation.
alphabet = ['#'] + list('abcdefghijklmnopqrstuvwxyz') + ['-', "'", '.']

# Two grammars over the same alphabet: an unconstrained one, and one built
# from the word corpus stored in FILENAME.
freegram = coding.FreeGrammar(10, alphabet)
efwgram = process_efw.WordCorpusGram(process_efw.process_file(FILENAME), alphabet)

# Build the nets once, then assemble one argument tuple per grammar for
# cantor_net.train.  setup_nets returns (encoder, decoder, directions,
# grammar) — the original code's tuple arithmetic relies on this being a
# plain tuple.
encoder, decoder, directions, freegram = cantor_net.setup_nets(freegram, HIDLAYERS)

# Momentum placeholders handed through to train(); initially unset.
en_mom = None
de_mom = None

free_params = (encoder, decoder, directions, freegram, en_mom, de_mom)
efw_params = (encoder, decoder, directions, efwgram, en_mom, de_mom)

# Example: (encoder, decoder, enval, deval) = cantor_net.train(5, *free_params)

def fsteps(n):
    """Train for n steps on the free grammar; return the validation values
    (the last two elements of cantor_net.train's result)."""
    return cantor_net.train(n, learnrate = 0.02, *free_params)[2:]

def esteps(n):
    """Train for n steps on the efw word-corpus grammar; return the
    validation values (the last two elements of cantor_net.train's result)."""
    return cantor_net.train(n, learnrate = 0.02, *efw_params)[2:]

def apply():
    """Generate a symbol sequence from the efw grammar, print it, then
    print the encoder/decoder round-trip of that sequence.

    NOTE(review): shadows the Python 2 builtin `apply`; name kept so
    existing callers keep working.
    """
    numseq = efwgram.generate()
    # Single-argument parenthesized print behaves identically under
    # Python 2's print statement and keeps the file parseable by Python 3.
    print("INPUT:")
    print([alphabet[n] for n in numseq])
    print("OUTPUT:")
    print(cantor_net.apply(encoder, decoder, numseq, alphabet, directions))
