# data stream
import logging
import cPickle as pkl
from fuel.datasets import TextFile
from fuel.schemes import ConstantScheme
from fuel.streams import DataStream
from fuel.transformers import Batch, Padding, SortMapping, Mapping, Unpack

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def _length(sentence):
    return len(sentence[0])
    

def DStream(uses, datatype, config):
    """Build a padded, batched Fuel data stream over one corpus split.

    Parameters
    ----------
    uses : str
        Which split to read: 'train', 'dev', or 'test'.
    datatype : str
        Which annotation layer to read: 'word', 'pos', 'nterm', or 'op'.
    config : dict
        Must provide 'batch_size_<uses>' (int), '<datatype>_<uses>'
        (corpus file path), '<datatype>_table' (path to a pickled
        token -> id dictionary), and 'unk_token'.

    Returns
    -------
    A Fuel stream yielding zero-padded batches of id sequences; Padding
    also adds a mask source per padded source.

    Raises
    ------
    ValueError
        If `uses` or `datatype` is not one of the accepted values.
    """
    if uses not in ('train', 'dev', 'test'):
        msg = 'wrong uses, which must be one of train, dev, or test'
        logger.error(msg)
        raise ValueError(msg)
    if datatype not in ('word', 'pos', 'nterm', 'op'):
        # Message now matches the actual accepted set (was "corpus, pos, or op").
        msg = 'wrong datatype, which must be one of word, pos, nterm, or op'
        logger.error(msg)
        raise ValueError(msg)

    batch_size = config['batch_size_' + uses]
    file_name = config[datatype + '_' + uses]

    # Load the token -> id lookup table; `with` ensures the file is closed
    # (the original left the handle open).
    with open(config[datatype + '_table'], 'rb') as table_file:
        dictionary = pkl.load(table_file)

    data_set = TextFile(files=[file_name],
                        dictionary=dictionary,
                        level='word',
                        unk_token=config['unk_token'],
                        eos_token=None,
                        bos_token=None)

    stream = DataStream.default_stream(data_set)
    stream.sources = ('sentence',)

    # NOTE(review): optional length-sorted read-ahead (reduces padding waste)
    # is deliberately disabled; re-enable by uncommenting:
    #   stream = Batch(stream, iteration_scheme=ConstantScheme(
    #       batch_size * config['sort_k_batches']))
    #   stream = Mapping(stream, SortMapping(_length))
    #   stream = Unpack(stream)

    # Group examples into fixed-size batches.
    stream = Batch(stream, iteration_scheme=ConstantScheme(batch_size))

    # Zero-pad the sequences in each batch to a common length.
    stream = Padding(stream)

    return stream

if __name__ == '__main__':
    # just for debug DStream
    import configurations
    configuration = getattr(configurations, 'get_config_cnn')()
    print configuration
    ds = DStream(datatype='nterm', uses = 'test', config=configuration)
    for data in ds.get_epoch_iterator():
            print data
            #print mask
