#!/usr/bin/env python
# This module takes a piece of text and returns that text in Unisyn's accent specific lexical keysymbols.

import nltk
import config
import dicts
# TODO: read the input HTML/text file to process (source of the running text).

def replace_text(tokens):
    """Map POS-tagged tokens to Unisyn accent-specific lexical keysymbols.

    Parameters
    ----------
    tokens : list of (word, pos_tag) pairs, as produced by ``tag_tokens``.

    Returns
    -------
    list
        Currently the tagged tokens, unchanged.

    TODO(review): the actual lookup against the Unisyn lexicon (presumably
    via the ``dicts``/``config`` modules imported above — confirm) is not
    implemented yet. The original body was empty, which made the whole
    module a SyntaxError; this pass-through keeps the pipeline runnable.
    """
    return list(tokens)

def tag_tokens(tokens):
    """Return (token, POS-tag) pairs for *tokens* using NLTK's default tagger."""
    return nltk.pos_tag(tokens)


 
def tokenize(raw):
    """Split *raw* text into a flat list of word tokens.

    The text is first segmented into sentences, each sentence is stripped
    of surrounding whitespace, then word-tokenized; the word tokens from
    all sentences are concatenated in order.

    Parameters
    ----------
    raw : str
        The cleaned running text.

    Returns
    -------
    list of str
    """
    tokens = []  # fixed: was referenced without ever being initialized (NameError)
    for sent in nltk.sent_tokenize(raw):
        tokens.extend(nltk.word_tokenize(sent.strip()))
    return tokens
    
   
def clean(path):
    """Read the file at *path* and return its text lowercased with CRLF
    line breaks removed.

    Fixes in this revision:
    - the parameter was named ``words`` but never used; the body read an
      undefined global ``path`` (the caller passes a path positionally).
    - the file is opened with ``newline=''`` so literal ``'\r\n'``
      sequences survive reading; under the default universal-newline
      mode they are translated to ``'\n'`` and the replace below could
      never match.
    - the file handle is now closed deterministically via ``with``.

    Parameters
    ----------
    path : str
        Path of the text file to read.

    Returns
    -------
    str
    """
    with open(path, newline='') as f:
        raw = f.read()
    raw = raw.lower()
    raw = raw.replace('\r\n', '')
    return raw
    
def main():
    """Run the full pipeline: read a file, tokenize, POS-tag, and convert
    the tagged tokens to Unisyn lexical keysymbols.

    Fixes in this revision: the original body had a dangling assignment
    (``running_text =``, a SyntaxError), a stray bare name (``user``), an
    undefined ``path`` variable, and called an undefined ``tag_text`` —
    the defined tagger is ``tag_tokens`` and it takes tokens, not a path.
    """
    import sys

    # `path` was never defined anywhere; take it from the command line.
    if len(sys.argv) < 2:
        raise SystemExit("usage: python <this module> <textfile>")
    path = sys.argv[1]

    raw = clean(path)
    tokens = tokenize(raw)
    text_tags = tag_tokens(tokens)
    running_prons = replace_text(text_tags)
    print(running_prons)

# fixed: the guard compared against 'main', which __name__ never equals;
# the script entry point must check for '__main__'.
if __name__ == '__main__':
    main()

