'''
Created on Mar 20, 2013

@author: pvenkateshan
'''

import string 
from random import choice
from collections import defaultdict

class Markov(object):
	"""Markov-chain text generator.

	Trains bigram and trigram transition tables on text whose paragraphs
	are delimited by the literal marker '!@#', then generates new text by
	randomly walking those chains.
	"""

	def __init__(self, stringfile):
		"""Build the bigram/trigram transition tables from the training text.

		stringfile: the full training text as a single string; paragraphs
		are separated by the literal marker '!@#'.
		"""
		# Per-instance tables: a class-level defaultdict would be mutable
		# state shared across every instance.
		self.bigrams = defaultdict(list)
		self.trigrams = defaultdict(list)

		# Collapse all line breaks and tabs to spaces so whitespace
		# splitting yields clean tokens.
		lines = stringfile.replace('\r\n', ' ')
		lines = lines.replace('\n', ' ')
		lines = lines.replace('\t', ' ')

		for para in lines.split('!@#'):
			# Surround every punctuation mark with spaces so each mark is
			# tokenized as its own word.
			for c in string.punctuation:
				para = para.replace(c, ' ' + c + ' ')
			tempwords = para.split()
			if not tempwords:
				continue
			# Sentinel tokens let generation start with no prior context
			# and signal where a paragraph ends.
			words = ['$start-3', '$start-2', '$start-1']
			words.extend(tempwords)
			words.extend(['$end_1', '$end_2', '$end_3', '$end_4'])

			for i in range(len(words) - 3):
				self.trigrams[(words[i], words[i + 1], words[i + 2])].append(words[i + 3])
			for i in range(len(words) - 2):
				self.bigrams[(words[i], words[i + 1])].append(words[i + 2])

	def generateText(self):
		"""Generate and return a text string by walking the chains.

		Each step randomly chooses between a bigram and a trigram
		transition. Generation stops at a paragraph-end sentinel, when no
		continuation exists for the current context, or at the first
		sentence break after 1000 tokens.
		"""
		autogen = []
		wordLeft = '$start-3'
		word = '$start-2'
		wordRight = '$start-1'
		flag = 0
		while wordRight != '$end_4':
			# Use .get so probing an unseen context does not permanently
			# insert an empty list into the defaultdict tables.
			if flag:
				nextWordList = self.trigrams.get((wordLeft, word, wordRight), [])
			else:
				nextWordList = self.bigrams.get((word, wordRight), [])
			if not nextWordList:
				# A trigram context reached via a bigram step may never
				# have been observed during training; fall back to the
				# bigram table, and stop cleanly if that is empty too
				# (the original indexed [0] on the empty list and crashed
				# with an uncaught IndexError).
				nextWordList = self.bigrams.get((word, wordRight), [])
				if not nextWordList:
					break
			nextWord = choice(nextWordList)
			autogen.append(nextWord)
			# Randomly pick which table drives the next transition.
			flag = choice([0, 1])
			# Always shift the full three-word window so the trigram
			# context is the actual last three generated words (the
			# original left wordLeft stale after a bigram step).
			wordLeft = word
			word = wordRight
			wordRight = nextWord
			# Cut off at a sentence boundary once the output is long.
			if '.' in nextWord and len(autogen) > 1000:
				break

		txt = ' '.join(autogen)
		# Strip sentinel/separator markers from the rendered output.
		txt = txt.replace('!@#', '\n')
		txt = txt.replace('! @ #', '\n')
		txt = txt.replace('$end_1 $end_2 $end_3 $end_4', '')

		return txt
		
'''
Usage:
markov = Markov(trainingString)  # trainingString: text with '!@#' paragraph separators
print markov.generateText()

'''
#stringuri = '''%21%40%23+++++++++++++++++++++++++++++++++++++++%0D%0AInformation+about+instruction+criticality+can+be+used+to%0D%0Acontrol+the+application+of+micro-architectural+resources+efficiently.%0D%0ATo+this+end%2C+several+groups+have+proposed+methods%0D%0Ato+predict+critical+instructions.+This+paper+presents+a+framework%0D%0Athat+allows+us+to+directly+measure+the+criticality+of+individual%0D%0Adynamic+instructions.+This+allows+us+to+%281%29+measure%0D%0Athe+accuracy+of+proposed+critical+path+predictors%2C+%282%29+quantify%0D%0Athe+amount+of+slack+present+in+non-critical+instructions%2C+and%0D%0A%283%29+provide+a+new+metric%2C+called+tautness%2C+which+ranks+critical%0D%0Ainstructions+by+their+dominance+on+the+critical+path.+This%0D%0Aresearch+investigates+methods+for+improving+critical+path+predictor%0D%0Aaccuracy+and+studies+the+distribution+of+slack+and+tautness%0D%0Ain+programs.+It+shows+that+instruction+criticality+changes%0D%0Adynamically%2C+and+that+criticality+history+patterns+can+be+used%0D%0Ato+significantly+improve+predictor+accuracy.%0D%0A%0D%0A%21%40%23%0D%0A%0D%0AMany+emerging+processor+microarchitectures+seek+to%0D%0Amanage+technological+constraints+%28e.g.%2C+wire+delay%2C%0D%0Apower%2C+and+circuit+complexity%29+by+resorting+to+nonuniform%0D%0Adesigns+that+provide+resources+at+multiple+quality%0D%0Alevels+%28e.g.%2C+fast%2Fslow+bypass+paths%2C+multi-speed+functional%0D%0Aunits%2C+and+grid+architectures%29.+In+such+designs%2C%0D%0Athe+constraint+problem+becomes+a+control+problem%2C+and%0D%0Athe+challenge+becomes+designing+a+control+policy+that%0D%0Amitigates+the+performance+penalty+of+the+non-uniformity.%0D%0AGiven+the+increasing+importance+of+non-uniform+control%0D%0Apolicies%2C+we+believe+it+is+appropriate+to+examine+them+in%0D%0Atheir+own+right.%0D%0ATo+this+end%2C+we+develop+slack+for+use+in+creating+control%0D%0Apolicies+that+match+program+execution+behavior+to%0D%0Amachine+design.+Intuitively%2C+the+slack+of+a+dynamic+instruction%0D%0Ai+is
+the+number+of+cycles+i+can+be+delayed+with%0D%0Ano+effect+on+execution+time.+This+property+makes+slack+a%0D%0Anatural+candidate+for+hiding+non-uniform+latencies.%0D%0AWe+make+three+contributions+in+our+exploration%0D%0Aof+slack.+First%2C+we+formally+define+slack%2C+distinguish%0D%0Athree+variants+%28local%2C+global+and+apportioned%29%2C+and%0D%0Aperform+a+limit+study+to+show+that+slack+is+prevalent%0D%0Ain+our+SPEC2000+workload.+Second%2C+we+show+how%0D%0Ato+predict+slack+in+hardware.+Third%2C+we+illustrate+how%0D%0Ato+create+a+control+policy+based+on+slack+for+steering%0D%0Ainstructions+among+fast+%28high+power%29+and+slow+%28lower%0D%0Apower%29+pipelines.%0D%0A%0D%0A%21%40%23

#print urllib.unquote_plus(urllib.unquote(stringuri))
# markov = Markov(urllib.unquote_plus(urllib.unquote(stringuri)))
# print "gentext"
# print markov.generateText()
# inputfile = open('RandomTextTraining/abstracts.txt')
# markov = Markov(inputfile.read())
# print markov.generateText()

