import os
import shutil
from multiprocessing import Pipe, Process
from os.path import join, exists

from Lexicon import Lexicon
from ML import ML
from TranslationModel import TranslationModel
from config import *

class OfflineTranslationModel:
	'''
	compute offline sum P(q|w)*P(w|D)
	'''

	def create_multi(self, begin, end, doc_list, trans_model):
#		ans = []
		for i in xrange(begin, end):
			doc = i - begin
			print 'Processing doc ' + str(i)
			dic = {}
			for wordid in doc_list[doc].getWordsList():
				extensionlist = trans_model.getExtensionList(wordid)
				for trans_id in extensionlist:
					if dic.has_key(trans_id):
						dic[trans_id] = dic[trans_id] + trans_model.getProb(wordid,	trans_id) * doc_list[doc].getProb(wordid)
					else:
						dic[trans_id] = trans_model.getProb(wordid, trans_id) *	doc_list[doc].getProb(wordid)
			f = open(join(tmp_path,str(i)), 'w')
			line = ''
			for (key, value) in dic.items():
				line = line + str(key) + ':' + str(value) + ' '
			f.write(line+'\n')
			f.close()
#			ans.append(dic)
#		conn.send(ans)
#		conn.close()

	def create(self,is_filt=False):
		if os.path.exists(tmp_path):
			os.rmdir(tmp_path)
		os.mkdir(tmp_path)
		lexicon = Lexicon()
		lexicon.load()
		doc_list = []
		offline_tm = []
		for doc in xrange(doccount):
			ml = ML(str(doc))
			ml.load()
			doc_list.append(ml)
		trans_model = TranslationModel()
		trans_model.load(is_filt)

		kernelnum = 16
#		connectPairs = []
		pools = []
		for i in xrange(kernelnum):
		#	connectPairs.append(Pipe())
			pools.append(Process(target=self.create_multi,args=(doccount*i/kernelnum,doccount*(i+1)/kernelnum, doc_list[doccount*i/kernelnum:doccount*(i+1)/kernelnum],trans_model)))
		for i in xrange(kernelnum):
			pools[i].start()
#		for i in xrange(kernelnum):
#			l = connectPairs[i][1].recv()
#			for m in l:
#				offline_tm.append(m)
#			connectPairs[i][1].close()
		for i in xrange(kernelnum):
			pools[i].join()
		
		wf = open(Offline_TM_path, 'w')
		for doc in os.listdir(tmp_path):
			f = open(join(tmp_path, str(doc)), 'r')
			wf.write(f.read())
			f.close()
		wf.close()

	def load(self):
		self.offline_tm = []
		f = open(Offline_TM_path, 'r')
		lines = f.readlines()
		f.close()
		for i in xrange(len(lines)):
			items = lines[i].split()
			dic = {}
			for item in items:
				dic[int(item.split(':')[0])] = float(item.split(':')[1].strip())
			self.offline_tm.append(dic)
	
	def getProb(self, docId, wordId):
		if self.offline_tm[docId].has_key(wordId):
			return self.offline_tm[docId][wordId]
		else:
			return 0.0

if __name__ == '__main__':
	otm = OfflineTranslationModel()
	otm.create(True)
	otm.load()
	print otm.getProb(5182, 10242)
