import pymongo ;
import os ;
import re ;
import datetime;
from pymongo import MongoClient;
import time,sys
import itertools
from operator import itemgetter, attrgetter
from PyQt4.QtCore import QDir,QObject,QThread
from KDataBase import KGlobals
from KDataBase.KDBUtility import KDBUtility
from KDataBase.KDBProvider import KEODProvider,KStooqProvider
from KDataBase.KAbstractDB import KAbstractDB 
class KBuildDB(KAbstractDB):
	"""Build historical stock data from different data providers.

	Should only be called by KDBManager.  Work is dispatched through
	DoWork() on a worker thread; self.ThreadArgs (set by the manager)
	carries the operation name under 'Op' plus its arguments.
	Long-running operations poll self.Stop and return False when the
	user interrupts them.
	"""
	# general database functions
	def __init__(self, parent=None):
		# fix: original passed `self` (not `parent`) to the base __init__,
		# making the object its own parent and ignoring the argument.
		super(KBuildDB, self).__init__(parent)
		self.bulklimit = 5000  # NOTE(review): declared batch size, but ConsolidateThisMarket flushes every 100 docs — confirm intent
		self.provider = None   # data-provider instance, chosen in BuildAllHistory

	def DoWork(self):
		"""Thread entry point: dispatch the operation named in self.ThreadArgs['Op']."""
		op = self.ThreadArgs['Op']
		# fix: `finished` was unbound for an unrecognized op, raising NameError below.
		finished = True
		if op == "Import":
			finished = self.BuildAllHistory(self.ThreadArgs['Path'], self.ThreadArgs['Provider'])
		elif op == "Rebuild":
			finished = self.CleanUp()
		elif op == "Consolidate":
			finished = self.ConsolidateToSymbolBased()
		elif op == "OneClickImport":
			finished = self.OneClickImport(self.ThreadArgs['Path'])
		if finished == False:  # interrupted by user
			self.UpdateProgress("Interuppted by User")

	# Actual database build operations.
	def CleanUp(self):
		"""Drop and re-create the working database.  Always returns True."""
		self.client.drop_database(KGlobals.DatabaseName)
		self.db = self.client[KGlobals.DatabaseName]
		self.UpdateProgress(KGlobals.DatabaseName + ' database is re-created')
		return True

	# import history data from files of specified data provider
	def BuildAllHistory(self, sfilepath, sprovider):
		"""Import history data files from the named provider ("EOD" or "Stooq").

		Returns the provider's result (False when interrupted by the user),
		or None when sprovider is not recognized.
		"""
		# fix: reset first — previously an unknown provider either raised
		# AttributeError (first call) or silently reused a stale provider.
		self.provider = None
		if sprovider == "EOD":
			self.provider = KEODProvider.KEODProvider(self, self.db)
		elif sprovider == "Stooq":
			self.provider = KStooqProvider.KStooqProvider(self, self.db)
		if self.provider:
			self.provider.ProgressUpdated.connect(self.ProgressUpdated)
			return self.provider.BuildAllHistory(sfilepath)

	def OneClickImport(self, sfilepath):
		"""One click to clean -> import (Stooq, then EOD) -> consolidate.

		Returns False as soon as any step is interrupted, True otherwise.
		"""
		if self.CleanUp() == False:
			return False
		# NOTE(review): Windows-style path separators — presumably this tool
		# only runs on Windows; confirm before porting.
		if self.BuildAllHistory(sfilepath + '\\stooq_us', 'Stooq') == False:
			return False
		if self.BuildAllHistory(sfilepath + "\\EOD", 'EOD') == False:
			return False
		if self.ConsolidateToSymbolBased() == False:
			return False
		return True

	def ConsolidateToSymbolBased(self):
		"""Consolidate date-based collections into symbol-based ones for faster queries.

		Returns False when interrupted by the user, True on completion
		(fix: the original fell off the end returning None on success,
		inconsistent with CleanUp and the callers' `== False` checks).
		"""
		self.start = time.clock()  # NOTE(review): time.clock() removed in Py3.8 — prefer time.perf_counter() when porting
		for each_collection in self.db.collection_names():
			if KDBUtility.IsSymbolBased(each_collection) or each_collection == 'system.indexes':
				continue
			# fix: split(".", 2) could produce three parts and break the
			# 2-tuple unpack for names containing two dots; split once so
			# everything after the first dot is treated as the market name.
			(country, market) = each_collection.split(".", 1)
			if self.ConsolidateThisMarket(country, market) == False:  # interrupted by user
				return False
		self.UpdateProgress("Finished Consolidating.")
		return True

	def ConsolidateThisMarket(self, country, market):
		"""Build the symbol-based collection for one market.

		For every symbol, gathers all (date, price) records, sorts them by
		date, drops duplicate dates, and bulk-inserts a single document per
		symbol holding the full date and price arrays.  Returns False when
		interrupted by the user, True on completion.
		"""
		colname = KDBUtility.GetCollectionName(country, market, True)
		self.db.drop_collection(colname)  # always rebuild from scratch
		cldailystocks = self.db[KDBUtility.GetCollectionName(country, market, False)]
		newdailystocks = self.db[colname]
		updatecount = 0
		upcontents = []  # pending documents for the next bulk insert
		self.UpdateProgress("searching all symbols for market: %s" % market)
		allsymbols = cldailystocks.find({}, {"symbol": 1}).distinct("symbol")
		allsymbols = list(set([elem.upper() for elem in allsymbols]))  # case-fold and de-duplicate
		self.UpdateProgress("all symbols  for market: %s obtained" % market)
		for each_symbol in sorted(allsymbols, reverse=True):
			cursor = cldailystocks.find({"symbol": each_symbol}, {"date": 1, "price": 1, "provider": 1})
			datalist = list(cursor)
			# fix: a symbol stored in lower case matches nothing here (the
			# query uses the upper-cased form), and the empty result used to
			# crash on alldata[0]; skip it instead.
			if not datalist:
				continue
			# Columnize the records; prices are stored as strings -> float.
			dates = [x['date'] for x in datalist]
			prices = [[float(p) for p in x['price']] for x in datalist]
			providers = [x['provider'] for x in datalist]
			alldata = list(zip(dates, prices, providers))
			alldata.sort(key=itemgetter(0))  # sort by date ascending (groupby below requires this)
			# keep only the first record per date (remove duplicates)
			alldata = [next(mgroup) for _, mgroup in itertools.groupby(alldata, key=itemgetter(0))]
			alldata = list(zip(*alldata))  # back to column form: (dates, prices, providers)
			upcontents.append({"symbol": each_symbol, "date": alldata[0], "price": alldata[1]})
			if self.Stop:  # user requested cancellation
				return False
			updatecount = updatecount + 1
			if updatecount % 100 == 0:  # flush a batch (NOTE(review): self.bulklimit is not used here)
				try:
					newdailystocks.insert(upcontents)  # bulk insert
				# fix: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
				# the leftover AAPL debug query per batch is removed.
				except Exception:
					print(sys.exc_info()[0])
				upcontents = []
				elapsed = time.clock() - self.start
				self.UpdateProgress('Consolidating - M: %s S#: %d T:%.2f' % (market, updatecount, elapsed))
		if upcontents:
			newdailystocks.insert(upcontents)  # bulk insert the rest
		self.UpdateProgress("Ensure Index for M:%s" % (market))
		newdailystocks.ensure_index([("symbol", 1)])  # build query index on symbol
		return True









