# -*- coding: utf-8 -*-

import os
from components.standalone_working_class import standalone_working_class
from datetime import datetime
from program_top.utilities.csv_and_json_serialisation import temperarily_load_a_local_json,temperarily_save_a_local_json
from program_top.utilities.string_and_unicode import decode_string_containing_utf_8
from program_top.utilities.sql_db import pyodbc_row_record2dict,sql_session

#from sqlalchemy import Table
import cPickle as pickle

def latin1_filter(original):
	'''Repair a GBK string that the DB driver mis-decoded as latin1.

	The source database stores GBK-encoded text, but the ODBC layer decodes
	it as latin1, producing a garbled unicode object.  Encoding back to
	latin1 recovers the original raw bytes, which are then decoded with the
	correct GBK codec.  Non-unicode values are returned unchanged.
	'''
	if isinstance(original, unicode):
		return original.encode('latin1').decode('gbk')
	# Already bytes (or a non-string column value such as int/None): pass through.
	return original

class sql_curler(standalone_working_class):
	
	def conduct_table(self,source_table_name,destination_table_name):
		'''在两个数据库之间传导表格'''
		self.__reading_session.transmit_records(self.__writting_session,destination_table_name,source_table_name)
		pass
	
	def __copy_single_table(self,table_name):
		
		#if not self.__writting_session.contain_table(table_name):#如果没有目标表格
		original_table_meta=self.__reading_session.read_table_meta(table_name)
		self.__writting_session.create_table_with_meta(table_name,original_table_meta)
		self.conduct_table(table_name,table_name)
		print "%s,导入完成"%(table_name)
		#source_contents=self.__reading_session.read_all_records(table_name)
		#self.__writting_session.write_records(table_name,source_contents)
		
		pass
	
	def __transfer_table(self,mission_parameter):
		source_table_list=self.__reading_session.tables
		
		start_with_string=mission_parameter['table_name']['start_with']
		
		def start_with_any(table_name):
			for each_string in start_with_string:
				if table_name.startswith(each_string):
					return True
				pass
			return False
		
		target_tables_to_transfer=filter(start_with_any,[each_table['TABLE_NAME'] for each_table in source_table_list])
		
		for each_table_name in target_tables_to_transfer:
			if self.__writting_session.contain_table(each_table_name):
				print "%s,跳过，已完成"%each_table_name
				continue
			print "开始拷贝,%s"%(each_table_name)
			self.__copy_single_table(each_table_name)
			pass
		
		if self._business_config['clip_or_copy']:
			#如果是clip，删除表，否则用copy模式
			pass
		
		pass
	
	def __transfer_record(self,mission_parameter):
		pass
	
	def __init__(self):
		super(sql_curler, self).__init__()
		self.__name=datetime.now().__repr__()
		self.__mission_initilisation()
		self._business_mapper={'table_transfer': self.__transfer_table,
		 'record_transfer': self.__transfer_record,
		                       "procedure_transfer": self.__transfer_procedures}
		#self.__scan_tables()
		self.do_operation()
		pass
	
	def __transfer_procedures(self,mission_parameter):
		source_procedures=self.__reading_session.procedures
		procedures_already_in_destination=self.__writting_session.procedures
		start_with_string=mission_parameter['procedure_name']['start_with']
		start_with_lambda=lambda x: ((isinstance(x, str) or isinstance(x, unicode)) and x.startswith(start_with_string))
		
		target_procedures_to_transfer=filter(start_with_lambda,source_procedures.keys())
		procedures_already_trasfered=filter(start_with_lambda,procedures_already_in_destination.keys())
		source_set=set(target_procedures_to_transfer)
		exclude_set=set(procedures_already_trasfered)
		remaining_to_trans=source_set-exclude_set
		
		for each_procedure_name in list(remaining_to_trans):
			
			current_procedure_code=self.__reading_session.extract_procedure_code(each_procedure_name)
			print each_procedure_name
			try:
				self.__writting_session.odbc_cursor.execute(current_procedure_code)
				self.__writting_session.odbc_cursor.commit()
			except Exception, current_error:
				error_msg=current_error.__repr__()
				
				print error_msg
				pass
			pass
		pass
	
	def do_operation(self):
		try:
			self._business_mapper['procedure_transfer'](self._business_config['operations']['procedure_transfer'])
			self._business_mapper['table_transfer'](self._business_config['operations']['table_transfer'])
			
		except:
			
			pass
		
		pass
	
	def pull_data(self):
		tables=self.__mission_briefing['table_progress']['tables_yet_to_migrate']-self.__mission_briefing['table_progress']['table_copied']
		
		source, sengine=self.__reading_session.session,self.__reading_session.engine
		destination, dengine=self.__writting_session.session,self.__writting_session.engine
		
		if self.__mission_briefing['table_progress']['current_writing_table'] is not None:#如果之前有正在写入还没写完的表
			table_name_to_delete=self.__mission_briefing['table_progress']['current_writing_table']
			stub_table=Table(table_name_to_delete, self.__writting_session.metadata, autoload=True)
			stub_table.drop()#删除表，接下来重新传输
			self.__mission_briefing['table_progress']['current_writing_table']=None
			temperarily_save_a_local_json(self.__mission_briefing, self.__mission_briefing_filename)
			pass
		
		for table_name in tables:
			self.__mission_briefing['table_progress']['current_writing_table']=table_name
			
			print 'Processing', table_name
			current_table=Table(table_name, self.__reading_session.metadata, autoload=True)
			print 'Creating table on destination server'
			
			writing_table=Table(table_name, self.__writting_session.metadata)
			
			for column in current_table.columns:
				writing_table.append_column(column.copy())
			
			writing_table.create()
			
			current_table_buffer_name=self.__buffer_filename+'_'+table_name+'.pickle'
			
			if os.path.isfile(current_table_buffer_name):
				dump_fp=open(current_table_buffer_name,'r')
				current_table_all_data=pickle.load(dump_fp)
			else:
				current_table_all_data=source.query(current_table).limit(20).all()
				dump_fp=open(current_table_buffer_name,'w')
				pickle.dump(current_table_all_data,dump_fp)
				dump_fp.close()
				pass
			
			print 'Transferring records'
			for record in current_table_all_data:
				new_record=map(latin1_filter,record)
				
				destination.execute(writing_table.insert(new_record))
				#print 'Committing changes to table %s'%(table_name)
				
				pass
			destination.commit()
			
			
			self.__mission_briefing['table_progress']['current_writing_table']=None#本表传送完毕
			self.__mission_briefing['table_progress']['tables_yet_to_migrate'].remove(table_name)
			self.__mission_briefing['table_progress']['table_copied'].add(table_name)#加入传送完毕的表
			temperarily_save_a_local_json(self.__mission_briefing,self.__mission_briefing_filename)
			pass
		pass
	
	
	def __scan_tables(self):
		'''
		扫描所有数据表，得出当前迁移进度
		'''
		
		
		if not os.path.isfile(self.__mission_briefing_filename):#如果没有传输记录文件，说明任务刚刚开始
			self.__mission_briefing={
				'table_progress':{'tables_yet_to_migrate': self.__tables_readable, 'current_writing_table': None,'table_copied': set()},
				'record_progress':{'records_written':[]}
				}
			temperarily_save_a_local_json(self.__mission_briefing,self.__mission_briefing_filename)#创建进度文件
		else:
			self.__mission_briefing=temperarily_load_a_local_json(self.__mission_briefing_filename)#读取原来的进度文件
			
			pass
		
		pass
	pass
	
	def __mission_initilisation(self):
		'''
		数据迁移任务初始化
		'''
		self.__mission_briefing_filename=self._environment_pack['instance_config']['buffer_dir']+'locked_info.json'#潜在的迁移信息文件
		config_filename=self._environment_pack['instance_config']['config_file_dir']+'config.json'
		
		self.__buffer_filename=self._environment_pack['instance_config']['buffer_dir']+'buffer'#当前表的缓存文件前缀
		
		self._business_config=temperarily_load_a_local_json(config_filename)
		
		self.__reading_session=sql_session(self._business_config['source_db'])
		self.__writting_session=sql_session(self._business_config['target_db'])
		pass
	pass