﻿#coding:utf-8

import fileinput
import os
import sys
import fnmatch
import logging
import linecache
import codecs
import tempfile
import re
import time
from os.path import join

#global variables
# Root of the Exchange source tree walked by the scanning routines below.
# The sibling path constants are kept commented out for reference only.
source_path= r"d:\Exchange\Utah-1\sources\dev"
#object_model_path = r"d:/Exchange/Utah-1/sources/dev/configuration/src/ObjectModel/"
#provision_path = r"d:/Exchange/Utah-1/sources/dev/management/src/provisioningagent/"
#data_provision_path =  r"d:/Exchange/Utah-1/sources/dev/management/src/datacenterprovisioningagent/"



# Ad-hoc search key words; test_line is a sample C# class declaration
# consumed by the test() function further down.
exclude_word = "piiscrubber"
key_word = "class"
test_line = "internal class DataSourceManager : IDisposable, IConfigDataProvider " 


class CodeHandle:
	"""Utilities for scanning a C# source tree: collect files by glob,
	grep lines for patterns, and record matching class names plus
	surrounding code context on disk."""

	#logging part
	def create_log(self):
		"""Build and return a logger that writes INFO records to both
		error.log and the console."""
		logger = self.get_logger()
		fh = self.get_file_handler()
		ch = self.get_console_handler()
		# one shared format for both handlers
		formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
		fh.setFormatter(formatter)
		ch.setFormatter(formatter)
		# NOTE(review): calling this twice attaches duplicate handlers —
		# confirm it is invoked at most once per process.
		logger.addHandler(fh)
		logger.addHandler(ch)
		return logger

	def get_logger(self):
		"""Return the shared 'error_logger' logger at INFO level."""
		logger = logging.getLogger('error_logger')
		logger.setLevel(logging.INFO)
		return logger

	def get_file_handler(self):
		"""Return an INFO-level handler appending to error.log."""
		file_handler = logging.FileHandler('error.log')
		file_handler.setLevel(logging.INFO)
		return file_handler

	def get_console_handler(self):
		"""Return an INFO-level handler writing to the console."""
		console_handler = logging.StreamHandler()
		console_handler.setLevel(logging.INFO)
		return console_handler

	#print code in code_file.txt
	def print_code(self,file_name,line_number):
		"""Append file_name and a ~10-line context window around
		line_number to code_file.txt.

		linecache.getline returns '' for out-of-range line numbers, so
		the window is silently truncated at file boundaries.
		"""
		# context manager guarantees the file is closed on any exit path
		with open('code_file.txt','a') as code_file:
			print('==================================================',file=code_file)
			print(file_name,file=code_file)
			for item in range(line_number-5,line_number+5):
				line = linecache.getline(file_name,item)
				print(line,file=code_file)
			linecache.clearcache()

	#process each line
	def process_line(self,line,pattern,file_name,result_file):
		"""If `pattern` occurs in `line`, write the surrounding token,
		line number and file name to result_file (CSV) and dump context
		via print_code.

		Must run inside an active fileinput loop: it calls
		fileinput.filelineno() for the current line number.
		"""
		pos = line.find(pattern)
		#find the match pattern
		if pos != -1:
			#get the line number of the file
			line_no = fileinput.filelineno()
			# token boundaries: last space before the match, then the
			# first '.', ' ' or ';' after it.  (The original tested '.'
			# twice; the dead duplicate has been removed — same
			# behavior, since a second identical find also yields -1.)
			begin = line.rfind(' ',0,pos)
			end = line.find(".",pos,-1)
			if end == -1:
				end = line.find(" ",pos,-1)
			if end == -1:
				end = line.find(";",pos,-1)
			print(line[begin+1:end], line_no, file_name, sep=',',file=result_file)
			self.print_code(file_name,line_no)

	#process each line in multi files with a list of patterns
	#result is written into disk
	def run(self,file_list,pattern_list,result_file,process):
		"""Stream every line of every file in file_list through
		`process` and return the concatenation of its results.

		process(line, pattern_list, file_name, result_file) must return
		an iterable (possibly empty).  On any exception the error is
		logged to error.log and stdout and whatever was collected so far
		is returned.
		"""
		result_list = list()
		file_name = ''
		# with-blocks replace the original manual close() calls, which
		# leaked process.log when an unexpected error propagated and
		# closed it twice on the handled-exception path
		with open('process.log','w') as process_log:
			with fileinput.input(files=file_list,mode='rb') as f:
				try:
					for line in f:
						if fileinput.isfirstline():
							file_name = fileinput.filename()
							print("handle file: ",file_name,file=process_log)
							# strip a UTF-8 BOM from the first line only
							bom_utf8 = codecs.BOM_UTF8.decode('utf-8')
							line = line.decode('utf-8','ignore').lstrip(bom_utf8)
						else:
							line = line.decode('utf-8','ignore')
						tmp_list = process(line, pattern_list, file_name, result_file)
						if tmp_list:
							result_list.extend(tmp_list)
				except Exception as e:
					with open('error.log','a') as error_file:
						print("reasons: ", e," in file: ",file_name," line number is: ", fileinput.filelineno(),sep=',',file=sys.stdout)
						print("reasons: ", e," in file: ",file_name," line number is: ", fileinput.filelineno(),sep=',',file=error_file)
		return result_list

	#get the list of files which has the suffix pattern under a specified directory
	def get_file_list(self,file_path,include_pattern,exclude_pattern):
		"""Walk file_path and return every path that matches at least one
		include_pattern glob and no exclude_pattern glob.

		Bug fix: the exclusion flag is now evaluated per file.  The
		original set it once per directory, so a single excluded file
		suppressed every later (sorted) file in that directory.
		"""
		result = list()
		for root,dirs,files in os.walk(file_path):
			files.sort()
			for name in files:
				full_name = join(root,name)
				#get rid of those files with exclude patterns
				excluded = False
				for item in exclude_pattern:
					if fnmatch.fnmatch(full_name,item.strip()):
						excluded = True
						break
				if excluded:
					continue
				#keep those files with include patterns
				for index in include_pattern:
					if fnmatch.fnmatch(full_name,index.strip()):
						result.append(full_name)
		return result

	# get all filename and line number with the specified key_words_list and store them in a word specified dict
	def get_mapped_files(self, key_word_list, file_list):
		"""Return {key_word: [[file_name, line_no], ...]} for every
		occurrence of each key word in the given files.

		Bug fixes versus the original:
		 * every key previously shared ONE aggregate list, so each dict
		   value contained the matches of ALL key words combined;
		 * the dict was keyed on the re.escape()d pattern text instead
		   of the key word itself;
		 * patterns were recompiled for every input line.
		"""
		# escape so key words are matched literally; compile once
		compiled = {word: re.compile(re.escape(word)) for word in key_word_list}
		result = {word: list() for word in key_word_list}
		bom_utf8 = codecs.BOM_UTF8.decode('utf-8')
		with open("tmp.log",'w') as tmp1, open("tmp2.log",'w') as tmp2:
			with fileinput.input(files=file_list,mode='rb') as f:
				for line in f:
					if fileinput.isfirstline():
						print("handle: ",fileinput.filename(),file = tmp1)
						line = line.decode('utf-8','ignore').lstrip(bom_utf8)
					else:
						line = line.decode('utf-8','ignore')
					for word, pattern in compiled.items():
						if pattern.search(line):
							result[word].append([fileinput.filename(), fileinput.filelineno()])
							print(result[word],word,sep=',',file=tmp2)
		return result

	def get_intersection(self, method_dict, class_dict):
		"""Write (method, line, class, line, file) CSV rows to
		class_name_intersection.txt for every [file, line] pair present
		in both a method_dict value and a class_dict value."""
		sorted_method_key = sorted(method_dict)
		sorted_class_key = sorted(class_dict)

		# sort each value list by file name so the merge below works
		for i in sorted_method_key:
			method_dict[i] = sorted(method_dict[i], key = lambda item:item[0])
		for j in sorted_class_key:
			class_dict[j] = sorted(class_dict[j], key = lambda item:item[0])

		with open("class_name_intersection.txt",'w') as intersection_file:
			for i in sorted_method_key:
				i_list = method_dict[i]
				i_len = len(i_list)
				for j in sorted_class_key:
					# bug fix: the original read method_dict[j] here,
					# never consulting class_dict at all
					j_list = class_dict[j]
					j_len = len(j_list)
					# bug fix: reset the merge indices for every key
					# pair — the original never reset them, so every
					# pair after the first was skipped
					index_i = 0
					index_j = 0
					# classic sorted-merge intersection
					while index_i < i_len and index_j < j_len:
						if i_list[index_i] == j_list[index_j]:
							# bug fix: the original referenced the
							# undefined names item_i / item_j here
							print(i, i_list[index_i][1], j, j_list[index_j][1], i_list[index_i][0],sep=',',file=intersection_file)
							index_i = index_i + 1
							index_j = index_j + 1
						elif i_list[index_i] < j_list[index_j]:
							index_i = index_i + 1
						else:
							index_j = index_j + 1

	# get all the class that inheritate from the interface or class root
	# write the result into result_file
	def get_classes(self,line,pattern,file_name,result_file):
		"""If `line` declares a class inheriting from `pattern`, append
		the class name (CSV: name,file) to result_file and return it in
		a one-element list; otherwise return an empty list.
		"""
		result_list = list()
		# skip comment lines and html-ish markup
		# NOTE(review): "tr" also rejects any line containing the letters
		# 't' and 'r' back to back — confirm that filter is intended
		comment_pattern = re.compile("//|tr")
		if comment_pattern.search(line):
			return result_list

		pos = line.find(pattern)
		#find the match pattern
		if pos != -1:
			# the class keyword must appear BEFORE the base-type pattern
			class_pos = line.find("class",0,pos)
			if class_pos != -1:
				# class name sits between the two spaces after "class"
				begin_pos = line.find(" ",class_pos,-1) + 1
				end_pos = line.find(" ",begin_pos,-1)
				if begin_pos != -1 and end_pos != -1:
					result = line[begin_pos:end_pos]
					# guard against an empty slice, then ensure it looks
					# like a type name (upper-case initial)
					if result and result[0].isupper():
						result_list.append(result)
						print(result,file_name,sep=',', file=result_file)
		return result_list

#function test
def test(b):
	"""Exercise the class-name extraction logic on the module-level
	sample declaration test_line; prints b, then the extracted name."""
	print(b)
	line = test_line
	result = ''
	if line.find("IConfigDataProvider") != -1:
		class_pos = line.find("class")
		if class_pos != -1:
			start = line.find(" ", class_pos, -1) + 1
			stop = line.find(" ", start, -1)
			if start != -1 and stop != -1:
				result = line[start:stop]
			else:
				print("class name not found",file=sys.stderr)
	print(result)

#function object parameter test
def test1(test):
	"""Invoke test2 over the cartesian product of two ranges; all
	results are discarded (demo of function objects as parameters)."""
	for left in range(1, 5):
		for right in range(100, 200):
			test2(left, right)
	
def test2(i,j):
	return [i,j]

def test3():
	"""Grow a seed list with [i, i] pairs from test2 and print it."""
	a = [1, 2]
	b = [3, 5]
	for i in range(5):
		a += test2(i, i)
	print(a)

def get_file_list(base_path=None):
	"""Collect .cs files under base_path, skipping any whose name
	contains Schema, Pii or Exception.

	base_path defaults to the module-level source_path, so existing
	zero-argument callers are unaffected (backward-compatible
	generalization).
	"""
	if base_path is None:
		base_path = source_path

	# raw string: the original '\.' in a plain literal is an invalid
	# escape sequence (SyntaxWarning on modern Python); the negative
	# lookahead rejects the unwanted name fragments
	p_r = re.compile(r'(?!.*Schema.*|.*Pii.*|.*Exception.*).*\.cs$')

	result = list()
	for root, dirs, files in os.walk(base_path):
		for i in files:
			if p_r.match(i):
				result.append(join(root, i))

	return result

# get files and write them into disk
# get files and write them into disk
def stage_1():
	"""Write every collected .cs path to ./data/files_collect.txt (one
	per line) and report how many records were written.

	Requires the ./data directory to exist.
	"""
	file_list = get_file_list()
	# context manager replaces the manual open/close of the original;
	# len() replaces the hand-rolled counter (with its stray semicolon)
	with open('./data/files_collect.txt','w') as file_list_file:
		for item in file_list:
			print(item,file=file_list_file)
	print(len(file_list)," records processed")

def stage_2():
	"""Scan every file listed in ./data/files_collect.txt for the
	level-2 patterns; matching lines go to ./data/level5.raw and their
	context windows to ./data/assitant5.raw.  Prints timing stats."""
	start = time.time()

	# compile the patterns once up front (original also assigned
	# `start` a second time here — removed)
	p_r_list = [re.compile(p) for p in level2_list()]

	file_list = list()
	with open('./data/files_collect.txt','r') as f:
		for line in f:
			file_list.append(line.strip())

	total = 0
	with open('./data/level5.raw','w') as f_result, \
			open('./data/assitant5.raw','w') as f_result_assistant:
		for fname in file_list:
			f_cache = read_file(fname)
			f_cache_len = len(f_cache)
			count = 0
			for line in f_cache:
				total = total + 1
				count = count + 1
				for regex in p_r_list:
					if regex.match(line):
						# bug fix: the original logged the compiled
						# PATTERN here, because the inner loop variable
						# shadowed the outer file-name variable (both
						# were named `item`)
						print("========================================\r\nfile_name: ",fname,"  line number:",count,file=f_result_assistant)
						print(line,file=f_result)
						# bug fix: clamp the window start at 0 — a match
						# within the first five lines used to index from
						# the END of the file via negative subscripts
						for index in range(max(0, count - 5), count + 5):
							if index < f_cache_len:
								l = f_cache[index]
								if l.count('/') == 0:
									print(l,file=f_result_assistant)

	end = time.time()
	print(total," lines are processed")
	print(end - start,' seconds consumed!')


def new_task():
	"""Current pipeline entry point: run only the stage_2 scan.

	stage_1 (file collection) is assumed to have been run already and
	is left commented out in the original workflow.
	"""
	stage_2()


def work2():
	"""Collect .cs files from the configured paths, then map method-name
	key words to the files/lines where they occur via
	CodeHandle.get_mapped_files.

	NOTE(review): management_path and object_model_path are not defined
	anywhere in this module (only source_path is), so calling this
	function raises NameError — confirm the intended path constants.
	"""
	#start work and prepare it 
	ch = CodeHandle()
	#including file_path
	file_path = [management_path,object_model_path]
	#provision_path,data_provision_path]

	#pattern part
	include_pattern = ['*.cs']
	exclude_pattern = ['*Schema*','*Pii*']

	# get all the files from specified directories into a list
	file_list = list()
	for item in file_path:
		file_list.extend(ch.get_file_list(item,include_pattern,exclude_pattern))	
		
	#write the list of files into disk 
	file_list_file = open('files.txt','w')
	for item in file_list:
		print(item,file=file_list_file)
	file_list_file.close()

	method_name_list = ['Delete', 'Find', 'FindPaged', 'Read', 'Save', 'Get', 'Cache', 'Session']

	class_name_list = list()
	# to fill class name list from local file 
	with open('classes.txt') as f:
		for line in f:
			line = line.strip("\n")
			if line:
				class_name_list.append(line)
	print(class_name_list)
	
	method_dict = dict()
	method_dict = ch.get_mapped_files(method_name_list, file_list)
	# NOTE(review): class_name_list is built above but the class-dict /
	# intersection steps below are all commented out — dead code kept
	# for reference.
	#class_dict = dict()
	#class_dict = ch.get_mapped_files(class_name_list,file_list)

	#method_file = open("method_file.txt")
	#for item in method_dict.keys():
	#	print(item, method_dict[item],file=method_file)
	#method_file.close()

	#class_file = open("class_file.txt")
	#for item in class_dict.keys():
	#	print(item, class_dict[item],file=class_file)
	#class_file.close()
	#ch.get_intersection(method_dict,class_dict)
	###################################################
	#sorted_method_key = sorted(method_dict)
	#sorted_class_key = sorted(class_dict)

	## sort two lists with each key, for intersection process
	#tmp_list = list()
	#for i in sorted_method_key:
	#	tmp_list = sorted(method_dict[i], key = lambda item:item[0]) 
	#	method_dict[i] = tmp_list

	#tmp_list = list()
	#for i in sorted_class_key:
	#	tmp_list = sorted(class_dict[i], key = lambda item:item[0]) 
	#	class_dict[i] = tmp_list

	#intersection_file = open("class_name_intersection.txt",'w')
	#i_list = list()
	#j_list = list()
	#index_i = 0
	#index_j = 0
	#for i in sorted_method_key:
	#	i_list = method_dict[i]
	#	i_len = len(i_list)
	#	for j in sorted_class_key:
	#		j_list = method_dict[j]
	#		j_len = len(j_list)
	#		while index_i < i_len and index_j < j_len: 
	#			if i_list[index_i] == j_list[index_j]: 
	#				print(i, item_i[1], j, item_j[1], item_i[0],sep=',',file=intersection_file)
	#				index_i = index_i + 1	

	#				index_j = index_j + 1	
	#			else if  i_list[index_i] < j_list[index_j]: 
	#				index_i = index_i + 1	
	#			else:
	#				index_j = index_j + 1	
	#intersection_file.close()

			



def main():
	"""Module entry point: run the work2 pipeline."""
	work2()

#work1 process
#work1 process
def work1():
	"""Earlier pipeline: collect .cs files, then iteratively expand the
	inheritance hierarchy of the seed interfaces via
	CodeHandle.run + CodeHandle.get_classes, writing class names to
	class_set.txt.

	NOTE(review): management_path and object_model_path are not defined
	anywhere in this module (only source_path is), so calling this
	function raises NameError — confirm the intended path constants.
	"""
	#start work and prepare it 
	ch = CodeHandle()
	#including file_path
	file_path = [management_path,object_model_path]
	#provision_path,data_provision_path]

	#pattern part
	include_pattern = ['*.cs']
	exclude_pattern = ['*Schema*','*Pii*']

	# get all the files from specified directories into a list
	file_list = list()
	for item in file_path:
		file_list.extend(ch.get_file_list(item,include_pattern,exclude_pattern))	
		
	#write the list of files into disk 
	file_list_file = open('file_list.txt','w')
	for item in file_list:
		print(item,file=file_list_file)
	file_list_file.close()

	# want to get all the classes selected
	result_file = open("class_set.txt",'w')

	#the patten we use to filter the code
	pattern_list = ['IConfigDataProvider','AvailabilityProvider','IADDataProvider']
	#,' AvailabilityProvider', 'IADDataProvider']

	# not in use
	exclude_pattern_list = list()

	# run the work
	#for item in pattern_list:
	#	ch.run(file_list, item, result_file, ch.get_classes)
	#result_file.close(.)

	#get the whole hierarchy of the base class
	# NOTE(review): run() is handed a single pattern STRING here (not a
	# list); get_classes treats it as one pattern via line.find(), and
	# newly found class names are appended as further seed patterns.
	tmp_list = list()
	while pattern_list:
		item = pattern_list[0]
		tmp_list = ch.run(file_list, item, result_file, ch.get_classes)
		pattern_list.remove(item)
		if tmp_list:
			pattern_list.extend(tmp_list)
	result_file.close()

#read files with bom
def test4(fname):
	with fileinput.input(files=fname,mode='rb') as f:
		try:
			for line in f:
				if fileinput.isfirstline():
					bom_utf8 = codecs.BOM_UTF8.decode('utf-8')
					#line = line.decode('utf-8').lstrip(bom_utf8)
				else:
					line = line.decode('utf-8','ignore')
		except Exception as e:
			print(e,line,fileinput.filelineno(),sep='\r\n')


#iterator test
#iterator test
def iter_test():
	"""Rotate through a worklist, re-queueing the head item on every
	third step, with a hard stop at step 50; prints progress."""
	work = list(range(5))
	step = 0
	while work:
		head = work[0]
		print(work, step)
		if step % 3 == 0:
			work.append(head)
		if step == 50:
			break

		work.remove(head)
		step += 1
	print(work)

def signs_find_test():
	"""Print the sample text from the first '//' marker up to (but not
	including) the final character, or report a miss."""
	marker = "//"
	sample = "hello,// hi my name is"
	where = sample.find(marker)
	if where == -1:
		print("not found")
	else:
		print(sample[where:-1])

def set_test():
	"""Exercise basic set operations: add, membership, discard (absent
	and present), and a bounded pop/add loop."""
	nums = set(range(5))

	if 1 in nums:
		print("1 in a")
	print(nums)

	nums.discard(9)  # absent element: discard is a no-op, no error
	nums.discard(4)
	print(nums)
	nums.add(100)
	print(nums)

	# pop an arbitrary element, re-add its successor; the set never
	# empties, so the counter provides the stop condition
	rounds = 0
	while nums:
		if rounds == 20:
			break
		popped = nums.pop()
		nums.add(popped + 1)
		rounds = rounds + 1
		print(popped)

def level2_list():
	"""Return the level-4 record patterns, each wrapped in a negative
	lookahead that rejects lines starting with // or /// comments.

	Raw string fixes the original's '\\/' — an invalid escape sequence
	in a plain string literal (SyntaxWarning on modern Python); the
	resulting regex text is unchanged.
	"""
	return [r'(?!^\/{2,3}.*$).*' + word + '.*$' for word in read_file('./data/level4.record')]


def regex_test():
	"""Print the level-1 record patterns wrapped in the comment-rejecting
	lookahead, then print one hard-coded sample pattern.

	Raw strings fix the original's invalid '\\/' escape sequences; the
	regex text produced is identical.  Dead commented-out experiments
	were removed.
	"""
	wrapped = [r'(?!^\/{2,3}.*$).*' + word + '.*$' for word in read_file('./data/level1.record')]
	for item in wrapped:
		print(item)

	p = r'(?!^\/{2,3}.*$).*IConfigDataProvider.*$'
	print(p)
	#	#print(match,s)

def upper_test():
	"""Print only the upper-case characters of a sample string, with no
	separators or trailing newline."""
	sample = "hEllO,WorLd,h"
	for ch in sample:
		if ch.isupper():
			print(ch, end='')

def if_test():
	"""Demonstrate truthiness of i % 2, then simple arithmetic prints."""
	i = 5
	# odd remainder is truthy, so i itself prints for odd i
	print(i if i % 2 else i + 1)
	i = i + 1
	print(i * 5)

def dict_test():
	"""Map a single key to a list of [i, 2*i] pairs (no output)."""
	result = dict()
	result["hi"] = [[i, i * 2] for i in range(5)]

def sort_howto():
	"""Sort pair records by their second field and print the result."""
	records = [[1,'D'], [2,'B'], [3,'B'], [4,'E'], [5,'A']]
	print(sorted(records, key=lambda pair: pair[1]))

def read_file(f_name):
	"""Return the stripped text lines of f_name decoded as UTF-8 (decode
	errors ignored, a leading BOM removed), excluding every line that
	contains a '"' or '/' character."""
	# lines holding quote or slash characters are filtered out
	skip = re.compile('.*["/].*$')
	bom = codecs.BOM_UTF8.decode('utf-8')

	with open(f_name,'rb') as f:
		raw_lines = f.readlines()

	result = list()
	for raw in raw_lines:
		text = raw.decode('utf-8','ignore').lstrip(bom).strip()
		if not skip.match(text):
			result.append(text)

	return result
		

	

if __name__ == '__main__':

	# Entry point: only new_task() (the stage_2 scan) runs.  Everything
	# below is a scratchpad of earlier experiments kept for quick
	# re-enabling; note test7() is referenced but never defined here.
	new_task()
	#read_file()
	#sort_howto()
	#main()
	#dict_test()
	#if_test()
	#regex_test()
	#upper_test()
	#set_test()
	#test4("special.txt")
	#test3()
	#test1(test)
	#test()
	#main()
	#signs_find_test()
	#test7()
	#iter_test()
	pass

	#item = r"d:\Exchange\Utah-1\sources\dev\management\src\Management\RecipientTasks\mailbox\DisableMailbox.cs"

	#with fileinput.input(files=file_list) as f:
	#	for line in f:
	#		if fileinput.isfirstline():
	#			file_name = fileinput.filename()
	#			print(file_name)
