#==================================================
#File name: auto_task.py
#author:Lihua Wu
#Email: t_lihwu@microsoft.com
#Description:Find all classes, methods, variables related to data access
#==================================================

import re
import time
import sys
import os
import multiprocessing
import logging
from os.path import join, getsize
import codecs
import configparser
import argparse
#==================================================
#Description:
#the start source_path
#==================================================
source_path= r"d:\Exchange\Utah-1\sources\dev"

#==================================================
#Description:
#global logging configuration
#Levels :critical > error > warning > info > debug > notset
#logger module can be used to store result and display messages
#Example use:
#logger.error("this is an error message")
#logger.info("this is an info message")
#logger.critical("this is a critical message")
#logger.setLevel(logging.ERROR)
#logger.error("this is an error message")
#logger.info("this is an info message")
#logger.critical("this is a critical message")
#logger.setLevel(logging.INFO)
#logger.info("this is an info message")       
#logger.critical("this is a critical message")
#==================================================
# Root logger with two sinks: a verbose file log and a terse console log.
logger = logging.getLogger()

# Detailed format for the file, compact format for the shell.
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
console_formatter = logging.Formatter('[%(levelname)s] - %(message)s')

# File output: accept every record the logger passes down.
handler = logging.FileHandler("task_log.txt")
handler.setLevel(logging.NOTSET)
handler.setFormatter(formatter)

# Shell output: only CRITICAL records reach the console.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.CRITICAL)
console_handler.setFormatter(console_formatter)

logger.addHandler(handler)
logger.addHandler(console_handler)
logger.setLevel(logging.INFO)

#==================================================
#Parameter
#@f_name : the name of the file needs to be read
#Return
#a list with each line as an element
#Description:
#files are read in as bytes. Bytes are decoded as utf-8 and bom characters are stripped
#==================================================
def read_file(f_name):
    """Read *f_name* as UTF-8 text and return its lines as a list.

    The file is opened in binary mode so that decoding problems never
    abort a scan: undecodable bytes are dropped ('ignore') and any UTF-8
    BOM character is stripped from the start of each line.
    """
    bom_utf8 = codecs.BOM_UTF8.decode('utf-8')
    with open(f_name, 'rb') as f:
        raw_lines = f.readlines()
    # decode per line; UTF-8 never splits a code point across '\n'
    return [line.decode('utf-8', 'ignore').lstrip(bom_utf8) for line in raw_lines]

#==================================================
#Parameter
#@s: the regular expressions
#Return
#the compiled regex pattern
#Description:
#the input regex is compiled and returned, for later matching use
#==================================================
def regex_pattern(s):
    """Compile the regex string *s* and return the pattern object."""
    compiled = re.compile(s)
    return compiled
    
#==================================================
#Parameter
#@path: start directory
#Return
#list,with each line showing the file's name
#Description:
#walk the directory and get a list of files with specific patterns
#==================================================
def walk_dir():
    """Walk the configured source tree and collect matching .cs files.

    The root directory is read from config.ini.  Every C# file whose
    name does not contain Schema/Pii/Exception is selected; the paths
    are written to file_list.txt (one per line) and returned as a list.
    """
    config = read_config()
    path = config['Path']['Path']
    # negative lookahead: skip generated schema / PII / exception sources
    pattern = regex_pattern(r'(?!.*Schema.*|.*Pii.*|.*Exception.*).*\.cs$')
    result = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if pattern.match(name):
                result.append(join(root, name))
    with open('file_list.txt', 'w') as f:
        for item in result:
            print(item, file=f)
    # BUG FIX: the original called len() with no argument (TypeError)
    message = '%s files have been selected!' % (len(result))
    logger.info(message)
    return result

#==================================================
#Parameter:
#@message: message -- a str to log
#Description:
#short for logger.info()
#==================================================
def log(message):
    """Convenience wrapper: forward *message* to logger.info()."""
    logger.info(message)

#==================================================
#Description:
#read configuration information from config.ini 
#==================================================
def read_config():
    """Load config.ini from the working directory and return the parser."""
    parser = configparser.ConfigParser()
    parser.read('config.ini')
    return parser


#==================================================
#Description:
#read methods info of each type from config.ini
#==================================================
def read_method_info():
    """Return the ClassType section of config.ini as a plain dict.

    Maps each data-access type name to its comma-separated method list.
    The mappings, and their comma-split parts, are also emitted at
    DEBUG level for inspection.
    """
    config = read_config()
    result = {key: config['ClassType'][key] for key in config['ClassType']}

    for name in result.keys():
        logger.debug('%s : %s' % (name, result[name]))

    for name in result.keys():
        parts = result[name].split(',')
        parts = [part.strip() for part in parts]
        logger.debug(parts)

    return result

    





#==================================================
#Description:
#read regex configuration information from config.ini
#==================================================
def read_config_test():
    """Diagnostic helper: dump the contents of config.ini to the log."""
    config = configparser.ConfigParser()
    config.read('config.ini')
    # first the Regex section on its own...
    for key in config['Regex']:
        logger.info(key + ' ' + config['Regex'][key])
    logger.info(config.sections())
    # ...then every section, key by key
    for section in config.sections():
        for key in config[section]:
            log(key + ':' + config[section][key])

#==================================================
#Description:
#read regex configuration information from config.ini
#==================================================
def write_config():
    """Author helper: (re)generate config.ini with the default settings.

    Sections written:
      ClassType - each data-access type mapped to its known method names
      Type      - the seed list of types for the phase-2 iteration
      Regex     - line filters (level1 raw filter, level2 declarations)
      Path      - root directory of the source tree to walk

    Returns the populated ConfigParser after writing config.ini.
    """
    config = configparser.ConfigParser()
    config['ClassType'] = {
            'IConfigDataProvider' : 'Read, Find, FindPaged, Save, Delete',
            'IAvailabilityProvider' : 'SetAvailability, GetAvailability',
            'IDirectorySession' : 'Analyze, Apply, Check, Execute, Find, Get, Read, Retrieve, Run, Save, Update, Verify',
            'IDiscoverySearchDataProvider' : 'GetAll, Find, CreateOrUpdate, Delete',
            'IADDataProvider' : 'Run',
            'LiveIdManager' : 'EvictMember, EvictUnmanagedMember, GetNetIdFromMemberName, GetMemberNameFromNetId, SetMemberPassword, CreateMember, GetMemberType, MemberHasHotmailMailbox, ImportMember, DeleteMember, IsMemberFederated, RenameMember, RemoveMember, RecoverMember, BlockCredential, UnblockCredential, Dispose'
            }
    config['Type'] = {
            'Type' : 'IConfigDataProvider, IAvailabilityProvider, IADDataProvider,LiveIdManager, IDirectorySession, IDiscoverySearchDataProvider'
            }

    config['Regex'] = {
            'level1': '(?!.*//.*|.*#region.*).*',
            # BUG FIX: the original wrote [5a-zA-Z], almost certainly a typo
            # for the [0-9a-zA-Z] class that the scanner itself builds in
            # get_class_list
            'level2': r'.*(interface|class)\s([0-9a-zA-Z]+).*:.*'
            }
    config['Path'] = {
            'Path': 'd:\\Exchange\\Utah-1\\sources\\dev'
            }
    with open('config.ini', 'w') as f:
        config.write(f)
    return config

#==================================================
#Description:
#preparation for phase 2 processing
#read initial classes from config.ini
#write info to iterate.txt
#==================================================
def init():
    """Phase-2 preparation.

    Seeds iterate.txt and classes.txt with the initial type list from
    config.ini (each type paired with itself) and resets status.txt to
    "True" so the driver loop starts running.
    """
    log("initializing")
    config = read_config()
    seeds = config['Type']['Type'].split(',')
    # both files start with identical content: "Type,Type" per seed
    for target in ('iterate.txt', 'classes.txt'):
        with open(target, 'w') as f:
            for name in seeds:
                name = name.strip()
                print(name, name, sep=',', file=f)
    with open('status.txt', 'w') as f:
        print("True", file=f)

#==================================================
#Description:
#print a delimiter line
#==================================================
def cout():
    """Print a dashed delimiter line to stdout."""
    delimiter = '--------------------------------------------------'
    print(delimiter)

#==================================================
#Description:
#read class info from formatted files
#==================================================
def read_class_info(f_n):
    """Parse a two-column file into a {type: {subtype, ...}} mapping.

    Each line of *f_n* is expected to be 'Type,SubType'.  Lines with
    fewer than two comma-separated fields (e.g. blank lines) are
    skipped instead of raising.
    """
    result = dict()
    for line in read_file(f_n):
        fields = line.strip().split(',')
        # BUG FIX: the original tested len(fields) truthiness, so a blank
        # or one-field line passed the check and fields[1] raised IndexError
        if len(fields) < 2:
            continue
        result.setdefault(fields[0], set()).add(fields[1])
    return result

#==================================================
#Description:
#move info from config.ini to iterate.txt for function get_class_list to use
#==================================================


#==================================================
#Description:
#This method is meant to get a list of related class types from existing files, and to build on known types to detect new related types we don't yet know about.
#==================================================
def get_class_list():
    """Phase-2 worker: discover classes deriving from the known types.

    Every file in file_list.txt is scanned line by line; class/interface
    declarations whose base list mentions a currently-known subtype are
    recorded as (type, new-subtype) pairs.  The pairs are written to
    iterate.txt and then merged into classes.txt via merge().
    """
    # two filter tiers: level1 is a raw line filter (currently unused),
    # level2 matches class/interface declarations deriving from a known type
    level1 = dict()
    level2 = dict()
    regex = [level1, level2]

    # seed the filters from the pending-iteration file
    iterate_result = read_class_info('iterate.txt')
    for key in iterate_result.keys():
        set1 = regex[0].setdefault(key, set())
        set2 = regex[1].setdefault(key, set())
        for sub in iterate_result[key]:
            sub = sub.strip()
            p1 = '(?!.*//.*|.*#region.*|.*Entry.*).*' + sub + '.*'
            p2 = r'.*(interface|class)\s([0-9a-zA-Z]+).*:.*' + sub + '.*'
            set1.add(p1)
            set2.add(p2)

    # PERF: compile each level2 pattern once up front instead of
    # re-compiling inside the per-line scanning loop
    compiled = {item: [regex_pattern(p) for p in patterns]
                for item, patterns in regex[1].items()}

    # read the file list from disk, stripping trailing newlines
    file_list = [i.strip() for i in read_file('file_list.txt')]

    # discovered (type, subtype) pairs
    record = set()
    # processed-line counter for progress reporting
    count = 0
    for path in file_list:
        for raw_line in read_file(path):
            count = count + 1
            line = raw_line.strip()
            for item, patterns in compiled.items():
                for pattern in patterns:
                    match = pattern.match(line)
                    if match:
                        logger.critical(match.group(2))
                        result = (item, match.group(2))
                        record.add(result)
                        logger.critical(result)

    message = repr(count) + ' lines have been processed!'
    logger.critical(message)

    # rewrite iterate.txt with the newly discovered pairs
    # (with-statement guarantees the file is closed)
    with open('iterate.txt', 'w') as iterate:
        for item in record:
            print(item[0], item[1], sep=',', file=iterate)

    # merge the new pairs with the accumulated result
    merge()

#==================================================
#Description:
#merge new result with existing result 
#write the final result in classes.txt
#write new needed iterated info in iterate.txt
#==================================================
def merge():
    """Merge freshly discovered classes with the accumulated result.

    classes.txt accumulates every (type, subtype) pair seen so far;
    iterate.txt is rewritten to hold only the pairs that are new this
    round.  When nothing new remains, status.txt is set to "False" so
    the driver loop stops.
    """
    print('merging')
    config = read_config()
    keys = config['Type']['Type'].split(',')
    fresh = read_class_info('iterate.txt')
    known = read_class_info('classes.txt')
    for raw_key in keys:
        key = raw_key.strip()
        new_set = fresh.setdefault(key, set())
        old_set = known.setdefault(key, set())
        # keep only genuinely new pairs for the next iteration,
        # and everything seen so far for the total
        fresh[key] = new_set - old_set
        known[key] = new_set | old_set

    new_count = 0
    total_count = 0

    with open('iterate.txt', 'w') as f:
        for raw_key in keys:
            key = raw_key.strip()
            for item in fresh[key]:
                new_count = new_count + 1
                print(key, item, sep=',', file=f)
    if new_count == 0:
        # fixed point reached: tell the driver loop to stop
        with open('status.txt', 'w') as f:
            print("False", file=f)

    # NOTE: append mode -- classes.txt keeps earlier copies on disk;
    # read_class_info de-duplicates on load, so that is harmless
    with open('classes.txt', 'a') as f:
        for raw_key in keys:
            key = raw_key.strip()
            for item in known[key]:
                total_count = total_count + 1
                print(key, item, sep=',', file=f)
    message = "total number of classes: " + repr(total_count) + " total number of iterates: " + repr(new_count) + '\r\n'
    logger.critical(message)

#==================================================
#Parameter
#
#Description:
#print code snippest around
#==================================================
def print_code(c, pos):
    """Return the source snippet surrounding 1-based line *pos* of *c*.

    c is the list of file lines (newlines included); roughly five lines
    before and after pos are joined and returned as one string.
    """
    # BUG FIX: clamp the window to the actual list bounds -- the original
    # never included index 0 (else-branch was 1) and dropped the final
    # line (else-branch was len(c) - 1)
    begin = max(pos - 5, 0)
    end = min(pos + 5, len(c))
    return ''.join(c[begin:end])

#==================================================
#Description:
#when another process needs to run a comparatively long time ,this method is called to act as a progress bar
#==================================================
def show_wait():
    """Print one '-' per second forever.

    Crude progress indicator: run in a child process and terminated by
    the parent when the real work finishes.
    """
    while True:
        sys.stdout.write("-")
        sys.stdout.flush()
        time.sleep(1.0)

#==================================================
#Description:
#
#==================================================
def parse_test():
    """argparse experiment: accumulate integers with sum or max (scratch)."""
    parser = argparse.ArgumentParser(description='Process some integers')
    parser.add_argument('integers', metavar='N', type=int, nargs='+',
                        help='an integer for the accumulator')
    parser.add_argument('--sum', dest='accumulate', action='store_const',
                        const=sum, default=max,
                        help='sum the integers(default: find the max)')
    parsed = parser.parse_args()
    print(parsed.integers)
    print(parsed.accumulate)

def test():
    """Ad-hoc scratchpad used during development.

    Exercises set algebra, list/dict helpers and the regular expressions
    used by the scanner against sample C# lines, then walks the
    configured source tree printing roughly the first 100 file names.
    (Dead commented-out experiments from earlier revisions removed;
    live statements unchanged.)
    """
    # set algebra sanity checks
    a = {1,2,3}
    b = {2,3,4,5}
    print(a-b, a|b)
    a = a - b
    b = a | b
    print(a, b)
    cout()
    # list building and strip()
    s = list()
    for i in range(10):
        s.append(repr(i) + ';;')
    print(s)
    s = [i.strip(';') for i in s]
    print(s)
    cout()

    # plain assignment vs setdefault/append on dicts
    a = dict()
    b = dict()

    for i in range(3):
        a[i] = s
        tmp = b.setdefault(i,list())
        tmp.append(s)
        tmp.append(s)

    print(a)
    print(b)
    print(len(a), len(b))
    cout()
    # class/interface declaration regex against sample C# lines
    s = ' internal class TenantStoreDataProvider : EwsStoreDataProvider'
    p = '.*(interface|class)\s([0-9a-zA-Z]+).*:.*' + 'EwsStoreDataProvider' + '.*'
    p1 = '.*(interface|class)\s([5a-zA-Z]+).*:.*' + 'AggregationSubscriptionDataProvider' + '.*'
    s1 = "    internal sealed class ConnectSubscriptionDataProvider : AggregationSubscriptionDataProvider "
    r = regex_pattern(p1)
    match = r.match(s1)
    if match:
        print(match.group(2))
    else:
        print("failed")

    cout()
    cout()
    # variable-declaration samples
    s1 = '   IConfigDataProvider icf = new IConfigDataProvider('
    s2 = '    return ((IConfigDataProvider)session).FindPaged<T>(filter, rootId, deepSearch, null, 0);'
    s3 = 'internal delegate IConfigurable GetDataObjectDelegate(IIdentityParameter id, IConfigDataProvider session, ObjectId rootID, OptionalIdentityData optionalData, LocalizedString? notFoundError, LocalizedString? multipleFoundError);'
    s4 = ' internal delegate IConfigurable GetDataObjectDelegate(IIdentityParameter id, IConfigDataProvider session, ObjectId rootID, OptionalIdentityData optionalData, LocalizedString? notFoundError, LocalizedString? multipleFoundError); '
    s5 = '             IConfigDataProvider dataSession = (ITopologyConfigurationSession)this.DataSession; '
    s7 = "             IConfigDataProvider session = (IConfigDataProvider)this.CreateAdSession(); "
    s8 = '      /// <param name="session">IConfigDataProvider to resolve the wellknown guid</param> '
    s9 = ' private IConfigDataProvider CreateSession() '
    s10 = '                IConfigDataProvider session = (IConfigDataProvider)this.CreateAdSession(); '
    # method-call sample
    s6 = '            return dataSession.FindPaged<ADComplianceProgram>('

    p1 = '.*dataSession.([a-zA-Z<>]+)'
    p1 = regex_pattern(p1)
    match = p1.match(s6)
    if match:
        print(match.group(1))
    else:
        print("failed")
    cout()
    s = [s1,s2,s3,s4,s5,s7,s8,s9,s10]
    p = '.*?IConfigDataProvider[)\s]([a-z][a-zA-Z0-9]+)'
    p = regex_pattern(p)
    cnt = 0
    for i in s:
        cnt = cnt + 1
        match = p.match(i)
        if match:
            print(cnt,match.group(1))
        else:
            print("failed")
    cout()
    cout()
    # walk the configured source tree, printing roughly the first 100 files
    config = read_config()
    path = config['Path']['Path']
    print(path)
    cnt = 0
    for root, dirs, files in os.walk(path):
        if cnt == 100:
            break
        for i in files:
            print(i)
            cnt = cnt + 1

#==================================================
#Description:
#This function is meant to decide when to get_class_list
#Check the status through read files from a specific file
#==================================================
def check_status():
    """Return True when the first line of status.txt reads "True".

    The driver loop uses this to decide whether another get_class_list
    iteration is needed.
    """
    with open("status.txt") as f:
        first_line = f.readline()
    return first_line.strip() == "True"

#==================================================
#Description:
#This method is meant to get the size of a specific file
#==================================================
def size(f_name):
    """Report the size of *f_name* every ten seconds, forever.

    Monitoring helper; meant to run until the process is killed.
    """
    while True:
        current = getsize(f_name)
        print("size of %s is %s" % (f_name, current))
        time.sleep(10)

#==================================================
#Description:
#get a list of method called by specific class type which we get in phase 2
#firstly, we extract those variables of specific class type
#secondly,  we access files where those variables locate and extract the method called by these variables 
#==================================================
def get_method():
    """Phase 3, step 1: extract variables declared with the known types.

    Every file in file_list.txt is scanned line by line; for each known
    (type, subtype) pair a regex extracts variable names declared with
    that subtype.  The hits go to variable_ref.txt / variable.csv and
    are then handed to inner_method() for method extraction.
    """
    class_info = read_class_info('classes.txt')
    # map each top-level type to its stripped subtype names
    subtypes = dict()
    for key in class_info.keys():
        names = subtypes.setdefault(key, set())
        for sub in class_info[key]:
            names.add(sub.strip())

    # PERF: compile one variable-extraction pattern per subtype up front
    # (the original re-compiled inside the per-line loop)
    compiled = dict()
    for item, names in subtypes.items():
        compiled[item] = [(sub, regex_pattern('.*?' + sub + r'[)\s]([a-z][a-zA-Z0-9]+)'))
                          for sub in names]

    # read the file list from disk, stripping trailing newlines
    file_list = [i.strip() for i in read_file('file_list.txt')]

    # 'Type,SubType,Variable,Path' records
    record = set()
    # processed-line counter for progress reporting
    count = 0
    for path in file_list:
        for raw_line in read_file(path):
            count = count + 1
            line = raw_line.strip()
            # skip lines containing '/' (comments, XML doc lines)
            if line.find('/') != -1:
                continue
            for item, pairs in compiled.items():
                for sub_type, pattern in pairs:
                    match = pattern.match(line)
                    if match:
                        match_item = match.group(1)
                        # drop 'this' and very short matches (is, id, ...)
                        if match_item == 'this' or len(match_item) < 3:
                            continue
                        logger.critical(match_item)
                        logger.critical(line)
                        result = '%s,%s,%s,%s' % (item, sub_type, match_item, path)
                        record.add(result)
                        log(result)

    message = '%s variables got in total!' % len(record)
    log(message)
    # BUG FIX: the original also called f2.close() on an undefined name,
    # raising NameError; context managers close both files reliably
    with open('variable_ref.txt', 'w') as f, open('variable.csv', 'w') as f1:
        for item in record:
            value = [part.strip() for part in item.split(',')]
            print(value[0], value[1], value[2], sep=',', file=f)
            print(item, file=f1)

    inner_method(record)
        
#==================================================
#Description:
#get method of specific variable
#inner function for get_method
#==================================================
def inner_method(record):
    """Phase 3, step 2: extract the methods invoked on each variable.

    *record* items are 'Type,SubType,Variable,Path' strings.  Each
    source file is re-read and scanned for '<variable>.<Method>' calls;
    hits are written to result_reference.txt (with code snippets) and
    result.csv.
    """
    # result records: 'Type,SubType,Variable,Method,LineNumber,Path'
    output = set()
    # configured method lists per type (informational mapping)
    method_tmp = read_method_info()
    methods = dict()
    for key in method_tmp.keys():
        parts = method_tmp[key].split(',')
        methods[key] = [part.strip() for part in parts]

    # with-statement guarantees the reference file is closed even on error
    with open('result_reference.txt', 'w') as ref:
        for entry in record:
            fields = [part.strip() for part in entry.split(',')]
            # fields layout: Type, SubType, Variable, Path
            variable = fields[2]
            pattern = regex_pattern('.*' + variable + r'\.([A-Z][a-zA-Z<>]+)')
            path = fields[3]
            lines = read_file(path)
            cnt = 0
            for line in lines:
                cnt = cnt + 1
                # ignore lines containing '/' (comments, XML docs);
                # skip BEFORE matching so the regex never runs on them
                if line.find('/') != -1:
                    continue
                match = pattern.match(line)
                if match:
                    method = match.group(1)
                    result = '%s,%s,%s,%s,%s,%s' % (fields[0], fields[1], variable, method, cnt, path)
                    output.add(result)
                    message = '%s - %s - %s - %s' % (fields[0], fields[1], variable, method)
                    log(message)
                    codes = print_code(lines, cnt)
                    print('--------------------------------------------------', file=ref)
                    print('FilePath: ', path, file=ref)
                    print('LineNumber: ', cnt, file=ref)
                    print('Type: ', fields[0], file=ref)
                    print('SubType: ', fields[1], file=ref)
                    print('Val: ', variable, file=ref)
                    print('Method: ', method, file=ref)
                    try:
                        print('Code Snippet:\r\n', codes, file=ref)
                    except UnicodeEncodeError:
                        # best-effort: skip snippets the target encoding
                        # rejects (was a bare except that hid all errors)
                        pass
                    ref.flush()
    with open('result.csv', 'w') as f:
        for item in output:
            print(item, file=f)

#==================================================
#Description:
#parse argument from command line for different purpose use
#==================================================
def parse_arg():
    """Build and parse the command-line options for the tool.

    Each flag binds a phase function (or print as a placeholder) into
    the corresponding namespace attribute via store_const.
    """
    parser = argparse.ArgumentParser(description='Data Access Code Statistics Tool')
    parser.add_argument('-w', '-walk', dest='walk', action='store_const',
                        const=walk_dir, default=None,
                        help='walk the specific dir and get a list of files.')
    parser.add_argument('-c', '-class', dest='classes', action='store_const',
                        const=print,
                        help='get a list of classes related to data access.')
    parser.add_argument('-m', '-method', dest='methods', action='store_const',
                        const=get_method,
                        help='get the methods called by data access related classes!')
    parser.add_argument('-C', '--config', dest='configuration', action='store_const',
                        const=write_config,
                        help='a helper methods for author use to write configuration .')
    parser.add_argument('-t', '--test', dest='test', action='store_const',
                        const=print,
                        help='a helper methods for author use to write configuration .')
    return parser.parse_args()

#==================================================
#main function
#==================================================
if __name__ == '__main__':
    logger.setLevel(logging.DEBUG)
    # BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is
    # the portable monotonic replacement.  Unlike clock() on Windows it
    # is not zero-based at first call, so elapsed time is end - start.
    start = time.perf_counter()
    arg = parse_arg()
    # background progress indicator while the long phases run
    p = multiprocessing.Process(target=show_wait)
    p.start()

    # phase 1: build file_list.txt from the configured source tree
    if arg.walk is not None:
        logger.info("the file list we are processing is generating...")
        arg.walk()
        logger.info("generating process completed !")

    # phase 2: iterate get_class_list until no new classes are found
    # (check_status reads the flag merge() writes to status.txt)
    if arg.classes is not None:
        init()
        flag = check_status()
        count = 0
        while flag:
            count = count + 1
            get_class_list()
            message = 'level%s starts !' % (count)
            logger.critical(message)
            flag = check_status()

    # phase 3: extract declared variables and the methods called on them
    if arg.methods is not None:
        arg.methods()

    # configuration helper phase
    if arg.configuration is not None:
        print("configuration phase")

    # ad-hoc test hook
    if arg.test is not None:
        test()

    p.terminate()
    end = time.perf_counter()
    message = "%s seconds elapsed !" % (end - start)
    log(message)
