""" 

Created by KONSTANTINOS SIDERIS ,07/19/2011
email : ksideris@ucla.edu
"""

import cv ,sys
import numpy as np
import math
import Gabor
import pickle
import bpnn
import random
import glob
import os
from IPython.Debugger import Tracer
from optparse import OptionParser

debug_here = Tracer()

NoEigenV = 10
win = 20

def getParser():
    """Build the command-line option parser for this tool.

    Options: -n/--nosample (int, number of training images),
    -m/--mode (string, dispatch flag, see main()), -d/--features
    (features.data path), -i/--images (training image folder),
    -f/--filename (input image path).
    """
    parser = OptionParser()
    # (short, long, dest, type, default) for each supported option
    option_table = (
        ("-n", "--nosample", "nosample", "int", 0),
        ("-m", "--mode", "mode", "string", ""),
        ("-d", "--features", "features", "string", ""),
        ("-i", "--images", "images", "string", ""),
        ("-f", "--filename", "filename", "string", ""),
    )
    for short_flag, long_flag, dest, opt_type, default in option_table:
        parser.add_option(short_flag, long_flag, dest=dest, type=opt_type, default=default)
    return parser

def ReadFeaturesToFindFile(filename):
    """Read a features file and return its feature points.

    File format: the first line holds the feature count N, followed by N
    lines of the form "name x y".

    Returns a list of (name, x, y) tuples with x and y as floats.

    Fixes: the file handle was never closed (resource leak) and the list
    was initialised with a dummy 'none' entry that was immediately removed.
    """
    featurepoints = []
    with open(filename, "r") as txt:
        nofeatures = int(txt.readline())
        for _ in range(nofeatures):
            parts = txt.readline().split()
            featurepoints.append((parts[0], float(parts[1]), float(parts[2])))
    return featurepoints

def WriteFeaturesToFindFile(filename, features):
    """Write feature points to a features file.

    Emits the feature count on the first line, then one "name x y" line
    per feature (the inverse of ReadFeaturesToFindFile).
    """
    out = open(filename, "w")
    out.write(str(len(features)) + '\n')
    for feat in features:
        # feat is (name, x, y); stringify the coordinates as-is
        line = feat[0] + ' ' + str(feat[1]) + ' ' + str(feat[2]) + '\n'
        out.write(line)
    out.close()

def matchTheSmartWay2(img,data,nose):
    """Locate the facial features listed in `data` inside `img` using the
    Gabor-filter + per-feature neural-network matcher.

    img  -- OpenCV image (3-channel or grayscale); resized to 128x128.
    data -- path to a features.data file (see ReadFeaturesToFindFile); the
            trained nets are loaded from '<first path component>/nn.dat'.
    nose -- detected nose-bottom position, presumably (x, y) -- TODO confirm;
            used to shift the stored average feature positions.

    Returns a list of (i, j) best-match points, one per feature ((0,0) when
    no candidate scored above 0.5).  Side effects: writes result.png and
    result_with_avrg.png to the current directory.
    """
    N=40
    offset = nose

    # Offset = detected nose minus the training-set average nose position;
    # every feature's search window is shifted by this amount.
    features_to_find= ReadFeaturesToFindFile(data)
    for ff in range(len(features_to_find)):
        if (features_to_find[ff][0]=='nose_bottom'):
            offset = ( offset[0] -features_to_find[ff][1],offset[1] -features_to_find[ff][2])
            print 'diff',nose,(features_to_find[ff][1],features_to_find[ff][2]),offset	
    maximum = 0#[0]*len(features_to_find)
    maxpoint =[(0,0)]*len(features_to_find)
    mGabor =Gabor.LoadGaborFFT('gabor')

    # One trained network per feature, pickled by LoadDataAndTrain2.
    fh = open(data.split('/')[0]+'/nn.dat','r')
    p = pickle.Unpickler(fh)
    nets = p.load()
    fh.close()			
    Filtered=[]
    nsize = 40*128*128/64;
    size = (128,128);
    img_size = cv.GetSize( img );
    # Convert to single-channel grayscale; pass-through if already gray.
    ipl = cv.CreateImage(img_size,8,0);
    if(img.nChannels==3):
        cv.CvtColor(img,ipl,cv.CV_BGR2GRAY);
    else:
        ipl =img#cv.Copy(img,ipl,0);

    # Normalise to the fixed 128x128 working resolution.
    if((size[0]!=img_size[0]) or (size[1]!=img_size[1])):

        tmpsize=cv.CreateImage(size,8,0);
        cv.Resize(ipl,tmpsize,cv.CV_INTER_LINEAR);

        ipl=cv.CreateImage(size,8,0);	
        cv.Copy(tmpsize,ipl);

    _object=np.zeros((len(features_to_find),40),np.float32);
    tmp=cv.CreateImage(size,cv.IPL_DEPTH_64F,0);	

    cv.ConvertScale(ipl,tmp,1.0,0);

    w=128;h=128;
    img_size = cv.GetSize( tmp );
    imtmp=cv.CreateImage(img_size,cv.IPL_DEPTH_64F,0);
    cv.ConvertScale(tmp,imtmp,1.0,0);

 
    # Zero-padded forward DFT of the image, sized for linear convolution
    # with the (w x h) Gabor kernels.
    dft_M = cv.GetOptimalDFTSize( w+h- 1 );
    dft_N = cv.GetOptimalDFTSize( w+h- 1);
    imdft=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

    cv.Zero(imdft);
    for i in range(h):
        for j in range(w):
            cv.Set2D(imdft,i,j,cv.Get2D(imtmp,i,j)[0])
    cv.DFT( imdft, imdft, cv.CV_DXT_FORWARD, w );
    n=w*h/64;

    # Convolve with each of the 5x8 = 40 Gabor kernels (5 scales x 8
    # orientations) by multiplying spectra and inverting the DFT.
    for i in range(5):
        for j in range(8):
            gout=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );		
            cv.MulSpectrums( imdft,mGabor[i*8+j], gout, 0);

            cv.DFT( gout, gout, cv.CV_DXT_INV_SCALE,w+h-1);

            #tmp = cv.CreateMat(dft_M, dft_N, mGabor[i*8+j].type);
            #tmp=cv.GetSubRect(gout,(0,0,w+h-1,h+h-1))
            # Save2im(np.asarray( tmp),'hello.jpg');
            _gout = np.asarray( gout[:,:] )
            Filtered.append(_gout)

    # Leftover debug loop: computed filenames are unused (save call is
    # commented out).
    for lkl in range(len(Filtered)):
        nm=str(lkl)+'.bmp';
        #Save2im(Filtered[lkl],nm);
    # Scan a win x win window around each feature's (offset) average
    # position; the per-feature net scores the 40-element Gabor response
    # vector at each candidate pixel.
    for k in range(len(features_to_find)):	
        print 'Searching for ',features_to_find[k][0]
        maximum=0	
        for i in range(int(round(features_to_find[k][1])+offset[0]-win/2),int(round(features_to_find[k][1])+offset[0]+win/2),1):	#1,img.width,20): #
            for j in range(int(round(features_to_find[k][2])+offset[1]-win/2),int(round(features_to_find[k][2])+offset[1]+win/2),1): #			1,img.height,20
                Weights = np.zeros(40,np.float32);
                
                # 40-element response: local average magnitude of each
                # filtered image around (i, j).
                for m in range(40):
                    Weights[m]=math.sqrt(Gabor.avg(Filtered[m],(i,j),10))
                #math.sqrt(Filtered[m][i,j]*Filtered[m][i,j] + Filtered[m][i,j+1]*Filtered[m][i,j+1])
                Weights=Gabor.ZeroMeanUnitLength(Weights);
                #Weights *= 1.0/np.sum(Weights**2) 
                #print "i"+ str(i)+ ",j"+str(j)+ "weights, "+ Weights

                result = nets[k].update(map(None,Weights))
                #print result
                # Keep the best-scoring candidate above the 0.5 threshold.
                if(result>maximum and result >0.5):
                    maximum = result
                    maxpoint[k] = (i,j)
                    #print 'think i found it' ,result
                    #print 'Current Candidate'+str(maxpoint[k]) +' of class ' +str(k)

            # NOTE(review): the next line starts with a tab; under Python 2's
            # tab==8 rule it sits inside the i-loop, so it prints once per
            # column rather than once per feature -- confirm intent.
	    print 'Best match '+str(maxpoint[k])

    # Draw the located points (coloured) and the average positions (black)
    # onto the input image for visual inspection.
    print maxpoint	
    colors=[(255,0,0),(255,0,0),(255,0,0),(0,255,0),(0,255,0),(0,255,0),(255,255,0),(255,255,0),(0,0,255),(0,0,255)]
    for m in range(len(maxpoint)):
        cv.Circle(img,maxpoint[m],2,colors[m])
    cv.SaveImage('result.png',img)
    for m in range(len(features_to_find)):
        cv.Circle(img,(int(round(features_to_find[m][1])),int(round(features_to_find[m][2]))),1,(0,0,0))
    cv.SaveImage('result_with_avrg.png',img)


    return maxpoint

def matchTheSmartWayPCA(img,data,nose):
    """PCA variant of matchTheSmartWay2: locate facial features in `img`.

    Same Gabor-filter pipeline, but each 40-element response vector is first
    projected onto the feature's top NoEigenV eigenvectors (loaded from
    '<folder>/<feature>_eigen.npy') before being scored by the nets from
    '<folder>/nnpca.dat'.

    img  -- OpenCV image (3-channel or grayscale); resized to 128x128.
    data -- path to a features.data file; sibling .npy / nnpca.dat files
            are loaded from its first path component.
    nose -- detected nose-bottom position, presumably (x, y) -- TODO confirm.

    Returns a list of (i, j) best-match points.  Side effects: writes
    result.png and result_with_avrg.png.
    """
    N=40
    
    features_to_find= ReadFeaturesToFindFile(data)
        
    offset = nose
   
        
    # Only the y component of the offset is used here (x forced to 0),
    # unlike matchTheSmartWay2 -- presumably deliberate; verify.
    for ff in range(len(features_to_find)):
        if (features_to_find[ff][0]=='nose_bottom'):
            offset = ( 0,offset[1] -features_to_find[ff][2])#offset[0] -features_to_find[ff][1]
            print 'diff',nose,(features_to_find[ff][1],features_to_find[ff][2]),offset	

    maximum = 0#[0]*len(features_to_find)
    maxpoint =[(0,0)]*len(features_to_find)
    #print "mapoint"+ str(maxpoint)

    mGabor =Gabor.LoadGaborFFT('gabor')

    # One trained network per feature, pickled by LoadDataAndTrainPCA.
    fh = open(data.split('/')[0]+'/nnpca.dat','r')
    p = pickle.Unpickler(fh)
    nets = p.load()
    fh.close()			
    Filtered=[]
    nsize = 40*128*128/64;
    size = (128,128);	
    img_size = cv.GetSize( img );
    ipl=	cv.CreateImage(img_size,8,0);
    if(img.nChannels==3):
        cv.CvtColor(img,ipl,cv.CV_BGR2GRAY);
    else:
        ipl =img#cv.Copy(img,ipl,0);

    # NOTE(review): from here on the body is tab-indented while the lines
    # above use 4 spaces; under Python 2's tab==8 rule everything below
    # nests inside the preceding else-branch, so 3-channel images would
    # skip all of it and the function would return None -- verify.
	if((size[0]!=img_size[0]) or (size[1]!=img_size[1])):
	
		tmpsize=cv.CreateImage(size,8,0);	
		cv.Resize(ipl,tmpsize,cv.CV_INTER_LINEAR);
		
		ipl=cv.CreateImage(size,8,0);	
		cv.Copy(tmpsize,ipl);	

	_object=np.zeros((len(features_to_find),40),np.float32);
	tmp=cv.CreateImage(size,cv.IPL_DEPTH_64F,0);	

	cv.ConvertScale(ipl,tmp,1.0,0);
		
	w=128;h=128;
	img_size = cv.GetSize( tmp );
	imtmp=cv.CreateImage(img_size,cv.IPL_DEPTH_64F,0);	

	cv.ConvertScale(tmp,imtmp,1.0,0);
	 
	# Zero-padded forward DFT, sized for linear convolution with the
	# (w x h) Gabor kernels.
	dft_M = cv.GetOptimalDFTSize( w+h- 1 );
	dft_N = cv.GetOptimalDFTSize( w+h- 1);
	imdft=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );
	
	cv.Zero(imdft);
	for i in range(h):
		for j in range(w):
			cv.Set2D(imdft,i,j,cv.Get2D(imtmp,i,j)[0])

	cv.DFT( imdft, imdft, cv.CV_DXT_FORWARD, w );
	n=w*h/64;
		


	
	# Convolve with each of the 5x8 = 40 Gabor kernels (5 scales x 8
	# orientations) by multiplying spectra and inverting the DFT.
	for i in range(5):
		for j in range(8):	
			gout=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );
			
			cv.MulSpectrums( imdft,mGabor[i*8+j], gout, 0);
			
			cv.DFT( gout, gout, cv.CV_DXT_INV_SCALE,w+h-1);  
		
			#tmp = cv.CreateMat(dft_M, dft_N, mGabor[i*8+j].type);
			#tmp=cv.GetSubRect(gout,(0,0,w+h-1,h+h-1))
			# Save2im(np.asarray( tmp),'hello.jpg');
			_gout = np.asarray( gout[:,:] )
			Filtered.append(_gout)

	# Leftover debug loop: filenames computed but unused.
	for lkl in range(len(Filtered)):
		nm=str(lkl)+'.bmp';
		#Save2im(Filtered[lkl],nm);
	# Scan a win x win window around each feature's (offset) average
	# position; project the Gabor responses onto the feature's
	# eigenvectors and score with the per-feature net.
	for k in range(len(features_to_find)):
		eigen = np.load(data.split('/')[0]+'/'+features_to_find[k][0]+'_eigen.npy')
		print 'Searching for ',features_to_find[k][0]
		maximum=0	
		for i in range(int(round(features_to_find[k][1])+offset[0]-win/2),int(round(features_to_find[k][1])+offset[0]+win/2),1):	#1,img.width,20): #
			for j in range(int(round(features_to_find[k][2])+offset[1]-win/2),int(round(features_to_find[k][2])+offset[1]+win/2),1): #			1,img.height,20
				Weights = np.zeros(40,np.float32);					
				for m in range(40):
					Weights[m]=math.sqrt(Gabor.avg(Filtered[m],(i,j),10))#math.sqrt(Filtered[m][i,j]*Filtered[m][i,j] + Filtered[m][i,j+1]*Filtered[m][i,j+1])
					
				Weights=Gabor.ZeroMeanUnitLength(Weights);
					
				#print eigen.shape
				# Reduce the 40-dim response to NoEigenV PCA coefficients.
				vec = calculateWeights(eigen.T,Weights,NoEigenV)				
					
				result = nets[k].update(map(None,vec))
				#print result
				# Keep the best-scoring candidate above the 0.5 threshold.
				if(result>maximum and result >0.5):
					maximum = result
					maxpoint[k] = (i,j)
					#print 'think i found it' ,result
					#print 'Current Candidate'+str(maxpoint[k]) +' of class ' +str(k)

		print 'Best match '+str(maxpoint[k])

	# Draw located points (coloured) and average positions (black).
	print maxpoint	
	colors=[(255,0,0),(255,0,0),(255,0,0),(0,255,0),(0,255,0),(0,255,0),(255,255,0),(255,255,0),(0,0,255),(0,0,255)]
	for m in range(len(maxpoint)):
		cv.Circle(img,maxpoint[m],2,colors[m])
	cv.SaveImage('result.png',img)
	for m in range(len(features_to_find)):
		cv.Circle(img,(int(round(features_to_find[m][1])),int(round(features_to_find[m][2]))),1,(0,0,0))
	cv.SaveImage('result_with_avrg.png',img)


	return maxpoint

def matchNaive2(img,data,nose):
	"""Naive matcher: locate facial features in `img` without a neural net.

	Same Gabor-filter pipeline as matchTheSmartWay2, but each candidate
	pixel is scored by the maximum dot product between its 40-element
	response vector and the stored training vectors for that feature
	('<folder>/<feature>.npy'); no 0.5 threshold is applied.

	img  -- OpenCV image (3-channel or grayscale); resized to 128x128.
	data -- path to a features.data file; sibling .npy files are loaded
	        from its first path component.
	nose -- detected nose-bottom position, presumably (x, y) -- TODO confirm.

	Returns a list of (i, j) best-match points.  Side effects: writes
	result.png and result_with_avrg.png.
	"""
	N=40

	offset = nose
		
	# Shift the stored average feature positions by the difference between
	# the detected nose and the training-set average nose position.
	features_to_find= ReadFeaturesToFindFile(data)
	for ff in range(len(features_to_find)):
		if (features_to_find[ff][0]=='nose_bottom'):
			offset = ( offset[0] -features_to_find[ff][1],offset[1] -features_to_find[ff][2])
			print 'diff',nose,(features_to_find[ff][1],features_to_find[ff][2]),offset	
	maximum = 0#[0]*len(features_to_find)
	maxpoint =[(0,0)]*len(features_to_find)
	mGabor =Gabor.LoadGaborFFT('gabor')
		
	
	Filtered=[]
	nsize = 40*128*128/64;
	size = (128,128);	
	img_size = cv.GetSize( img );
	# Convert to single-channel grayscale; pass-through if already gray.
	ipl=	cv.CreateImage(img_size,8,0);
	if(img.nChannels==3):
		cv.CvtColor(img,ipl,cv.CV_BGR2GRAY);
	else:
		ipl =img#cv.Copy(img,ipl,0);

	# Normalise to the fixed 128x128 working resolution.
	if((size[0]!=img_size[0]) or (size[1]!=img_size[1])):
	
		tmpsize=cv.CreateImage(size,8,0);	
		cv.Resize(ipl,tmpsize,cv.CV_INTER_LINEAR);
		
		ipl=cv.CreateImage(size,8,0);	
		cv.Copy(tmpsize,ipl);


	_object=np.zeros((len(features_to_find),40),np.float32);
	tmp=cv.CreateImage(size,cv.IPL_DEPTH_64F,0);	

	cv.ConvertScale(ipl,tmp,1.0,0);
		
	w=128;h=128;
	img_size = cv.GetSize( tmp );
	imtmp=cv.CreateImage(img_size,cv.IPL_DEPTH_64F,0);	

	cv.ConvertScale(tmp,imtmp,1.0,0);
	 
	# Zero-padded forward DFT, sized for linear convolution with the
	# (w x h) Gabor kernels.
	dft_M = cv.GetOptimalDFTSize( w+h- 1 );
	dft_N = cv.GetOptimalDFTSize( w+h- 1);
	imdft=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );
	
	cv.Zero(imdft);
	for i in range(h):
		for j in range(w):
			cv.Set2D(imdft,i,j,cv.Get2D(imtmp,i,j)[0])
			
	cv.DFT( imdft, imdft, cv.CV_DXT_FORWARD, w );
	n=w*h/64;

	
	# Convolve with each of the 5x8 = 40 Gabor kernels (5 scales x 8
	# orientations) by multiplying spectra and inverting the DFT.
	for i in range(5):
		for j in range(8):		
			gout=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );
			
			cv.MulSpectrums( imdft,mGabor[i*8+j], gout, 0);
			
			cv.DFT( gout, gout, cv.CV_DXT_INV_SCALE,w+h-1);  
			
			_gout = np.asarray( gout[:,:] )
			Filtered.append(_gout)

	# Leftover debug loop: filenames computed but unused.
	for lkl in range(len(Filtered)):
		nm=str(lkl)+'.bmp';
		#Save2im(Filtered[lkl],nm);
	# Scan a win x win window around each feature's (offset) average
	# position and keep the pixel with the highest dot-product score.
	for k in range(len(features_to_find)):	
		maximum=0	
		for i in range(int(round(features_to_find[k][1])+offset[0]-win/2),int(round(features_to_find[k][1])+offset[0]+win/2),1):	#1,img.width,20): #
			for j in range(int(round(features_to_find[k][2])+offset[1]-win/2),int(round(features_to_find[k][2])+offset[1]+win/2),1): #			1,img.height,20
				Weights = np.zeros(40,np.float32);					
				for m in range(40):
					Weights[m]=math.sqrt(Gabor.avg(Filtered[m],(i,j),10))#math.sqrt(Filtered[m][i,j]*Filtered[m][i,j] + Filtered[m][i,j+1]*Filtered[m][i,j+1])
				Weights=Gabor.ZeroMeanUnitLength(Weights);
				
				# NOTE: the feature-k dataset is re-loaded from disk for
				# every candidate pixel; it could be hoisted out of both
				# loops.
				feat_dataset= np.load(data.split('/')[0]+'/'+features_to_find[k][0]+'.npy')

				#print feat_dataset[:,:2]
								
				result=0
					

				# Score = best dot product against any stored training
				# vector for this feature.
				s= ( np.dot(feat_dataset,Weights))
					
				result =s.max()
				if(result>maximum ):
					maximum = result
					maxpoint[k] = (i,j)
					#print 'think i found it' ,result
					#print 'Current Candidate'+str(maxpoint[k]) +' of class ' +str(k)
		print 'Best match '+str(maxpoint[k])


	# Draw located points (coloured) and average positions (black).
	print maxpoint	
	colors=[(255,0,0),(255,0,0),(255,0,0),(0,255,0),(0,255,0),(0,255,0),(255,255,0),(255,255,0),(0,0,255),(0,0,255)]
	for m in range(len(maxpoint)):
		cv.Circle(img,maxpoint[m],2,colors[m])
	cv.SaveImage('result.png',img)
	for m in range(len(features_to_find)):
		cv.Circle(img,(int(round(features_to_find[m][1])),int(round(features_to_find[m][2]))),1,(0,0,0))
	cv.SaveImage('result_with_avrg.png',img)


	return maxpoint



def pca(X):
    """Principal Component Analysis.

    Based on Jan Erik Solem's PCA-for-images recipe
    (http://www.janeriksolem.net/2009/01/pca-for-images-using-python.html).

    X -- matrix with training data as flattened arrays in rows
         (num_data x dim).  X is NOT modified (the original centred it
         in place, surprising callers).

    Returns (U, S, mean_X): projection matrix with the important
    dimensions first (rows are components), the variances, and the mean.

    Fixes: the compact-trick branch previously returned the reversed small
    eigenvector matrix EV itself; the mapping back to data space
    (dot(X.T, EV).T, present but commented out) was missing, so the
    returned "projection matrix" was wrong for dim > 100.
    """
    num_data, dim = X.shape

    # Center the data without mutating the caller's array.
    mean_X = X.mean(axis=0)
    X = X - mean_X

    if dim > 100:
        print('PCA - compact trick used')
        M = np.dot(X, X.T)  # small (num_data x num_data) covariance matrix
        e, EV = np.linalg.eigh(M)  # eigenvalues ascending, eigenvectors in columns
        tmp = np.dot(X.T, EV).T  # compact trick: map eigenvectors to data space
        U = tmp[::-1]  # reverse: last eigenvectors are the ones we want
        S = np.sqrt(e)[::-1]  # reverse: eigenvalues are in increasing order
    else:
        print('PCA - SVD used')
        U, S, V = np.linalg.svd(X, 0)
        # V[:num_data] would be the only meaningful rows if needed

    return U, S, mean_X

def createDataSet2(dataset,NoData,folder):
    """Build the training dataset from annotated images.

    dataset -- folder containing *.png training images, each with a sibling
               <name>.data file describing its feature locations.
    NoData  -- number of images to use (at most 10 features per image are
               stored -- the arrays below are hard-sized to 10).
    folder  -- output folder: one <feature>.npy (NoData x 40 Gabor response
               vectors) per feature, plus features.data holding each
               feature's average position across the set.
    """
    N=40
    NoData = int(NoData)
    feature_average =[('',0,0)]*10

    nofeatures=0;
    # _feat[i][j] = 40-element Gabor response vector of feature i in image j.
    _feat = np.zeros((10,int(NoData),40),np.float32)
    files = glob.glob(os.path.join(dataset,"*.png"))[:NoData]
    for j in range(len(files)):
        f = files[j]
        tmp = os.path.splitext(f)
        img = cv.LoadImage(f)#dataset+'/'+str(j)+'.png')

        nofeatures,features_to_filter = Gabor.readFeaturesFile(tmp[0]+'.data')
        #debug_here()
        # NOTE: the Gabor bank is re-loaded from disk for every image;
        # it could be hoisted out of the loop.
        mGabor = Gabor.LoadGaborFFT('gabor')

        fea_vec = Gabor.extract_features2(img,mGabor,features_to_filter)

        print j
        # Accumulate response vectors and running position sums per feature.
        for i in range(nofeatures):	
            _feat[i][j] = fea_vec[i]
            feature_average[i] = (features_to_filter[i][0],feature_average[i][1]+features_to_filter[i][1],feature_average[i][2]+features_to_filter[i][2])		
    # Save per-feature response matrices and turn position sums into means.
    for i in range(nofeatures):	
        np.save(folder+'/'+features_to_filter[i][0],_feat[i])
        feature_average[i] =(feature_average[i][0], feature_average[i][1]/float(len(files))	,feature_average[i][2]/float(len(files))	)	

    print feature_average
    WriteFeaturesToFindFile(folder+'/'+'features.data',feature_average)

def calculateWeights(eigenv, data, selectedfacesnum):
    """Project `data` onto the first `selectedfacesnum` rows of `eigenv`.

    eigenv -- matrix whose rows are eigenvectors.
    data   -- vector or matrix compatible with the eigenvector length.

    Returns the transposed projection (PCA coefficients).
    """
    basis = eigenv[:selectedfacesnum]
    return np.dot(basis, data).T


def trainSimpleNet(population):
    """Train a small backpropagation network on `population`.

    population -- list of [input_vector, target] training patterns; the
                  width of the first input vector fixes the input layer
                  size.

    Returns the trained bpnn.NN (input x 2 hidden x 1 output).
    """
    # The first pattern's input vector determines the input layer width.
    n_inputs = len(population[0][0])

    net = bpnn.NN(n_inputs, 2, 1)
    net.train(population)
    return net

def ExtractPCAofDATA(featuresfile):
    features= ReadFeaturesToFindFile(featuresfile)

    for k in range(len(features)):

        data= np.load(featuresfile.split('/')[0]+'/'+features[k][0]+'.npy').T
        #NoEigenV =5
        #print data.shape	
        evec, eval,mean = pca(data)
        reversedevalueorder=eval.argsort()[::-1]
        evec=evec[:,reversedevalueorder]
        eval=np.sort(eval)[::-1]       
        avgvals=np.average(data,axis=0) 
        data2=data-avgvals

        print evec.shape,evec[:,:NoEigenV].shape

        vec = calculateWeights(evec.T,data2,NoEigenV)
        print vec.shape
        np.save(featuresfile.split('/')[0]+'/'+features[k][0]+'_pc',vec)

        np.save(featuresfile.split('/')[0]+'/'+features[k][0]+'_eigen',evec[:,:NoEigenV])

def LoadDataAndTrain2(featuresfile):
    features= ReadFeaturesToFindFile(featuresfile)
    nets=[]
    feat_dataset=[]
    for k in range(len(features)):

        feat_dataset.append(np.load(featuresfile.split('/')[0]+'/'+features[k][0]+'.npy'))

    for k in range(len(features)):

        population = []

        for i in range(int(feat_dataset[k].shape[0])):
            population.append( [map(None, feat_dataset[k][i]),[1]])

        print 'training',features[k][0]
        nets.append( trainSimpleNet(population))

    fh = open(featuresfile.split('/')[0]+'/nn.dat','w')
    p = pickle.Pickler(fh)
    p.dump(nets)
    fh.close()
    
    print 'network Training Complete'
    return nets	

def LoadDataAndTrainPCA(featuresfile):
    features= ReadFeaturesToFindFile(featuresfile)
    nets=[]
    feat_dataset=[]
    for k in range(len(features)):

        feat_dataset.append(np.load(featuresfile.split('/')[0]+'/'+features[k][0]+'_pc.npy'))
        print feat_dataset[k].shape
    for k in range(len(features)):
        population = []

        for i in range(int(feat_dataset[k].shape[0])):
            population.append( [map(None, feat_dataset[k][i]),[1]])

        print 'training',features[k][0]
        nets.append( trainSimpleNet(population))

    fh = open(featuresfile.split('/')[0]+'/nnpca.dat','w')
    p = pickle.Pickler(fh)
    p.dump(nets)
    fh.close()

    print 'network Training Complete'
    return nets

def main():
    
    parser = getParser()
    (options, args) = parser.parse_args()
    N=40

    mode = str(options.mode)
    
    if (mode == '-h'):
        print ' '
        print ' '
        print '-----GABOR FACIAL FEATURE LOCALIZATION-----'	
        print ' '
        print ' '
        print '->author (with exceptions) : Konstantinos Sideris . email: ksideris@ucla.edu '
        print ' '
        print ' '
        print '-----HELP-----'
        print ' '
        print ' '
        print '->python Gabor.py -b'
        print ' '
        print ' Build Gabor Wavelets database and their FFT transform . This function only needs to be run once '
        print ' '
        print '->python Gabor.py -bt <folder> <NoImages> <target_folder>'
        print ' '
        print ' Create Training Set from training images in <folder>. The total number of images considerered is  <NoImages> and they myst be named 1.jpg 2.pg ... <NoImages>.jpg . There must be accompanied by a file 1.dat , 2.dat etc that describes the location of their features in the image. They all must have the same number/type of features. This process saves the data as binary files in <target_folder> as well as a file named features.data that contains the type of features learned and their average location in the training set '
        print ' '
        print '->python Gabor.py -nn <features.data location> '
        print ' '
        print ' Trains a neural network to recognize the features described in features.data'
        print ' '
        print ' '
        print '->python Gabor.py -m <image> <features.data location> '
        print ' '
        print ' Uses the trained network (must be in the same folder as features.data and named nn.dat) to localize in the <image> the features described in features.data'
        print ' '
        print '->python Gabor.py -mn <image> <features.data location> '
        print ' '
        print ' Uses a simpler localization algorithm to localize in the <image> the features described in features.data'
        print ' '

    elif (mode == '-b'):

        Gabor.gabor_filter('gabor');
    elif (mode == '-m'):
        print 'Matching'
        img = cv.LoadImage(options.filename)
        matchTheSmartWay2(img,options.features)

    elif (mode == '-mn'):
        print 'Matching'

        img = cv.LoadImage(options.filename)
        matchNaive2(img,options.features)
    elif (mode == '-mp'):
        print 'Matching'

        img = cv.LoadImage(options.filename)
        matchTheSmartWayPCA(img,options.features,(-20,-40))
    elif (mode == "-nn"): 
        print 'Training Neural Network'
        LoadDataAndTrain2(options.features)
    elif (mode == "-nnpca"): 
        print 'Training Neural Network'
        LoadDataAndTrainPCA(options.features)
    elif (mode == '-bt'):
        print 'Creating Dataset'
        createDataSet2(options.images,options.nosample,options.features)
    elif (mode == '-pca'):
    #trainSimpleNet(2)
        ExtractPCAofDATA(options.features)
        #LoadDataAndTrain(sys.argv[2])

    elif (mode == '-test'):
        img = cv.LoadImage('1.TIFF')
        mGabor = LoadGaborFFT('gabor')
        print mode
        features = ReadFeaturesToFindFile('data/features.data')	
        extract_features2(img,mGabor,features)

        print mode

# Script entry point: behaviour is selected via the -m/--mode flag
# (run with --mode -h for the usage text printed by main()).
if __name__ == "__main__":
    main()
