""" 

Created by KONSTANTINOS SIDERIS ,07/19/2011
email : ksideris@ucla.edu
"""

import cv ,sys
import numpy as np
import math
import Gabor
import pickle
import bpnn
import random
import glob
import os
import copy
# Number of principal components kept by the PCA-based matcher/training.
NoEigenV = 10
# Default side length (pixels) of the square search window scanned around
# each expected feature location (used by matchNaive2 via this global).
win = 20
def ReadFeaturesToFindFile(filename):
    """Read a feature-location file.

    Format: the first line holds the number of features; each following
    line is "<name> <x> <y>".

    Returns a list of (name, x, y) tuples with x/y converted to float.
    """
    featurepoints = []
    # Context manager guarantees the handle is closed (the original left
    # the file open).
    with open(filename, "r") as txt:
        nofeatures = int(txt.readline())
        for i in range(nofeatures):
            parts = txt.readline().split()
            featurepoints.append((parts[0], float(parts[1]), float(parts[2])))
    return featurepoints

def WriteFeaturesToFindFile(filename, features):
    """Write a feature-location file (inverse of ReadFeaturesToFindFile).

    First line: the feature count; then one "<name> <x> <y>" line per
    (name, x, y) entry of `features`.
    """
    # Context manager flushes and closes the file even if a write fails
    # (the original relied on an explicit close only).
    with open(filename, "w") as txt:
        txt.write(str(len(features)) + '\n')
        for name, x, y in features:
            txt.write(name + ' ' + str(x) + ' ' + str(y) + '\n')

def matchTheSmartWay2(img,data,nose,win):
    """Locate facial features in `img` using one trained net per feature.

    img  -- OpenCV image (IplImage); drawn on and saved to result.png and
            result_with_avrg.png as a side effect.
    data -- path to a features_<case>.data file; the companion
            nn_<case>.dat pickle in the same directory supplies one bpnn
            network per feature.
    nose -- (x, y) of the detected nose bottom, used to offset the
            training-set average feature locations into this image.
    win  -- side length (pixels) of the square search window scanned
            around each expected feature location.

    Returns (maxpoint, features_to_find, offset); maxpoint[k] is the best
    (x, y) for feature k, or (0, 0) if nothing scored above 0.5.
    """
    features= ReadFeaturesToFindFile(data)
    dirName = os.path.dirname(data)
    baseName = os.path.basename(data)
    tmp = os.path.splitext(baseName)
    # Filename is features_<caseNum>.data; caseNum selects nn_<caseNum>.dat.
    caseNum = tmp[0].split("_")[1]

    N=20
    offset = nose

    features_to_find= ReadFeaturesToFindFile(data)
    # Translate the stored average coordinates so the training-set
    # nose_bottom lines up with the detected nose position.
    for ff in range(len(features_to_find)):
        if (features_to_find[ff][0]=='nose_bottom'):
            offset = ( offset[0] -features_to_find[ff][1],offset[1] -features_to_find[ff][2])
            print 'diff',nose,(features_to_find[ff][1],features_to_find[ff][2]),offset
    maximum = 0#[0]*len(features_to_find)
    maxpoint =[(0,0)]*len(features_to_find)
    mGabor =Gabor.LoadGaborFFT('gabor')

    # One trained network per feature, pickled by LoadDataAndTrain2.
    fh = open(os.path.join(dirName,"nn_%s.dat"%caseNum),'r')
    p = pickle.Unpickler(fh)
    nets = p.load()
    fh.close()
    Filtered=[]
    nsize = 40*128*128/64;
    size = (128,128);
    img_size = cv.GetSize( img );
    # Reduce to single-channel grayscale if the input is BGR.
    ipl=    cv.CreateImage(img_size,8,0);
    if(img.nChannels==3):
        cv.CvtColor(img,ipl,cv.CV_BGR2GRAY);
    else:
        ipl =img#cv.Copy(img,ipl,0);


    # Resize to the fixed 128x128 working resolution of the filter bank.
    if((size[0]!=img_size[0]) or (size[1]!=img_size[1])):

        tmpsize=cv.CreateImage(size,8,0);
        cv.Resize(ipl,tmpsize,cv.CV_INTER_LINEAR);

        ipl=cv.CreateImage(size,8,0);
        cv.Copy(tmpsize,ipl);



    _object=np.zeros((len(features_to_find),40),np.float32);
    tmp=cv.CreateImage(size,cv.IPL_DEPTH_64F,0);

    cv.ConvertScale(ipl,tmp,1.0,0);

    w=128;h=128;
    img_size = cv.GetSize( tmp );
    imtmp=cv.CreateImage(img_size,cv.IPL_DEPTH_64F,0);

    cv.ConvertScale(tmp,imtmp,1.0,0);


    # Forward DFT of the zero-padded image so each Gabor filter can be
    # applied by per-element spectrum multiplication.
    dft_M = cv.GetOptimalDFTSize( w+h- 1 );
    dft_N = cv.GetOptimalDFTSize( w+h- 1);
    imdft=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

    cv.Zero(imdft);
    for i in range(h):
        for j in range(w):
            cv.Set2D(imdft,i,j,cv.Get2D(imtmp,i,j)[0])

    cv.DFT( imdft, imdft, cv.CV_DXT_FORWARD, w );
    n=w*h/64;

    # Apply the 40-filter bank (5 scales x 8 orientations) in the
    # frequency domain; keep each inverse-transformed response image.
    for i in range(5):
        for j in range(8):

            gout=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

            cv.MulSpectrums( imdft,mGabor[i*8+j], gout, 0);

            cv.DFT( gout, gout, cv.CV_DXT_INV_SCALE,w+h-1);

            #tmp = cv.CreateMat(dft_M, dft_N, mGabor[i*8+j].type);
            #tmp=cv.GetSubRect(gout,(0,0,w+h-1,h+h-1))
               # Save2im(np.asarray( tmp),'hello.png');
            _gout = np.asarray( gout[:,:] )
            Filtered.append(_gout)

    # Debug leftover: builds filenames but the save call is commented out.
    for lkl in range(len(Filtered)):
        nm=str(lkl)+'.bmp';
        #Save2im(Filtered[lkl],nm);
    # Scan a win x win window around each feature's expected (offset)
    # position; keep the pixel whose Gabor-jet the feature's network
    # scores highest, requiring a score above 0.5.
    for k in range(len(features_to_find)):
        print 'Searching for ',features_to_find[k][0]
        maximum=0
        for i in range(int(round(features_to_find[k][1])+offset[0]-win/2),int(round(features_to_find[k][1])+offset[0]+win/2),1):    #1,img.width,20): #
            for j in range(int(round(features_to_find[k][2])+offset[1]-win/2),int(round(features_to_find[k][2])+offset[1]+win/2),1): #            1,img.height,20
                Weights = np.zeros(40,np.float32);
                # 40-dim jet: average magnitude of each filter response in
                # a neighbourhood of (i, j).
                for m in range(40):
                    Weights[m]=math.sqrt(Gabor.avg(Filtered[m],(i,j),10))
                #math.sqrt(Filtered[m][i,j]*Filtered[m][i,j] + Filtered[m][i,j+1]*Filtered[m][i,j+1])
                Weights=Gabor.ZeroMeanUnitLength(Weights);
                #Weights *= 1.0/np.sum(Weights**2)
                #print i,j,Weights



                result = nets[k].update(map(None,Weights))
                #print result
                if(result>maximum and result >0.5):
                    maximum = result
                    maxpoint[k] = (i,j)
                    #print 'think i found it' ,result
                    #print 'Current Candidate'+str(maxpoint[k]) +' of class ' +str(k)

        print 'Best match '+str(maxpoint[k])

    print maxpoint
    # Draw the detections (coloured) and the raw average locations (black)
    # on the image; both renderings are written to disk.
    colors=[(255,0,0),(255,0,0),(255,0,0),(0,255,0),(0,255,0),(0,255,0),(255,255,0),(255,255,0),(0,0,255),(0,0,255)]
    for m in range(len(maxpoint)):
        cv.Circle(img,maxpoint[m],2,colors[m])
    cv.SaveImage('result.png',img)
    for m in range(len(features_to_find)):
        cv.Circle(img,(int(round(features_to_find[m][1])),int(round(features_to_find[m][2]))),1,(0,0,0))
    cv.SaveImage('result_with_avrg.png',img)


    return maxpoint,features_to_find,offset

def matchTheSmartWayPCA(img,data,nose,win):
        """PCA variant of matchTheSmartWay2.

        Same pipeline as matchTheSmartWay2, but each candidate's 40-dim
        Gabor jet is first projected onto the feature's saved eigenvector
        basis (<feature>_eigen.npy) before being scored by the networks
        pickled in nnpca_<case>.dat.

        Returns (maxpoint, features_to_find, offset); maxpoint[k] is the
        best (x, y) for feature k, or (0, 0) if nothing scored above 0.5.
        Side effects: writes result.png and result_with_avrg.png.
        """
        features= ReadFeaturesToFindFile(data)
        dirName = os.path.dirname(data)
        baseName = os.path.basename(data)
        tmp = os.path.splitext(baseName)
        # Filename is features_<caseNum>.data; caseNum selects nnpca_<caseNum>.dat.
        caseNum = tmp[0].split("_")[1]

        N=20


        features_to_find= ReadFeaturesToFindFile(data)

        offset = nose

        # Translate the stored average coordinates so the training-set
        # nose_bottom lines up with the detected nose position.
        for ff in range(len(features_to_find)):
            if (features_to_find[ff][0]=='nose_bottom'):
                offset = ( offset[0]-features_to_find[ff][1],offset[1] -features_to_find[ff][2])#offset[0] -features_to_find[ff][1]
                print 'diff',nose,(features_to_find[ff][1],features_to_find[ff][2]),offset

        maximum = 0#[0]*len(features_to_find)
        maxpoint =[(0,0)]*len(features_to_find)
        mGabor =Gabor.LoadGaborFFT('gabor')

        # One trained network per feature, pickled by LoadDataAndTrainPCA.
        fh = open(os.path.join(dirName,"nnpca_%s.dat"%caseNum),'r')
        p = pickle.Unpickler(fh)
        nets = p.load()
        fh.close()
        Filtered=[]
        nsize = 40*128*128/64;
        size = (128,128);
        img_size = cv.GetSize( img );
        # Reduce to single-channel grayscale if the input is BGR.
        ipl=    cv.CreateImage(img_size,8,0);
        if(img.nChannels==3):
             cv.CvtColor(img,ipl,cv.CV_BGR2GRAY);
        else:
               ipl =img#cv.Copy(img,ipl,0);


        # Resize to the fixed 128x128 working resolution of the filter bank.
        if((size[0]!=img_size[0]) or (size[1]!=img_size[1])):

            tmpsize=cv.CreateImage(size,8,0);
            cv.Resize(ipl,tmpsize,cv.CV_INTER_LINEAR);

            ipl=cv.CreateImage(size,8,0);
            cv.Copy(tmpsize,ipl);



        _object=np.zeros((len(features_to_find),40),np.float32);
        tmp=cv.CreateImage(size,cv.IPL_DEPTH_64F,0);

        cv.ConvertScale(ipl,tmp,1.0,0);

        w=128;h=128;
        img_size = cv.GetSize( tmp );
        imtmp=cv.CreateImage(img_size,cv.IPL_DEPTH_64F,0);

        cv.ConvertScale(tmp,imtmp,1.0,0);


        # Forward DFT of the zero-padded image so each Gabor filter can be
        # applied by per-element spectrum multiplication.
        dft_M = cv.GetOptimalDFTSize( w+h- 1 );
        dft_N = cv.GetOptimalDFTSize( w+h- 1);
        imdft=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

        cv.Zero(imdft);
        for i in range(h):
            for j in range(w):
                 cv.Set2D(imdft,i,j,cv.Get2D(imtmp,i,j)[0])

        cv.DFT( imdft, imdft, cv.CV_DXT_FORWARD, w );
        n=w*h/64;



        # Apply the 40-filter bank (5 scales x 8 orientations) in the
        # frequency domain; keep each inverse-transformed response image.
        for i in range(5):
            for j in range(8):

                gout=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

                cv.MulSpectrums( imdft,mGabor[i*8+j], gout, 0);

                cv.DFT( gout, gout, cv.CV_DXT_INV_SCALE,w+h-1);




                #tmp = cv.CreateMat(dft_M, dft_N, mGabor[i*8+j].type);
                #tmp=cv.GetSubRect(gout,(0,0,w+h-1,h+h-1))
                # Save2im(np.asarray( tmp),'hello.png');
                _gout = np.asarray( gout[:,:] )
                Filtered.append(_gout)

        # Debug leftover: builds filenames but the save call is commented out.
        for lkl in range(len(Filtered)):
            nm=str(lkl)+'.bmp';
            #Save2im(Filtered[lkl],nm);
        for k in range(len(features_to_find)):
            # NOTE(review): loads <feature>_eigen.npy from the first path
            # component of `data`, while ExtractPCAofDATA saves
            # <feature>_<case>_eigen.npy next to the features file -- the
            # naming/location looks inconsistent; verify against the
            # actual data layout.
            eigen = np.load(data.split('/')[0]+'/'+features_to_find[k][0]+'_eigen.npy')
            print 'Searching for ',features_to_find[k][0]
            maximum=0
            # Scan a win x win window around the feature's expected
            # (offset) position.
            for i in range(int(round(features_to_find[k][1])+offset[0]-win/2),int(round(features_to_find[k][1])+offset[0]+win/2),1):    #1,img.width,20): #
                for j in range(int(round(features_to_find[k][2])+offset[1]-win/2),int(round(features_to_find[k][2])+offset[1]+win/2),1): #            1,img.height,20
                    Weights = np.zeros(40,np.float32);
                    # 40-dim jet: average magnitude of each filter
                    # response in a neighbourhood of (i, j).
                    for m in range(40):
                        Weights[m]=math.sqrt(Gabor.avg(Filtered[m],(i,j),10))#math.sqrt(Filtered[m][i,j]*Filtered[m][i,j] + Filtered[m][i,j+1]*Filtered[m][i,j+1])

                    Weights=Gabor.ZeroMeanUnitLength(Weights);

                    #print eigen.shape
                    # Project the jet onto the first NoEigenV eigenvectors
                    # before scoring with the PCA-trained network.
                    vec = calculateWeights(eigen.T,Weights,NoEigenV)



                    result = nets[k].update(map(None,vec))
                    #print result
                    if(result>maximum and result >0.5):
                        maximum = result
                        maxpoint[k] = (i,j)
                        #print 'think i found it' ,result
                        #print 'Current Candidate'+str(maxpoint[k]) +' of class ' +str(k)

            print 'Best match '+str(maxpoint[k])

        print maxpoint
        # Draw the detections (coloured) and the raw average locations
        # (black) on the image; both renderings are written to disk.
        colors=[(255,0,0),(255,0,0),(255,0,0),(0,255,0),(0,255,0),(0,255,0),(255,255,0),(255,255,0),(0,0,255),(0,0,255)]
        for m in range(len(maxpoint)):
            cv.Circle(img,maxpoint[m],2,colors[m])
        cv.SaveImage('result.png',img)
        for m in range(len(features_to_find)):
            cv.Circle(img,(int(round(features_to_find[m][1])),int(round(features_to_find[m][2]))),1,(0,0,0))
        cv.SaveImage('result_with_avrg.png',img)


        return maxpoint, features_to_find, offset
def matchNaive2(img,data,nose):
        """Locate features by nearest-match against the raw training set.

        Same Gabor pipeline as matchTheSmartWay2, but instead of a neural
        network each candidate jet is scored by its maximum dot product
        with the stored training vectors (<feature>.npy).  Uses the
        module-level `win` global for the search-window size.

        Returns maxpoint, the best (x, y) per feature.  Side effects:
        writes result.png and result_with_avrg.png.
        """
        N=40

        offset = nose


        features_to_find= ReadFeaturesToFindFile(data)
        # Translate the stored average coordinates so the training-set
        # nose_bottom lines up with the detected nose position.
        for ff in range(len(features_to_find)):
            if (features_to_find[ff][0]=='nose_bottom'):
                offset = ( offset[0] -features_to_find[ff][1],offset[1] -features_to_find[ff][2])
                print 'diff',nose,(features_to_find[ff][1],features_to_find[ff][2]),offset
        maximum = 0#[0]*len(features_to_find)
        maxpoint =[(0,0)]*len(features_to_find)
        mGabor =Gabor.LoadGaborFFT('gabor')


        Filtered=[]
        nsize = 40*128*128/64;
        size = (128,128);
        img_size = cv.GetSize( img );
        # Reduce to single-channel grayscale if the input is BGR.
        ipl=    cv.CreateImage(img_size,8,0);
        if(img.nChannels==3):
             cv.CvtColor(img,ipl,cv.CV_BGR2GRAY);
        else:
               ipl =img#cv.Copy(img,ipl,0);


        # Resize to the fixed 128x128 working resolution of the filter bank.
        if((size[0]!=img_size[0]) or (size[1]!=img_size[1])):

            tmpsize=cv.CreateImage(size,8,0);
            cv.Resize(ipl,tmpsize,cv.CV_INTER_LINEAR);

            ipl=cv.CreateImage(size,8,0);
            cv.Copy(tmpsize,ipl);



        _object=np.zeros((len(features_to_find),40),np.float32);
        tmp=cv.CreateImage(size,cv.IPL_DEPTH_64F,0);

        cv.ConvertScale(ipl,tmp,1.0,0);

        w=128;h=128;
        img_size = cv.GetSize( tmp );
        imtmp=cv.CreateImage(img_size,cv.IPL_DEPTH_64F,0);

        cv.ConvertScale(tmp,imtmp,1.0,0);


        # Forward DFT of the zero-padded image so each Gabor filter can be
        # applied by per-element spectrum multiplication.
        dft_M = cv.GetOptimalDFTSize( w+h- 1 );
        dft_N = cv.GetOptimalDFTSize( w+h- 1);
        imdft=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

        cv.Zero(imdft);
        for i in range(h):
            for j in range(w):
                 cv.Set2D(imdft,i,j,cv.Get2D(imtmp,i,j)[0])

        cv.DFT( imdft, imdft, cv.CV_DXT_FORWARD, w );
        n=w*h/64;



        # Apply the 40-filter bank (5 scales x 8 orientations) in the
        # frequency domain; keep each inverse-transformed response image.
        for i in range(5):
            for j in range(8):

                gout=cv.CreateMat(dft_M, dft_N, cv.CV_64FC1 );

                cv.MulSpectrums( imdft,mGabor[i*8+j], gout, 0);

                cv.DFT( gout, gout, cv.CV_DXT_INV_SCALE,w+h-1);

                _gout = np.asarray( gout[:,:] )
                Filtered.append(_gout)

        # Debug leftover: builds filenames but the save call is commented out.
        for lkl in range(len(Filtered)):
            nm=str(lkl)+'.bmp';
            #Save2im(Filtered[lkl],nm);
        for k in range(len(features_to_find)):
            maximum=0
            # Scan a win x win window around the feature's expected
            # (offset) position; `win` is the module-level global here.
            for i in range(int(round(features_to_find[k][1])+offset[0]-win/2),int(round(features_to_find[k][1])+offset[0]+win/2),1):    #1,img.width,20): #
                for j in range(int(round(features_to_find[k][2])+offset[1]-win/2),int(round(features_to_find[k][2])+offset[1]+win/2),1): #            1,img.height,20
                    Weights = np.zeros(40,np.float32);
                    # 40-dim jet: average magnitude of each filter
                    # response in a neighbourhood of (i, j).
                    for m in range(40):
                        Weights[m]=math.sqrt(Gabor.avg(Filtered[m],(i,j),10))#math.sqrt(Filtered[m][i,j]*Filtered[m][i,j] + Filtered[m][i,j+1]*Filtered[m][i,j+1])
                    Weights=Gabor.ZeroMeanUnitLength(Weights);

                    # NOTE(review): this np.load is invariant over i/j (and
                    # depends only on k) but is re-executed for every pixel
                    # of the search window -- a large avoidable cost.
                    feat_dataset= np.load(data.split('/')[0]+'/'+features_to_find[k][0]+'.npy')

                    #print feat_dataset[:,:2]

                    result=0


                    # Score: best dot product against any stored training
                    # vector for this feature.
                    s= ( np.dot(feat_dataset,Weights))

                    result =s.max()
                    if(result>maximum ):
                        maximum = result
                        maxpoint[k] = (i,j)
                        #print 'think i found it' ,result
                        #print 'Current Candidate'+str(maxpoint[k]) +' of class ' +str(k)
            print 'Best match '+str(maxpoint[k])


        print maxpoint
        # Draw the detections (coloured) and the raw average locations
        # (black) on the image; both renderings are written to disk.
        colors=[(255,0,0),(255,0,0),(255,0,0),(0,255,0),(0,255,0),(0,255,0),(255,255,0),(255,255,0),(0,0,255),(0,0,255)]
        for m in range(len(maxpoint)):
            cv.Circle(img,maxpoint[m],2,colors[m])
        cv.SaveImage('result.png',img)
        for m in range(len(features_to_find)):
            cv.Circle(img,(int(round(features_to_find[m][1])),int(round(features_to_find[m][2]))),1,(0,0,0))
        cv.SaveImage('result_with_avrg.png',img)


        return maxpoint



def pca(X):
    """Principal Component Analysis.

    Adapted from
    http://www.janeriksolem.net/2009/01/pca-for-images-using-python.html

    input: X, matrix with training data as flattened arrays in rows.
    return: (U, S, mean_X) -- projection matrix with the important
    dimensions first, the variances, and the mean of the rows.

    NOTE: X is centered in place, so the caller's array is modified.
    """
    num_data, dim = X.shape

    # Center the data (in place, matching the original's row loop).
    mean_X = X.mean(axis=0)
    X -= mean_X

    if dim > 100:
        print('PCA - compact trick used')
        M = np.dot(X, X.T)  # num_data x num_data covariance surrogate
        e, EV = np.linalg.eigh(M)  # eigenvalues ascending, vectors in COLUMNS
        # BUGFIX: eigh returns eigenvectors as columns; the original's
        # EV[::-1] reversed the rows, scrambling every eigenvector.
        # Reverse the columns so the largest eigenvalues come first.
        U = EV[:, ::-1]
        # abs() guards against tiny negative eigenvalues from round-off.
        S = np.sqrt(np.abs(e))[::-1]
    else:
        print('PCA - SVD used')
        U, S, V = np.linalg.svd(X, 0)

    # Return the projection matrix, the variance and the mean.
    return U, S, mean_X
def excludeFilesFromDataSet(files, exclude):
    """Remove every path in `exclude` from the list `files`, in place.

    Entries of `exclude` that are not present in `files` are silently
    ignored (best-effort removal).
    """
    for ex in exclude:
        try:
            files.remove(ex)
            # Parenthesized form works identically on Python 2 and 3.
            print("%s excluded" % ex)
        except ValueError:
            # ex was not in files -- nothing to remove.
            pass
def createDataSet3(dataset,folder, suffix=".png"):
    """Create leave-one-out datasets for feature identification.

    For every numbered image <k><suffix> in `dataset`, builds a training
    set from all the OTHER images (cross-validation style): each
    feature's 40-dim Gabor vectors are stacked into
    <folder>/<feature>_<k>.npy and the average feature locations are
    written to <folder>/features_<k>.data.  Each image must have a
    companion .data file describing its feature locations.
    """
    # Identify potential files to build dataset from
    masterFiles = glob.glob(os.path.join(dataset,"*"+suffix))
    masterFiles.sort()
    for ff in masterFiles:
        tmp = os.path.splitext(ff)
        bn = os.path.basename(tmp[0])
        # Image files are named by case number (e.g. 007.png).
        k = int(bn)
        files = copy.copy(masterFiles)

        # Leave the current image out of its own training set.
        excludedFiles = [ff]
        excludeFilesFromDataSet(files,excludedFiles)
        NoData = len(files)
        N=40
        feature_average =[('',0,0)]*10

        nofeatures=0;
        _feat = np.zeros((10,int(NoData),40),np.float32)
        for j in range(len(files)):
            f = files[j]
            img = cv.LoadImage(f)
            tmp = os.path.splitext(f)

            nofeatures,features_to_filter = \
                Gabor.readFeaturesFile(tmp[0]+'.data')
            # NOTE(review): the filter bank is reloaded for every image;
            # it could be hoisted out of the loops.
            mGabor = Gabor.LoadGaborFFT('gabor')


            fea_vec = \
                Gabor.extract_features2(img,mGabor,features_to_filter)


            print f
            # Accumulate per-feature vectors and sum locations for the
            # averaging pass below.
            for i in range(nofeatures):
                _feat[i][j] = fea_vec[i]
                feature_average[i] = (features_to_filter[i][0],feature_average[i][1]+features_to_filter[i][1],feature_average[i][2]+features_to_filter[i][2])
        for i in range(nofeatures):
            tmp = os.path.splitext(features_to_filter[i][0])
            np.save(os.path.join(folder,tmp[0]+"_%03d"%(k)+tmp[1]),_feat[i])
            # Convert the location sums into means over the training set.
            feature_average[i] =(feature_average[i][0], feature_average[i][1]/float(NoData)    ,feature_average[i][2]/float(NoData)    )

        print feature_average
        WriteFeaturesToFindFile(os.path.join(folder,'features_%03d.data'%(k)),feature_average)

def createDataSet2(dataset, NoData, folder):
    """Build a Gabor training set from <dataset>/1.png .. <NoData>.png.

    Each image j must have a companion <dataset>/j.data file listing its
    feature locations.  For every feature, the per-image 40-dim Gabor
    response vectors are stacked and saved to <folder>/<feature>.npy, and
    the average feature location over the set is written to
    <folder>/features.data.
    """
    feature_average = [('', 0, 0)] * 10
    nofeatures = 0
    _feat = np.zeros((10, int(NoData), 40), np.float32)
    # The Gabor filter bank is loop-invariant: load it once instead of
    # once per image as the original did.
    mGabor = Gabor.LoadGaborFFT('gabor')
    for j in range(1, int(NoData) + 1):
        img = cv.LoadImage(dataset + '/' + str(j) + '.png')

        nofeatures, features_to_filter = Gabor.readFeaturesFile(dataset + '/' + str(j) + '.data')

        fea_vec = Gabor.extract_features2(img, mGabor, features_to_filter)

        print(j)
        # Accumulate per-feature vectors and sum locations for averaging.
        for i in range(nofeatures):
            _feat[i][j - 1] = fea_vec[i]
            feature_average[i] = (features_to_filter[i][0],
                                  feature_average[i][1] + features_to_filter[i][1],
                                  feature_average[i][2] + features_to_filter[i][2])
    for i in range(nofeatures):
        np.save(folder + '/' + features_to_filter[i][0], _feat[i])
        # BUGFIX: the original divided by float(sys.argv[3]); use the
        # NoData parameter so the function also works when not invoked
        # through the '-bt' command line (the CLI passes the same value).
        feature_average[i] = (feature_average[i][0],
                              feature_average[i][1] / float(NoData),
                              feature_average[i][2] / float(NoData))

    print(feature_average)
    WriteFeaturesToFindFile(folder + '/' + 'features.data', feature_average)

def calculateWeights(eigenv, data, selectedfacesnum):
    """Project `data` onto the first `selectedfacesnum` eigenvectors.

    `eigenv` holds one eigenvector per row; the result is the transpose
    of the selected rows multiplied with `data`.
    """
    selected = eigenv[:selectedfacesnum, :]
    return np.dot(selected, data).transpose()



def trainSimpleNet(population):
    """Train a small back-propagation network on `population`.

    `population` is a list of [input_vector, target] pairs.  The network
    size is inferred from the first input vector, with one hidden layer
    of two nodes and a single output node.  Returns the trained network.
    """
    n_inputs = len(population[0][0])
    net = bpnn.NN(n_inputs, 2, 1)
    net.train(population)
    return net
def ExtractPCAofDATA(featuresfile):
    """Run PCA on every feature's saved Gabor-vector dataset.

    For each feature listed in `featuresfile` (features_<case>.data),
    loads <feature>_<case>.npy from the same directory, keeps the
    NoEigenV leading eigenvectors, and saves the projected data as
    <feature>_<case>_pc.npy and the eigenvector matrix as
    <feature>_<case>_eigen.npy alongside the input.
    """
    features= ReadFeaturesToFindFile(featuresfile)
    dirName = os.path.dirname(featuresfile)
    baseName = os.path.basename(featuresfile)
    tmp = os.path.splitext(baseName)
    # Filename is features_<caseNum>.data.
    caseNum = tmp[0].split("_")[1]

    for k in range(len(features)):
        ffile = features[k][0]+"_%s"%caseNum+".npy"
        print "feature file to read",ffile
        # Transposed so PCA sees the 40 Gabor coefficients as rows.
        data= np.load(os.path.join(dirName,ffile)).T
        # NOTE(review): `eval` shadows the Python builtin of the same name.
        evec, eval,mean = pca(data)
        # Re-order eigenvectors (columns) by decreasing eigenvalue.
        reversedevalueorder=eval.argsort()[::-1]
        evec=evec[:,reversedevalueorder]
        eval=np.sort(eval)[::-1]
        # Center the data before projecting onto the components.
        avgvals=np.average(data,axis=0)
        data2=data-avgvals

        print evec.shape,evec[:,:NoEigenV].shape

        # Project the centred data onto the NoEigenV leading components.
        vec = calculateWeights(evec.T,data2,NoEigenV)
        print vec.shape
        pcfile = os.path.join(dirName,"%s_%s_pc"%(features[k][0],caseNum))
        print "pcfile to save",pcfile
        np.save(pcfile,vec)
        eigenfile = os.path.join(dirName,"%s_%s_eigen"%(features[k][0],caseNum))
        print "eigenfile to save",eigenfile
        np.save(eigenfile,evec[:,:NoEigenV])


def LoadDataAndTrain2(featuresfile):
    """Train one neural network per feature on the raw Gabor vectors.

    Loads <feature>_<case>.npy for every feature listed in `featuresfile`
    (features_<case>.data), trains a small bpnn network per feature using
    every stored vector as a positive example (target [1]), and pickles
    the list of networks to nn_<case>.dat in the same directory.

    Returns the list of trained networks.
    """
    features= ReadFeaturesToFindFile(featuresfile)
    dirName = os.path.dirname(featuresfile)
    baseName = os.path.basename(featuresfile)
    tmp = os.path.splitext(baseName)
    # Filename is features_<caseNum>.data.
    caseNum = tmp[0].split("_")[1]


    features= ReadFeaturesToFindFile(featuresfile)
    nets=[]
    feat_dataset=[]
    for k in range(len(features)):
        ffile = os.path.join(dirName,"%s_%s.npy"%(features[k][0],caseNum))

        feat_dataset.append(np.load(ffile))

    for k in range(len(features)):

        population = []

        # Every stored vector is a positive training example (target 1).
        for i in range(int(feat_dataset[k].shape[0])):
            population.append( [map(None, feat_dataset[k][i]),[1]])

        print 'training',features[k][0]
        nets.append( trainSimpleNet(population))
        # NOTE(review): reassigned every iteration with the same value;
        # only used once after the loop.
        nnfile = os.path.join(dirName,"nn_%s.dat"%caseNum)
    fh = open(nnfile,'w')
    p = pickle.Pickler(fh)
    p.dump(nets)
    fh.close()


    print 'network Training Complete'
    return nets
    
def LoadDataAndTrainPCA(featuresfile):
    """Train one neural network per feature on PCA-projected vectors.

    Loads <feature>_<case>_pc.npy (written by ExtractPCAofDATA) for every
    feature listed in `featuresfile`, trains a small bpnn network per
    feature using every stored vector as a positive example (target [1]),
    and pickles the list of networks to nnpca_<case>.dat in the same
    directory.  Returns the list of trained networks.
    """
    features= ReadFeaturesToFindFile(featuresfile)
    dirName = os.path.dirname(featuresfile)
    baseName = os.path.basename(featuresfile)
    tmp = os.path.splitext(baseName)
    # Filename is features_<caseNum>.data.
    caseNum = tmp[0].split("_")[1]

    features= ReadFeaturesToFindFile(featuresfile)
    nets=[]
    feat_dataset=[]
    for k in range(len(features)):
        ffile = os.path.join(dirName,features[k][0]+"_%s_pc.npy"%caseNum)
        feat_dataset.append(np.load(ffile))
        print feat_dataset[k].shape
    for k in range(len(features)):

        population = []

        # Every stored vector is a positive training example (target 1).
        for i in range(int(feat_dataset[k].shape[0])):
            population.append( [map(None, feat_dataset[k][i]),[1]])

        print 'training',features[k][0]
        nets.append( trainSimpleNet(population))

        # NOTE(review): reassigned every iteration with the same value;
        # only used once after the loop.
        nnpcaFile = os.path.join(dirName,"nnpca_%s.dat"%caseNum)
    fh = open(nnpcaFile,'w')
    p = pickle.Pickler(fh)
    p.dump(nets)
    fh.close()
    print 'network Training Complete'
    return nets
def main():
    """Command-line entry point: dispatch on the mode flag sys.argv[1].

    Modes: -h help, -b build Gabor bank, -bt/-btxv build datasets,
    -nn/-nnpca train networks, -m/-mn/-mp locate features in an image,
    -pca extract principal components, -test quick smoke test.
    """
    mode = str(sys.argv[1])

    # Single-argument print(...) behaves identically on Python 2 and 3.
    if (mode == '-h'):
        print(' ')
        print(' ')
        print('-----GABOR FACIAL FEATURE LOCALIZATION-----')
        print(' ')
        print(' ')
        print('->author (with exceptions) : Konstantinos Sideris . email: ksideris@ucla.edu ')
        print(' ')
        print(' ')
        print('-----HELP-----')
        print(' ')
        print(' ')
        print('->python Gabor.py -b')
        print(' ')
        print(' Build Gabor Wavelets database and their FFT transform . This function only needs to be run once ')
        print(' ')
        print('->python Gabor.py -bt <folder> <NoImages> <target_folder>')
        print(' ')
        print(' Create Training Set from training images in <folder>. The total number of images considerered is  <NoImages> and they myst be named 1.png 2.pg ... <NoImages>.png . There must be accompanied by a file 1.dat , 2.dat etc that describes the location of their features in the image. They all must have the same number/type of features. This process saves the data as binary files in <target_folder> as well as a file named features.data that contains the type of features learned and their average location in the training set ')
        print(' ')
        print('->python Gabor.py -nn <features.data location> ')
        print(' ')
        print(' Trains a neural network to recognize the features described in features.data')
        print(' ')
        print(' ')
        print('->python Gabor.py -m <image> <features.data location> ')
        print(' ')
        print(' Uses the trained network (must be in the same folder as features.data and named nn.dat) to localize in the <image> the features described in features.data')
        print(' ')
        print('->python Gabor.py -mn <image> <features.data location> ')
        print(' ')
        print(' Uses a simpler localization algorithm to localize in the <image> the features described in features.data')
        print(' ')

    elif (mode == '-b'):

        Gabor.gabor_filter('gabor')
    elif (mode == '-m'):
        print('Matching')
        img = cv.LoadImage(sys.argv[2])
        # BUGFIX: matchTheSmartWay2 takes (img, data, nose, win); the
        # original two-argument call always raised TypeError.  The
        # (-20, -40) nose offset matches the value used by the '-mp'
        # branch; `win` is the module-level search-window size.
        matchTheSmartWay2(img, sys.argv[3], (-20, -40), win)

    elif (mode == '-mn'):
        print('Matching')

        img = cv.LoadImage(sys.argv[2])
        # BUGFIX: matchNaive2 takes (img, data, nose); nose was missing.
        matchNaive2(img, sys.argv[3], (-20, -40))
    elif (mode == '-mp'):
        print('Matching')

        img = cv.LoadImage(sys.argv[2])
        # BUGFIX: matchTheSmartWayPCA also requires the window size.
        matchTheSmartWayPCA(img, sys.argv[3], (-20, -40), win)
    elif (mode == "-nn"):
        print('Training Neural Network')
        LoadDataAndTrain2(sys.argv[2])
    elif (mode == "-nnpca"):
        print('Training Neural Network')
        LoadDataAndTrainPCA(sys.argv[2])
    elif (mode == '-bt'):
        print('Creating Dataset')
        createDataSet2(sys.argv[2], sys.argv[3], sys.argv[4])
    elif (mode == '-btxv'):
        print('Creating Dataset')
        createDataSet3(sys.argv[2], sys.argv[3])

    elif (mode == '-pca'):
        ExtractPCAofDATA(sys.argv[2])

    elif (mode == '-test'):
        img = cv.LoadImage('1.TIFF')
        # BUGFIX: LoadGaborFFT and extract_features2 live in the Gabor
        # module; the original unqualified calls raised NameError.
        mGabor = Gabor.LoadGaborFFT('gabor')
        print(mode)
        features = ReadFeaturesToFindFile('data/features.data')
        Gabor.extract_features2(img, mGabor, features)

        print(mode)
# Script entry point: run the CLI dispatcher only when executed directly.
if __name__ == "__main__":
    main()
