#!/usr/bin/python

################################################################
# compute nearest neighbor statistics over databases
################################################################

import numpy,os,os.path,sys,time
from collections import Counter,defaultdict
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy import stats
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
from tables import openFile,Filters,Int32Atom,Float32Atom,Int64Atom
import multiprocessing
import fcntl
from llpy.sutils import *

# Command-line interface.  The script takes one or more HDF5 patch
# databases as positional arguments; options select the prototype
# database, the output file, and the degree of parallelism.
import argparse
parser = argparse.ArgumentParser(description = "Compute per prototype means and sigmas")
parser.add_argument('inputs',nargs="*")
# FIX: help text said "protype file"
parser.add_argument('-p','--protos',default="DATA/protos032a.h5",help="prototype file")
parser.add_argument('-o','--output',default="sigmas.h5",help="output file")
parser.add_argument('-f','--floor',default=0.1,type=float,help="sigma floor")
parser.add_argument('-N','--nsamples',type=int,default=2000000000,help="max # of samples")
parser.add_argument('-Q','--par',default=multiprocessing.cpu_count(),type=int,help="parallelism")
parser.add_argument('--chunksize',default=50000,type=int)
args = parser.parse_args()
# Nothing to do without inputs: show usage and exit cleanly.
if args.inputs==[]: parser.print_help(); sys.exit(0)

# Read the prototype patches and their class labels into memory.
with openFile(args.protos) as protodb:
    protos = array(protodb.root.patches[:,:,:])
    pclasses = array(protodb.root.classes[:])

# Nearest-neighbor index over the flattened prototype patches.
nn = NNIndex()
nn.build_index(make2d(protos))

# Global accumulators: per-prototype sample count, sum, and sum of squares.
counts = zeros(len(protos),'f')
sums = zeros(protos.shape,'f')
sums2 = zeros(protos.shape,'f')

def process(job):
    fname,start,end = job
    print fname,start,end
    counts = zeros(len(protos),'f')
    sums = zeros(protos.shape,'f')
    sums2 = zeros(protos.shape,'f')
    with openFile(fname) as db:
        for i in range(start,end):
            cls = db.root.classes[i]
            image = db.root.patches[i]
            [neighbors],[dists] = nn.nn_index(array([image.ravel()]),10)
            order = argsort(dists)
            [n1,n2] = neighbors[order][:2]
            [d1,d2] = dists[order][:2]
            if cls==pclasses[n1]:
                counts[n1] += 1
                sums[n1] += image
                sums2[n1] += image**2
    return (counts,sums,sums2)

pool = multiprocessing.Pool(args.par)

for fname in args.inputs:
    print "===",fname
    with openFile(fname) as db:
        nsamples = min(args.nsamples,len(db.root.classes))
    jobs = [(fname,start,end) for start,end in chunks(nsamples,args.chunksize)]
    for (cs,s1,s2) in pool.imap(process,jobs):
        counts += cs
        sums += s1
        sums2 += s2

means = sums*1.0/maximum(1,counts[:,newaxis,newaxis])
sigmas = sqrt(maximum(0.0,sums2*1.0/maximum(1,counts[:,newaxis,newaxis])-means**2))
sigmas = maximum(sigmas,args.floor)

print "saving"
with openFile(args.output,"w") as odb:
    with openFile(args.protos) as pdb:
        table_lcopy(pdb,odb)
    table_log(odb,"%s %s"%(sys.argv,time.asctime()))
    table_assign(odb,"patches",means)
    table_assign(odb,"sigmas",sigmas)
    table_assign(odb,"counts",counts)
    table_assign(odb,"classes",pclasses)
