#!/usr/bin/python

################################################################
# compute nearest neighbor statistics over databases
################################################################

import numpy,os,os.path,sys,time
from collections import Counter,defaultdict
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy import stats
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
from tables import openFile,Filters,Int32Atom,Float32Atom,Int64Atom
import multiprocessing
import fcntl
from llpy.sutils import *

import argparse
# Command-line interface: input databases plus prototype/output files and
# sampling/parallelism knobs.
parser = argparse.ArgumentParser(description = "Compute distance statistics for prototypes.")
parser.add_argument('inputs',nargs="*",help="input files")
parser.add_argument('-p','--protos',default="sigmas.h5",help="prototype file")
parser.add_argument('-o','--output',default="dstats.h5",help="output file")
parser.add_argument('-N','--nsamples',type=int,default=2000000000,help="max # of samples")
parser.add_argument('-t','--top',type=int,default=5,help="max # of classes in posteriors/counts")
parser.add_argument('-r','--keepreject',action="store_true",help="keep rejects")
parser.add_argument('-R','--norescore',action="store_true",help="don't use rescored data")
parser.add_argument('-Q','--par',default=multiprocessing.cpu_count(),type=int,help="parallelism")
parser.add_argument('--chunksize',default=10000,type=int)
args = parser.parse_args()
# No inputs means nothing to do: show usage and exit cleanly.
if not args.inputs: parser.print_help(); sys.exit(0)

# Load the prototype database and sanity-check that its rescoring state
# (presence of per-prototype sigmas) matches the --norescore flag.
protos = Protos()
protos.load(args.protos)

if args.norescore:
    # rescored sigmas must not be present when rescoring is disabled
    assert protos.sigmas is None
else:
    assert protos.sigmas is not None

def process(job):
    fname,start,end = job
    print fname,start,end
    dists = defaultdict(list)
    hists = defaultdict(Counter)
    with openFile(fname) as db:
        for i in range(start,end):
            cls = db.root.classes[i]
            if not args.keepreject and udecode(cls) in [""," ","~","_"]: continue
            image = db.root.patches[i]
            [[n1,n2]],[[d1,d2]] = protos.nn_index(array([image.ravel()]),2)
            hists[n1][cls] += 1
            if cls==protos.classes[n1]:
                dists[n1].append(d1)
    return (dists,hists)

pool = multiprocessing.Pool(args.par)

dists = defaultdict(list)
hists = defaultdict(Counter)

for fname in args.inputs:
    print "===",fname
    with openFile(fname) as db:
        nsamples = min(args.nsamples,len(db.root.classes))
    jobs = [(fname,start,end) for start,end in chunks(nsamples,args.chunksize)]
    for (ds,hs) in pool.imap(process,jobs):
        for k,v in ds.items(): dists[k] += ds[k]
        for k,v in hs.items(): hists[k] += hs[k]

# Per-prototype summary statistics: mean and standard deviation of the
# distances of correctly matched samples, plus the most common labels
# (and their counts) that landed on each prototype.
d_means = array([mean(dists[i]) for i in range(protos.n)])
d_sigmas = array([sqrt(var(dists[i])) for i in range(protos.n)])
d_classes = []
d_counts = []
for i in range(protos.n):
    top = hists[i].most_common(args.top)
    if top:
        labels,counts = zip(*top)
    else:
        # prototype never matched anything
        labels,counts = [],[]
    d_classes.append(labels)
    d_counts.append(counts)
# one entry per prototype in every output array
assert len(d_means)==protos.n
assert len(d_sigmas)==protos.n
assert len(d_classes)==protos.n
assert len(d_counts)==protos.n

print "saving"
with openFile(args.output,"w") as odb:
    with openFile(args.protos) as pdb:
        table_copy(pdb,odb)
        table_lcopy(pdb,odb)
    table_log(odb,"%s %s"%(sys.argv,time.asctime()))
    table_assign(odb,"d_means",d_means)
    table_assign(odb,"d_sigmas",d_sigmas)
    table_assign(odb,"d_classes",arraywrap(d_classes,5,'int64'))
    table_assign(odb,"d_counts",arraywrap(d_counts,5,'int64'))
