#!/usr/bin/python

### Evaluate prototypes with nearest-neighbor classification of patches.

import numpy,os,os.path,sys
from collections import Counter
import matplotlib
if "DISPLAY" not in os.environ: matplotlib.use("AGG")
else: matplotlib.use("GTK")
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
from tables import openFile,Filters,Int32Atom,Float32Atom,Int64Atom
import multiprocessing
import fcntl
from llpy.sutils import *

import argparse

# Command-line interface.  The evaluation classifies test patches against a
# prototype set with k-NN, optionally rescoring with diagonal covariances.
parser = argparse.ArgumentParser(description=
"""Evaluate prototypes with nearest neighbor classification, optionally
using rescoring with diagonal covariance matrices, filtering based
on line geometry and different cost functions.""")

parser.add_argument('inputs',nargs="*",help="input files")
parser.add_argument('-o','--output',default=None,help="output file for error information")
parser.add_argument('-p','--protos',default=None,help="prototypes")
parser.add_argument('-m','--mode',default='knn',help='knn, rescore, vc')
parser.add_argument('-k','--k',default=1,type=int,help='number of neighbors')
parser.add_argument('-i','--variants',default=1,type=int,help="number of variants to try")
parser.add_argument('-l','--linefilter',default=1000.0,type=float,help="use line information for classification (choose a value around 2)")
parser.add_argument('-r','--rescore',default=None,type=int,help="number of neighbors for rescoring (try 200)")
parser.add_argument('-f','--floor',default=0.1,type=float,help="sigma floor")
parser.add_argument('-c','--cost',default=1.0,type=float,help="cost factor for density (0=Mahalanobis, 1=neg log likelihood)")
parser.add_argument('-d','--dilation',default=0,type=int,help="dilation of variance")
parser.add_argument('-v','--verbosity',default=2,type=int,help='verbosity level (default=2)')
parser.add_argument('-S','--sfloor',default=None,type=float,help='floor for standard deviation')
parser.add_argument('-R','--report',default=0.8,type=float,help="reporting threshold")
parser.add_argument('-N','--nsamples',default=2000000000,type=int,help="max # test cases")
parser.add_argument('-Q','--par',default=multiprocessing.cpu_count(),type=int,help="parallelism")
parser.add_argument('--chunksize',default=1000,type=int)
args = parser.parse_args()
if len(args.inputs)==0:
    parser.print_help()
    sys.exit(0)

# Validate via parser.error rather than assert (asserts are stripped under -O).
if args.protos is None:
    parser.error("a prototype file must be supplied with -p/--protos")
if len(args.inputs)!=1 and args.output is not None:
    parser.error("-o/--output requires exactly one input file")

protos = Protos(verbosity=args.verbosity)
protos.load(args.protos)
if args.rescore is not None: 
    protos.k_rescore = args.rescore
else:
    pass
if args.verbosity>0: print "rescoring",protos.k_rescore
if protos.k_rescore>1: 
    assert protos.sigmas is not None,\
        "rescoring (%d) requires presence of sigmas"%protos.k_rescore
if args.sfloor is not None: protos.sfloor = args.sfloor

def linefilter(rel,results,dists):
    # Penalize nearest-neighbor candidates whose relative line geometry is
    # implausible for the prototype, then re-sort by the adjusted distances.
    # NOTE(review): `relmeans` / `relsigmas` are not defined in this file;
    # presumably they arrive via one of the star imports above -- verify.
    # NOTE(review): this function currently appears unused (see the
    # "FIXME add line filter" in process()).
    for i in range(len(results)):
        p = results[i]
        rmeans = relmeans[p]
        rsigmas = relsigmas[p]
        # Per-dimension squared deviation in units of 2*sigma^2; sigma is
        # floored at 0.01 to avoid division by (near) zero.
        deltas = (rel-rmeans)**2/(2*maximum(0.01,rsigmas)**2)
        # print rel,rmeans,rsigmas,deltas
        if args.linefilter<0:
            # Negative threshold = diagnostic mode: just report the deviation.
            print amax(deltas)
        elif (deltas>args.linefilter).any():
            # Geometry mismatch: push this candidate to the end of the ranking.
            dists[i] = 999.0
    order = argsort(dists)
    return results[order],dists[order]

preds = {}

def process(job):
    """Classify one chunk of patches by nearest-neighbor lookup.

    job is a (filename, start, end) triple identifying a slice of the HDF5
    file.  Returns (errors, lerrors, total, confusion, protoerrs, protoconf,
    preds) so that results from parallel workers can be merged additively.
    """
    fname,start,end = job
    nerrors = 0
    nlerrors = 0
    count = 0
    conf = Counter()         # (true class, predicted class) -> #errors
    perrs = Counter()        # prototype index -> #errors
    pconf = Counter()        # (prototype, true, predicted) -> #errors
    predictions = {}         # sample index -> (true, predicted, proto, dist)
    with openFile(fname) as db:
        classes = db.root.classes
        patches = db.root.patches
        for i in range(start,end):
            cls = classes[i]
            # Skip samples with empty or reject labels.
            if udecode(cls) in [""," ","~","_"]: continue
            image = patches[i]
            # Two nearest prototypes; only the closest is used for now.
            [[n1,n2]],[[d1,d2]] = protos.nn_index(array([image.ravel()]),2)
            # FIXME add line filter
            pred = protos.classes[n1]
            predictions[i] = (cls,pred,n1,d1)
            if cls!=pred:
                nerrors += 1
                perrs[n1] += 1
                conf[(cls,pred)] += 1
                pconf[(n1,cls,pred)] += 1
            # "lerrors" counts only errors between non-confusable classes.
            if not confusable(cls,pred):
                nlerrors += 1
            count += 1
    return (nerrors,nlerrors,count,conf,perrs,pconf,predictions)

# Worker pool used to classify chunks of samples in parallel.
pool = multiprocessing.Pool(args.par)

# Global accumulators; the per-chunk results from process() are merged in here.
errors = 0
lerrors = 0
total = 0
confusion = Counter()   # (true class, predicted class) -> #errors
protoerrs = Counter()   # prototype index -> #errors
protoconf = Counter()   # (prototype, true, predicted) -> #errors

for fname in args.inputs:
    if args.verbosity>1: print "===",fname
    with openFile(fname) as db:
        nsamples = min(args.nsamples,len(db.root.classes))
    jobs = [(fname,start,end) for start,end in chunks(nsamples,args.chunksize)]
    for (e,l,t,c,pe,pc,pr) in pool.imap(process,jobs):
        errors += e
        lerrors += l
        total += t
        confusion += c
        protoerrs += pe
        protoconf += pc
        preds.update(pr.items())
        if args.verbosity>1: print "# %.4f %.4f %8d %8d"%(errors*1.0/total,lerrors*1.0/total,errors,total)

print "llnneval-result",
print args.protos,
print "+".join(args.inputs),
print "%.4f %.4f %8d %8d"%(errors*1.0/total,lerrors*1.0/total,errors,total)

if args.verbosity>0:
    cum = 0
    for (pred,cls),n in confusion.most_common(10000):
        print "confusion %6d %6d   %6d   %s %s"%(pred,cls,n,udecode(pred),udecode(cls))
        cum += n
        if cum>=args.report*errors: break
    cum = 0
    for proto,n in protoerrs.most_common(100):
        print "protoerr %6d %6d"%(proto,n)
        cum += n
        if cum>=args.report*errors: break
    cum = 0
    for (proto,pred,cls),n in protoconf.most_common(10000):
        print "protoconf %6d   %6d %6d   %6d   %-4s %-4s       %.4f"%(proto,pred,cls,n,udecode(pred),udecode(cls),cum*1.0/errors)
        cum += n
        if cum>=args.report*errors: break

# Optionally dump per-sample predictions and the error statistics to an
# HDF5 file for later analysis.
if args.output is not None:
    with openFile(args.output,"w") as odb:
        # Copy bookkeeping tables from the inputs into the output database.
        # NOTE(review): table_lcopy/table_assign come from the star imports
        # above (presumably llpy.sutils) -- semantics not visible here.
        for fname in args.inputs:
            with openFile(fname) as db:
                table_lcopy(db,odb)
        # Dense per-sample arrays indexed by the original sample index;
        # rows for skipped samples remain zero.
        tpred = zeros((max(preds.keys())+1,3),'i')
        tdist = zeros((max(preds.keys())+1,),'f')
        for i,(cls,pred,n1,d1) in preds.items():
            tpred[i] = (pred,n1,cls)
            tdist[i] = d1
        table_assign(odb,"pred",tpred)
        table_assign(odb,"dist",tdist)
        # NOTE(review): confusion keys were inserted as (cls,pred) in
        # process(), and protoconf keys as (proto,cls,pred); the unpacking
        # below names them in the opposite order, so the emitted columns may
        # be mislabeled -- verify against consumers of these tables.
        tconfusion = array([[n,pred,cls] for (pred,cls),n in confusion.most_common(999999999)])
        table_assign(odb,"confusion",tconfusion)
        tprotoerrs = array([[n,proto] for proto,n in protoerrs.most_common(999999999)])
        table_assign(odb,"protoerrs",tprotoerrs)
        tprotoconf = array([[n,proto,pred,cls] for (proto,pred,cls),n in protoconf.most_common(999999999)])
        table_assign(odb,"protoconf",tprotoconf)

# Release the prototype database (and any FLANN index it holds).
protos.close()

