#!/usr/bin/python

# TODO:
# -- add rel-based selection for training?

import os,time
import numpy
from collections import Counter
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
import tables
import multiprocessing
import fcntl
from pdb import pm
from collections import Counter
from llpy import variants
from llpy.sutils import *
# explicit import: sys is used below (table_log) but was previously only
# available if a star import above happened to re-export it; imported last
# so no star import can shadow the stdlib module
import sys

# Command line interface: where the samples and prototypes live, where the
# neighbor table goes, and how much work / parallelism to use.
import argparse
ap = argparse.ArgumentParser(description = "Compute local nearest neighbor information.")
ap.add_argument('data',help="data file")
ap.add_argument('-r','--rejects',action="store_true",help="include rejects")
ap.add_argument('-p','--protos',default="protos.h5",help="prototypes file")
ap.add_argument('-o','--output',default="nn.h5",help='output file')
ap.add_argument("-k",'--neighbors',default=3000,type=int,help="number of candidate neighbors")
ap.add_argument("-Q",'--par',default=multiprocessing.cpu_count(),type=int,help="parallelism")
ap.add_argument("-N","--nsamples",default=2000000000,type=int,help="number of samples")
ap.add_argument("-D","--ddisplay",action="store_true")
ap.add_argument("--chunksize",default=10000,type=int)
args = ap.parse_args()
# debug display implies running everything serially in this process
if args.ddisplay: args.par=0

print "loading databases"
# Load the class labels for (at most --nsamples) samples from the data file.
# NOTE(review): the top-level `classes` array is not referenced again in this
# file (nn_process reloads labels per chunk) -- possibly leftover; confirm.
with tables.openFile(args.data) as db:
    nsamples = min(args.nsamples,len(db.root.classes))
    classes = array(db.root.classes[:nsamples])

# Load the prototype patches, plus optional per-prototype sigmas; when sigmas
# exist, precompute their costs (scosts) for the re-ranking step in nn_process.
with tables.openFile(args.protos) as pdb:
    protos = array(pdb.root.patches,'f')
    psigmas = None
    if "sigmas" in dir(pdb.root):
        psigmas = array(pdb.root.sigmas,'f')
        pcosts = scosts(psigmas)

print "nsamples",nsamples
print "neighbors",args.neighbors
print "nprotos",len(protos)

def nn_process(job):
    protos,fname,k,i,j = job
    print fname,i,j,protos.shape
    with tables.openFile(fname) as db:
        indexes = arange(i,j)
        batch = array(db.root.patches[i:j])
        classes = array(db.root.classes[i:j])
    if not args.rejects:
        sel = ((classes!=ord("_"))&(classes!=ord("~")))
        indexes = indexes[sel]
        batch = batch[sel]
        classes = classes[sel]
    with NNIndex() as nn:
        neighbors2,dists2 = nn.nn(make2d(batch),make2d(protos),num_neighbors=k)
    if psigmas is not None:
        for s,u in enumerate(neighbors2):
            for t,v in enumerate(u):
                r = neighbors2[s,t]
                dists2[s,t] = ddist(batch[r],protos[s],psigmas[s],pcosts[s])
    neighbors2 = indexes[neighbors2]
    return neighbors2,dists2,i,j

# The patches table may be shorter than the classes table; clamp nsamples
# to the number of patches actually present.
with tables.openFile(args.data) as db:
    nsamples = min(nsamples,len(db.root.patches))

# Create a worker pool only when more than one-way parallelism is requested.
if args.par>1:
    pool = multiprocessing.Pool(args.par)

# One job per chunk of samples; note every job carries the full prototype
# array, so it is pickled once per job when running in parallel.
neighbors,dists = (None,None)
jobs = [(protos,args.data,args.neighbors,i,j) for i,j in chunks(nsamples,args.chunksize)]

# Run the jobs -- lazily in-process when parallelism is below 2, otherwise
# on the worker pool -- and fold each chunk's result into the accumulated
# neighbor/distance tables.
results = pool.imap(nn_process,jobs) if args.par>1 else (nn_process(job) for job in jobs)
for n2,d2,i,j in results:
    # every reported neighbor must be a global sample index from this chunk
    assert amin(n2)>=i and amax(n2)<j
    neighbors,dists = nn_merge_dists(neighbors,dists,n2,d2)

# Write the output file: carry the prototype tables over, log how this
# result was produced, then store the merged neighbor/distance tables.
with tables.openFile(args.output,"w") as odb:
    with tables.openFile(args.protos) as pdb:
        table_copy(pdb,odb)
        table_lcopy(pdb,odb)
    # NOTE(review): `sys` must be in scope here -- verify it is explicitly
    # imported at the top of the file rather than via a star import
    table_log(odb,"%s %s"%(sys.argv,time.asctime()))
    table_assign(odb,"neighbors",array(neighbors,'i'))
    table_assign(odb,"dists",array(dists,'f'))
