#!/usr/bin/python

import os,time
import numpy
from collections import Counter
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
import tables
import multiprocessing
import fcntl
from pdb import pm
from collections import Counter
from llpy import variants
from llpy.sutils import *

import argparse
parser = argparse.ArgumentParser(description = "Compute local linear models.")
parser.add_argument('data',help="data file")
parser.add_argument('-p','--protos',help="prototypes file")
parser.add_argument('-o','--output',help='output file')
parser.add_argument("-N","--nsamples",default=2000000000,type=int,help="number of samples")
parser.add_argument("-s","--sigma",default=10.0,type=float,help="kernel width")
parser.add_argument("-d","--kspace",default=10,type=int,help="pca dimensions for local linear training")
parser.add_argument("-Q","--par",default=multiprocessing.cpu_count(),type=int,help="parallelism")
parser.add_argument("-J","--junk",default=0,type=int,help="number of junk samples")
parser.add_argument("-n","--neighbors",default=5000,type=int,help="number of neighbors")
parser.add_argument('--logreg_l2',default=1.0,type=float,help="l2 regularization parameter")
parser.add_argument("--csize",default=100000,type=int,help="chunk size for neighborhoods")
parser.add_argument("--testprotos",default=1000000000,type=int,help="max # protos")
parser.add_argument("--nologistic",action="store_true",help="use linear least squares instead of logistic regression")
# FIX: a hard-coded debug command line was parsed here instead of the real
# one, so the script silently ignored its actual arguments.  The debug
# invocation is kept below for reference:
# cmdline = "-d 10 -N 30000 -n 500 --csize 8000 -p data/protos32a.h5 -o output.h5 data/training.h5".split()
# args = parser.parse_args(cmdline)
args = parser.parse_args()

with tables.openFile(args.data) as db:
    nsamples = min(args.nsamples,len(db.root.classes))
    classes = array(db.root.classes[:nsamples])
    print "nsamples",nsamples

def background(cls,n):
    """Return the indices of n randomly chosen junk samples (class '~').

    NOTE(review): the `cls` argument is unused -- noise is always drawn
    from the '~' class regardless of it.  Kept for interface compatibility.
    """
    return random.sample(find(classes==ord("~")),n)

with tables.openFile(args.protos) as pdb:
    nprotos = minimum(args.testprotos,len(pdb.root.classes))
    protos = array(pdb.root.patches[:nprotos],'f')
    pclasses = array(pdb.root.classes[:nprotos],'i')
    print "nprotos",nprotos

# Build a nearest-neighbor index over the flattened prototype patches
# (NNIndex comes from llpy.sutils; presumably a pyflann wrapper -- confirm).
nn = NNIndex()
nn.build_index(make2d(protos))

def batch_neighbors(job):
    print job
    start,end = job
    with tables.openFile(args.data) as db:
        batch = array(db.root.patches[start:end],'f')
        ns,ds = nn.nn(make2d(batch),make2d(protos),args.neighbors)
    return (start,end,ns,ds)

# Accumulate neighbor/distance tables across all chunks.  poolmap runs
# batch_neighbors in parallel over (start, end) chunks of the sample range;
# nn_merge_dists folds each chunk's result into the running tables
# (both helpers come from llpy.sutils -- exact merge semantics not visible here).
neighbors = None
dists = None

for start,end,ns,ds in poolmap(batch_neighbors,chunks(nsamples,args.csize)):
    neighbors,dists = nn_merge_dists(neighbors,dists,ns,ds)

print protos.shape



# Deferred import of the local linear classifier module; the reload() is a
# debugging convenience (pick up source edits without restarting the session).
import mlinear
reload(mlinear)

def train_proto(proto):
    """Train a local linear classifier around prototype number `proto`.

    Gathers the 20 prototypes nearest to this one, pulls the training
    samples in this prototype's neighborhood from the data file, and
    trains an mlinear.LinKernelClassifier stored in the global `lpc`.
    Returns None.

    NOTE(review): the consumer loop later in this file unpacks a 6-tuple
    (proto, lpc, nerr, r, r2, h) from this function, but it returns None;
    together with the dead code below, this looks like unfinished
    work-in-progress/debug code.
    """
    global lprotos,ldists,data

    # get the protos for the sample
    nnbr = 20
    [lprotos],[ldists] = nn.nn_index(array(protos[proto].ravel()),nnbr)
    centers = protos[lprotos]
    # debug visualization: briefly display the neighboring prototypes
    # (interactive pylab side effects -- presumably not intended under poolmap)
    ion(); subplot(121); showgrid(centers,r=5); ginput(1,0.1)

    # get the corresponding samples
    ns = neighbors[proto,:]
    with tables.openFile(args.data) as db:
        data = make2d(array([db.root.patches[i] for i in ns]))
    clss = classes[ns]

    # perform the training
    global lpc
    lpc = mlinear.LinKernelClassifier()
    lpc.train(data,clss,centers,args.sigma,linear=args.nologistic)
    return
    # NOTE(review): everything below is unreachable because of the bare
    # `return` above; the final print also references an undefined name
    # `radius` (probably meant `radius2`).
    radius2 = 2.0*args.sigma
    nnbr2 = find(dists[proto]<radius2)[-1]
    ns2 = neighbors[proto,:nnbr2]
    nclasses2 = array(Counter(classes[ns2]).most_common(100))
    err = nclasses2[0,1]*1.0/sum(nclasses2[:,1])
    with tables.openFile(args.data) as db:
        data2 = make2d(array([db.root.patches[i] for i in ns2]))
    clss2 = classes[ns2]
    pred = lpc.classify(data2)
    nerr = sum(pred!=clss2)*1.0/len(clss2)
    print proto,err,nerr,radius,sum(pred!=clss2),len(clss2)

# Debug smoke test: train the first 10 prototypes serially (with the
# interactive plots enabled) before the parallel run further below.
for i in range(10):
    train_proto(i)



# Collect per-prototype training results from a parallel run over all protos.
# NOTE(review): train_proto currently returns None, so unpacking a 6-tuple
# here will raise TypeError -- the expected (proto, lpc, nerr, r, r2, h)
# return value is not implemented in train_proto above.
hists = {}
lpcs = {}
rs = {}
r2s = {}
for proto,lpc,nerr,r,r2,h in poolmap(train_proto,range(nprotos),n=args.par):
    lpcs[proto] = lpc
    rs[proto] = r
    r2s[proto] = r2
    hists[proto] = h



print "saving"
with tables.openFile(args.output,"w") as odb:
    with tables.openFile(args.protos) as pdb:
        table_copy(pdb,odb)
        table_lcopy(pdb,odb)
    table_log(odb,"%s %s"%(sys.argv,time.asctime()))
    odb.createEArray(odb.root,"rs",tables.Float32Atom(),shape=(0,),filters=tables.Filters(9))
    odb.createEArray(odb.root,"r2s",tables.Float32Atom(),shape=(0,),filters=tables.Filters(9))
    odb.createVLArray(odb.root,'lpcs',tables.ObjectAtom(),filters=tables.Filters(9))
    odb.createVLArray(odb.root,'hists',tables.ObjectAtom(),filters=tables.Filters(9))
    for proto in range(nprotos):
        odb.root.lpcs.append([lpcs.get(proto)])
        odb.root.rs.append([rs[proto]])
        odb.root.r2s.append([r2s[proto]])
        odb.root.hists.append(hists.get(proto))
print "done"        
            
