#!/usr/bin/python

# TODO:
# -- add rel-based selection for training?

import os,time
import sys
import numpy
from collections import Counter
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
import tables
import multiprocessing
import fcntl
from pdb import pm
from collections import Counter
from llpy.sutils import *
# NOTE(review): import order is load-bearing here -- the star imports from
# pylab and llpy.sutils shadow earlier names (e.g. stdlib random must come
# AFTER "from pylab import *"), so the original order is preserved; the
# duplicate "import tables" / Counter imports are kept for the same reason.

import argparse
# Command-line interface for the local-linear-model trainer.
parser = argparse.ArgumentParser(description = "Compute local linear models.")
parser.add_argument('-d','--data',default="DATA/training.h5",help="data file")
parser.add_argument('-p','--protos',default=None,help="prototypes file")
parser.add_argument('-k','--kmeans',default=None,type=int,help="kmeans clustering")
parser.add_argument('-o','--output',help='output file')
parser.add_argument("-n",'--neighbors',default=10000,type=int,help="number of candidate neighbors")
parser.add_argument("-N","--nsamples",default=200000,type=int,help="number of samples")
parser.add_argument("--pca_k",default=10,type=int,help="k for PCA in logistic regression")
parser.add_argument("--csize",default=100000,type=int,help="chunk size for neighborhoods")
parser.add_argument("-Q","--par",default=multiprocessing.cpu_count(),type=int,help="parallelism")
# BUG FIX: the original "if 1: args = parser.parse_args(...)" debug toggle
# unconditionally discarded the real command line.  Keep the debug defaults
# for argument-less (interactive) runs, but honor real CLI arguments.
if len(sys.argv) > 1:
    args = parser.parse_args()
else:
    args = parser.parse_args("-p DATA/protos032a.h5".split())

def sigmoid(x):
    """Logistic function, with the input clipped to [-20, 20] so exp() cannot
    overflow; works elementwise on scalars and arrays."""
    z = clip(x, -20, 20)
    return 1.0 / (1.0 + exp(-z))

# Load a random training subset once per session; the dir() guard makes
# re-running this script inside the same interpreter cheap.
if "tdata" not in dir():
    print "loading tdata"
    with tables.openFile(args.data) as db:
        # Sample args.nsamples row indices without replacement; sorting
        # keeps the HDF5 row reads sequential.
        indexes = range(len(db.root.classes))
        indexes = sorted(random.sample(indexes,args.nsamples))
        # tdata: one flattened patch per row; tclasses: matching labels.
        tdata = array([db.root.patches[i].ravel() for i in indexes])
        tclasses = array([db.root.classes[i] for i in indexes])
    print "done"
    # Nearest-neighbor index over the patches.  NNIndex comes from
    # llpy.sutils -- presumably a pyflann wrapper, TODO confirm.
    nn = NNIndex()
    nn.build_index(tdata)

# Obtain prototype vectors: either load them from a file (-p) or compute
# them with k-means over the training subset (-k).  Also session-cached.
if "protos" not in dir():
    if args.protos is not None:
        # NOTE(review): the local name "pdb" shadows the pdb module
        # imported at the top of the file.
        with tables.openFile(args.protos) as pdb:
            nprotos = len(pdb.root.patches)
            # Keep every 10th prototype, as float32 2-D rows
            # (make2d comes from llpy.sutils).
            protos = make2d(array(pdb.root.patches[:nprotos:10],'f'))
    elif args.kmeans is not None:
        print "computing protos"
        # NOTE(review): kmeans presumably comes from llpy.sutils; it cannot
        # be scipy.cluster.vq.kmeans, which returns a (codebook, distortion)
        # tuple rather than the bare codebook used below -- TODO confirm.
        protos = kmeans(tdata,args.kmeans)
        print "done"
    else:
        print "must specify either -p or -k"
        sys.exit(1)

# Number of prototypes actually in use (may differ from the value set while
# loading, because of the ::10 subsampling above); one local model is fit
# per prototype below.
nprotos = len(protos)
print "nprotos",nprotos

import mlinear

from scipy.spatial.distance import cdist

# Fit one local linear (PCA + logistic) model per prototype.
# A[i] / a[i] collect the weight vector and bias of model i.
A = []
a = []

for proto in range(nprotos):
    print proto
    # Indices (ns) and distances (ds) of the args.neighbors training
    # samples closest to this prototype.
    [ns],[ds] = nn.nn_index(protos[proto].ravel(),args.neighbors)
    data = array([tdata[i] for i in ns])
    # Append the squared norm as an extra feature, so the linear model can
    # express a quadratic decision surface in the original patch space.
    data = c_[data,sum(data**2,axis=1)]
    clss = array([tclasses[i] for i in ns])
    # Target class = majority label among the 100 nearest samples.
    tcls = Counter(clss[:100]).most_common(1)[0][0]
    good = (clss==tcls)
    lpc = mlinear.LinPcaClassifier()
    # Binary training: target class (1) vs. everything else (0).
    lpc.train(data,1*good,k=args.pca_k)
    # Probability assigned to class 1 for each neighbor.
    preds = [dict(s)[1] for s in lpc.outputs(data)]
    A.append(lpc.R[1])
    a.append(lpc.r[1])
    # Debug visualization (always on): smoothed class membership vs.
    # smoothed predicted probability, as a function of neighbor distance.
    if 1:
        ion(); clf()
        ylim((0,1.0))
        xlim((0,150.0))
        plot(ds,filters.gaussian_filter(1.0*good,30.0)); ginput(1,0.001)
        plot(ds,filters.gaussian_filter(preds,30.0)); ginput(1,0.001)

# Stack the per-prototype weights/biases into arrays for vectorized use.
A = array(A)
a = array(a)



# Second stage: push the first 10000 training samples through all local
# models at once -- augment with the squared norm, apply every model's
# affine map (A, a), and squash with the sigmoid.  Each row of tdata2 is a
# vector of per-prototype responses.
tdata2 = sigmoid(dot(c_[tdata[:10000],sum(tdata[:10000]**2,axis=1)],A.T)+a[newaxis,:])

print "training output"
# Train an output classifier on the model-response features.
lpc = mlinear.LinPcaClassifier()
lpc.train(tdata2,tclasses[:len(tdata2)])
print "done"
tpred2 = array(lpc.classify(tdata2))
# NOTE(review): this is the TRAINING error -- the same 10000 samples were
# used both to fit and to evaluate the output classifier.
print "result",len(protos),sum(tpred2!=tclasses[:len(tpred2)])
