#!/usr/bin/python

import numpy,os,os.path,sys
from collections import Counter
from pylab import *
import tables,scipy
from scipy import fftpack
from scipy.cluster import vq
from scipy.ndimage import morphology,filters,interpolation
from ocrolib import docproc,improc
import pyflann
import random
import cPickle
from tables import openFile,Filters,Int32Atom,Float32Atom,Int64Atom
from multiprocessing import Pool
import fcntl
from sutils import *
from ocrolib.pycomp import PyComponent
from pdb import pm
import mlinear

def sigmoid(x):
    """Numerically safe logistic function; the argument is clamped to
    [-20,20] to avoid overflow in exp."""
    z = clip(x,-20,20)
    return 1/(1+exp(-z))

def classifier_normalize(image,size=32):
    """Normalize a character image for classification: rescale intensities
    to a maximum of 1, size-normalize to size x size, then center/skew
    normalize with csnormalize.  An (almost) blank input yields an all-zero
    size x size image.  Input values are assumed to lie below 1.1."""
    peak = amax(image)
    assert peak<1.1
    if peak<1e-3:
        # nearly blank image; nothing to normalize
        return zeros((size,size))
    result = array(image*1.0/peak,'f')
    assert amax(result)<1.1
    result = docproc.isotropic_rescale(result,size)
    assert amax(result)<1.1
    result = csnormalize(result)
    assert amax(result)<1.1
    return result

class ClassifierBase(PyComponent):
    def __init__(self,**kw):
        self.set(**kw)
    def set(self,**kw):
        for k,v in kw.items():
            if k in dir(self.__dict__):
                self.__dict__[k] = v
            else:
                print "warning: cannot set parameter",k
    def clear(self):
        raise Exception("unimplemented")
    def cadd(self,image,c,geometry=None):
        raise Exception("unimplemented")
    def updateModel(self,*args,**kw):
        raise Exception("unimplemented")
    def save_component(self,path):
        with open(path,"w") as stream:
            cPickle.dump(self,stream)
    def coutputs_batch(self,images,geometries=None):
        result = []
        for i in range(len(images)):
            try:
                output = self.coutputs(images[i],geometries[i]) 
            except:
                print "recognition failed"
                output = []
            result.append(output)
        return result
    def cclassify(self,v,geometry=None):
        pass
    # subclass responsibility
    def coutputs(self,image,rel=None):
        raise Exception("unimplemented")

import variants

class SimpleNN(ClassifierBase):
    def __init__(self,k=1,nvariants=1):
        ClassifierBase.__init__(self)
        self.k = k
        self.nvariants = nvariants
    def __getstate__(self):
        return dict([(k,v) for k,v in self.__dict__.items() if k not in ["nn","pdb"]])
    def rebuild(self):
        self.nn = NNIndex()
        print "building index (FIXME)"
        self.nn.build_index(make2d(self.protos))
        print "index built"
    def __setstate__(self,state):
        self.__dict__.update(state)
        self.rebuild()
    def set(self,**kw):
        for k,v in kw.items():
            assert k in dir(self)
            setattr(self,k,v)
    def load(self,path):
        print "loading",path
        with openFile(path) as pdb:
            nprotos = len(pdb.root.classes)
            self.nprotos = nprotos
            self.classes = array(pdb.root.classes[:nprotos])
            self.protos = array(pdb.root.patches[:nprotos])
        print "loaded",self.nprotos
        self.rebuild()
    def coutputs(self,image,rel=None):
        if amax(image)==amin(image): 
            return [("~",0.0)]
        try:
            cimage = classifier_normalize(image)
        except:
            print "WARNING: preprocessing failed in SimpleNN"
            return [("~",0.0)]
        query = array(variants.random_variants(cimage,n=self.nvariants))
        ns,ds = self.nn.nn_index(make2d(array([cimage])),self.k)
        ns = ns.ravel()
        ds = ds.ravel()
        order = argsort(ds)
        ns = ns[order]
        ds = ds[order]
        clss = self.classes[ns]
        if self.k>1:
            outputs = counter2outputs(Counter(clss))
        else:
            cls = clss[0]
            outputs = [(udecode(cls),exp(-ds[0]/10.0))]
        return outputs

class ProtoRejectClassifier(ClassifierBase):
    # Classifier with per-prototype rejection: the nearest prototype's
    # posteriors are downweighted either by a trained reject classifier
    # (rejclass) or by a distance-based sigmoid using per-prototype
    # distance statistics (d_means/d_sigmas).
    def __init__(self):
        ClassifierBase.__init__(self)
        self.protos = None
    def load(self,path):
        # Load the prototype database; posteriors must be present.
        self.protos = Protos()
        self.protos.load(path)
        assert self.protos.posteriors is not None
    def coutputs(self,image,geometry=None,reject=0):
        # Returns (class,score) pairs sorted by decreasing score; when
        # reject is true, a "~" entry carries the reject probability.
        cimage = classifier_normalize(image)
        # nearest prototype index n1 and its distance d1
        [[n1]],[[d1]] = self.protos.nn_index(array([cimage]))
        mu = self.protos.d_means[n1]
        sig = self.protos.d_sigmas[n1]
        ps = self.protos.posteriors[n1]
        rc = self.protos.rejclass(n1)
        if rc is not None:
            # NOTE(review): appears to extract the acceptance score from
            # the reject classifier's output pairs -- confirm the exact
            # structure returned by rc.outputs
            r = sorted(rc.outputs(array([cimage])))[0]
            # print r
            r = dict(r)[1]
            ps = [(c,p*r) for c,p in ps]
            if reject: ps += [("~",1.0-r)]
        elif not isnan(mu) and not isnan(sig):
            # fall back to a distance-based reject probability: how many
            # sigmas d1 is beyond the mean prototype distance
            err = (d1-mu)/maximum(0.1,sig)
            rej = sigmoid(clip((err-3.0)*4.0,-5,5))
            assert not isnan(rej)
            ps = [(c,p*(1.0-rej)) for c,p in ps]
            if reject: ps += [("~",rej)]
        ps = sorted(ps,key=lambda x:-x[1])
        return ps

class ComboClassifier(ClassifierBase):
    """Combine a base classifier with a separate reject classifier: base
    outputs are scaled by the reject classifier's acceptance score."""
    def __init__(self,base,reject):
        """Load both pickled classifiers from the given file paths."""
        ClassifierBase.__init__(self)
        with open(base) as stream:
            self.base = cPickle.load(stream)
        with open(reject) as stream:
            # BUG FIX: the original called cPickle.load(reject), passing
            # the path string instead of the open stream
            self.reject = cPickle.load(stream)
    def coutputs(self,image,geometry=None,reject=0):
        outputs = self.base.coutputs(image)
        cimage = classifier_normalize(image)
        # renamed local so the `reject` parameter is not shadowed
        rej = self.reject.outputs(cimage)
        outputs = [(c,v*rej[0]) for c,v in outputs]
        # BUG FIX: the original returned the undefined name `result`
        return outputs

from scipy.spatial import distance

class KernelClassifier(ClassifierBase):
    def __init__(self,protos=None,sigmas=None,m=None,classes=None):
        ClassifierBase.__init__(self)
        self.protos = protos
        self.sigmas = sigmas
        self.m = m
        self.s = 2*sigmas**2
        self.classes = array(classes)
        self.debug = 0
    def coutputs(self,image,geometry=None,reject=0):
        cimage = classifier_normalize(image)
        d = distance.cdist(array([cimage.ravel()]),make2d(self.protos)).ravel()
        if self.debug: print d[:5]
        w = concatenate([[1.0],exp(-d**2/self.s)])
        if self.debug: print w[:5]
        out = dot(array([w]),self.m).ravel()
        if self.debug: print out[:5]
        result = zip(self.classes,out)
        result = [(udecode(c),p) for c,p in result]
        result = sorted(result,key=lambda x:-x[1])
        if self.debug: result[:3]
        return result


# Disabled ad-hoc evaluation script: loads a training database and a
# SimpleNN model and reports top-1 errors on a small slice of patches.
# Kept for reference; enable by changing `if 0:` to `if 1:`.
if 0:
    from pylab import *
    import tables
    ion()
    db = tables.openFile("../ocrosynt/data-1/training.h5")
    c = SimpleNN(k=1)
    c.load("c2rej.h5")
    print len(db.root.patches)

    errors = 0
    berrors = 0
    total = 0

    print "starting"

    for i in range(1000,1010):
        cls = udecode(db.root.classes[i])
        if total%1000==0: 
            print "total",total,"errors",errors,berrors
        outputs = c.coutputs(db.root.patches[i])
        total += 1
        print cls,outputs[0][0]
        # top-1 error vs. "class appears anywhere in outputs" error
        if len(outputs)==0 or cls!=outputs[0][0]: errors += 1
        if cls in outputs: berrors += 1

def unlist(x):
    """Return x unchanged unless it is a single-element list, in which
    case return that element; a list of any other length is an error."""
    # idiom: isinstance instead of a direct type comparison
    if not isinstance(x,list): return x
    assert len(x)==1
    return x[0]

class LocLinClassifier(ClassifierBase):
    def __init__(self,path=None,**kw):
        ClassifierBase.__init__(self)
        self.protos = None

        self.verbose = 0
        self.report = 0
        
        # default penalties are tiny 
        # extra penalty for multi-character outputs
        self.len_cost_penalty = 3.0
        # covariance-based matrix
        self.sigmas_cost_scale = 0
        # penalize by this factor times d/r
        self.distance_cost_scale = 0
        # reject characters whose rel cost is larger than this
        self.max_rel_cost = 1e5
        # scale for rel cost to contribute to total cost
        self.rel_cost_scale = 0
        # scale for count cost
        self.count_cost_scale = 0

        # statistics reporting
        self.runs = 0
        self.stats = ValueStats()

        # set any parameters
        for k,v in kw:
            assert k in self.__dict__.keys()
            self.__dict__[k] = v

        # load the files, if any
        if path is not None: self.load(path)

    def __getstate__(self):
        return dict([(k,v) for k,v in self.__dict__.items() if k not in ["nn","pdb"]])

    def __setstate__(self,state):
        self.__dict__.update(state)
        self.makeindex()
        if "lneighbors" not in dir(self): self.lneighbors = 3

    def makeindex(self):
        self.nn = NNIndex()
        print "building index (FIXME)"
        self.nn.build_index_cached(make2d(self.protos))
        print "index built"
        print self.nn

    def load(self,path):
        with tables.openFile(path) as pdb:
            n = len(pdb.root.classes)
            # first, load the linear classifiers
            if "lpcs" in dir(pdb.root):
                self.lpcs = [unlist(x) for x in pdb.root.lpcs[:n]]
            else:
                self.lpcs = [None]*n
            if "rellpcs" in dir(pdb.root):
                self.rellpcs = [unlist(x) for x in pdb.root.rellpcs[:n]]
                self.reldens = [unlist(x) for x in pdb.root.reldens[:n]]
            else:
                print "warning: no rellpcs in model"
                self.rellpcs = [None]*n
                self.reldens = [None]*n
            sel = array([i for i in range(n) if self.lpcs[i] is not None or self.rellpcs[i] is not None],'i')

            print "selected",len(sel),"with linear classifiers from",n,"prototypes"
            assert len(sel)>0

            nprotos = len(sel)
            self.nprotos = nprotos
            if "lpcs" in dir(self):
                self.lpcs = [self.lpcs[i] for i in sel]
            if "rellpcs" in dir(self):
                self.rellpcs = [self.rellpcs[i] for i in sel]
            if "reldens" in dir(self):
                self.reldens = [self.reldens[i] for i in sel]

            self.protos = array([pdb.root.patches[i] for i in sel],'f')
            self.pclasses = array([pdb.root.classes[i] for i in sel],'i')
            self.counts = array([sum(pdb.root.hists[i].values()) for i in sel],'i')
            self.hists = [pdb.root.hists[i] for i in sel]
            self.sigmas = array([pdb.root.sigmas[i] for i in sel])
            self.rs = array([pdb.root.rs[i] for i in sel])
        self.makeindex()
        self.maxcounts = 1+amax(self.counts)
        print "nprotos",self.nprotos,
        print "lpcs",len([x for x in self.lpcs if x is not None]),
        print "rellpcs",len([x for x in self.rellpcs if x is not None])
        print "max counts",self.maxcounts,log(self.maxcounts)

    def loutputs(self,image,geometry=None,reject=0):
        self.runs += 1
        if self.report>0 and self.runs%self.report==0:
            self.stats.report("#csclass-stats:")

        if amax(image)==amin(image): return [("~",99.9)]
        cimage = classifier_normalize(image)

        # keep track of reject costs
        
        rcost = 0

        # find the closest neighbors

        ns,ds = self.nn.nn_index(array([cimage.ravel()]),2)
            
        # first, get the posterior using either the linear models
        # or the histograms

        lloutputs = []
        n1 = ns.ravel()[0]
        d1 = ds.ravel()[0]

        lpc = unlist(self.lpcs[n1])
        rellpc = unlist(self.rellpcs[n1])
        if rellpc is not None:
            v = rel_combine(array([cimage.ravel()]),array([geometry]))
            r = rellpc.outputs(v)[0]
            gotlin = 1
        elif lpc is not None:
            r = lpc.outputs(array([cimage.ravel()],'f'))[0]
            gotlin = 1
        else:
            hist = self.hists[n1]
            if type(hist)==list: hist = hist[0]
            if hist is not None:
                r = counter2outputs(hist)
            else:
                r = [(self.pclasses[n1],1.0)]
            gotlin = 0
        loutputs = dict([(udecode(k),-log(max(v,1e-9))) for k,v in r if k!="~"])

        # penalize based on covariance matrix
        if self.sigmas_cost_scale>0:
            s = maximum(0.1,self.sigmas[n1])
            c = scosts(array([s.ravel()]))[0]
            # NB: this is a likelihood and can be positive or negative
            d = ddist(cimage,self.protos[n1],s,c)
            self.stats(d,"scosts-d")
            rcost += d*self.sigmas_cost_scale 

        # count costs
        if self.count_cost_scale>0:
            rarity = self.maxcounts - self.counts[n1]
            self.stats(rarity,"rarity")
            lrarity = log(max(1,rarity))
            self.stats(lrarity,"lrarity")
            rcost += lrarity*self.count_cost_scale

        # outside cost based on distance
        self.stats(d1>self.rs[n1],"outside")
        self.stats(d1>2*self.rs[n1],"outside2")
        self.stats(d1,"d1")
        if self.distance_cost_scale>0:
            c = d1/max(10.0,self.rs[n1])
            self.stats(c,"distance_cost")
            rcost += self.distance_cost_scale * c

        self.stats(rcost,"rcost")

        # add the rcost, then switch over to per-class penalties
        for k,v in loutputs.items():
            loutputs[k] += rcost

        # penalize based on length (per class)
        for k,v in loutputs.items():
            self.stats(len(k),"lengths")
            if len(k)>1: loutputs[k] += self.len_cost_penalty

        # rel costs
        if self.rel_cost_scale>0:
            reldens = self.reldens[n1]
            self.stats(reldens==None,"reldens-is-none")
            if reldens is not None:
                for k,v in loutputs.items():
                    rc = reldens[uencode(k)].cost(geometry.ravel())
                    self.stats(rc,"reldens-cost")
                    self.stats(rc>=self.max_rel_cost,"reldens-deleted")
                    if rc>=self.max_rel_cost:
                        del loutputs[k]
                    else:
                        loutputs[k] += rc*self.rel_cost_scale

        if len(loutputs.items())==0: return [("~",99.9)]
        return sorted(loutputs.items(),key=lambda x:x[1])

    def coutputs(self,x,geometry=None):
        loutputs = self.loutputs(x,geometry)
        return [(k,exp(-min(v,100))) for k,v in loutputs]

