#!/usr/bin/python

import sys,os
from pylab import *
from scipy.ndimage import filters,morphology,interpolation
from scipy import stats
import tables
import multiprocessing
from llpy.sutils import *

# Command-line interface: read prototype character patches from an HDF5
# file, apply random degradations, and append the results to an output
# HDF5 file (optionally preceded by a copy of the original data).
import argparse
parser = argparse.ArgumentParser(description="Degrade characters.")
# NOTE(review): a `default` on a positional argument only takes effect with
# nargs='?'; as written, `input` is required and the default is unused.
parser.add_argument('input',default='kmeans.h5',help="prototype file")
parser.add_argument('-o','--output',default='alldists.h5',help="output file")
parser.add_argument('-O','--original',action="store_true",help="also copy the original data")
parser.add_argument('-N','--nsamples',type=int,default=2000000000,help="max # of samples")
parser.add_argument('-m','--magnitude',default=1.0,type=float,help="magnitude of the scale")
parser.add_argument('-r','--repeat',default=1,type=int,help="how often to repeat the sample")
parser.add_argument('-d','--delta',default=0.05,type=float,help="how much to thicken/thin chars")
parser.add_argument('-s','--smooth',default=1.5,type=float,help="maximum smoothing")
parser.add_argument('-n','--noise',default=0.1,type=float,help="maximum noise")
parser.add_argument('-R','--resize',action='store_true',help="apply size normalization")
parser.add_argument('-Q','--par',default=3,type=int,help="parallelism")
parser.add_argument('--chunksize',default=100000,type=int)
args = parser.parse_args()
# Only a single pass over the data is currently supported.
# NOTE(review): `assert` is stripped under `python -O`; parser.error()
# would be a more robust way to reject unsupported values.
assert args.repeat==1

def translate(image, delta):
    """Shift ``image`` by ``delta`` pixels using first-order (bilinear)
    interpolation; areas shifted in from outside are zero-filled."""
    shifted = interpolation.shift(image, delta, order=1)
    return shifted

def skew(image, s):
    """Shear ``image`` by strength ``s`` about its center (bilinear
    interpolation, zero fill outside the original support)."""
    shear = array([[1.0, 0.0], [-s, 1.0]])
    w, h = image.shape
    center = array([w/2.0, h/2])
    # Offset chosen so the center point maps to itself under the shear.
    offset = center - dot(shear, center)
    return interpolation.affine_transform(image, shear, offset=offset, order=1)

def zoom(image, z):
    """Scale ``image`` isotropically by a factor of (1 + z) about its
    center (bilinear interpolation, zero fill)."""
    scale = array([[1.0 + z, 0.0], [0.0, 1.0 + z]])
    w, h = image.shape
    center = array([w/2.0, h/2])
    # Keep the image center fixed under the scaling.
    offset = center - dot(scale, center)
    return interpolation.affine_transform(image, scale, offset=offset, order=1)

def aspect(image, z):
    """Change the aspect ratio of ``image``: stretch the first axis by
    (1 + z) and squeeze the second by (1 - z), about the center."""
    distortion = array([[1.0 + z, 0.0], [0.0, 1.0 - z]])
    w, h = image.shape
    center = array([w/2.0, h/2])
    # Offset keeps the center point invariant under the transform.
    offset = center - dot(distortion, center)
    return interpolation.affine_transform(image, distortion, offset=offset, order=1)

def uniform(lo, hi):
    """Draw a single float uniformly at random from [lo, hi).

    NOTE: shadows the ``uniform`` pulled in by ``from pylab import *``.
    """
    span = hi - lo
    return lo + rand() * span

def degrade(image, delta=0.0, smooth=1.0, noise=0.05):
    """Degrade a binary-ish character image: blur, add Gaussian noise,
    then re-threshold so the ink fraction is roughly preserved.

    image: 2D array; normalized to max 1 internally.
    delta: relative change in the ink fraction (>0 thickens, <0 thins).
    smooth: Gaussian blur sigma.
    noise: additive Gaussian noise amplitude.
    Returns a boolean array of the same shape.

    BUGFIX: the original body started with `image = source`, clobbering
    the parameter with an undefined global and raising NameError.
    """
    image = image/amax(image)
    # Fraction of "ink" pixels in the input; used to pick the threshold.
    frac = sum(image>0.5)*1.0/image.size
    smoothed = filters.gaussian_filter(image,smooth)+noise*randn(*image.shape)
    # Threshold at the percentile that keeps (1+delta)*frac pixels on.
    threshold = stats.scoreatpercentile(smoothed.ravel(),100*(1.0-(1.0+delta)*frac))
    # Debug trace (single-string form prints identically on Py2 and Py3).
    print("%s %s %s %s" % (frac, threshold, sum(image>0.5), sum(smoothed>threshold)))
    return smoothed>threshold

def degrade2(image, delta=0.0, smooth=1.0, noise=0.02, M=1.0):
    """Randomly distort a character image and re-binarize it.

    Applies, in order: random aspect change, skew, zoom (each scaled by
    the magnitude ``M``), a sub-pixel shift, Gaussian blur of sigma
    ``smooth``, and additive noise; then thresholds so the ink fraction
    stays close to (1 + delta) times the input's.  Returns a boolean
    array of the same shape.
    """
    out = image/amax(image)
    # Ink fraction of the (normalized) input, measured before distortion.
    ink = sum(out>0.5)*1.0/out.size
    out = aspect(out, uniform(-0.05*M, 0.05*M))
    out = skew(out, uniform(-0.1*M, 0.1*M))
    out = zoom(out, uniform(-0.05*M, 0.05*M))
    out = interpolation.shift(out, shift=(uniform(-0.5, 0.5), uniform(-0.5, 0.5)))
    out = filters.gaussian_filter(out, smooth) + noise*randn(*out.shape)
    # Threshold chosen so roughly (1+delta)*ink of the pixels stay on.
    cut = stats.scoreatpercentile(out.ravel(), 100*(1.0-(1.0+delta)*ink))
    return out > cut

def test():
    # Interactive debugging helper: repeatedly degrade and display a
    # character next to its original.
    # NOTE(review): relies on a module-level `image` that is NOT defined
    # in this file — presumably set interactively (e.g. in ipython/pylab)
    # before calling; verify before use.
    for i in range(100):
        out = degrade2(image,delta=0,smooth=0.5,M=2)
        # Show original (left) and degraded (right) side by side,
        # then wait briefly for a click before the next sample.
        clf(); subplot(121); imshow(image); subplot(122); imshow(out)
        ginput(1,0.1)

def process(job):
    """Worker: degrade input patches [start, end) and append the results
    (plus their classes and, if present, rel data) to the output file.

    job: (start, end) index pair into the input arrays.
    Reads args.input, writes args.output; appends are serialized via a
    lock file so multiple workers can share one output HDF5 file.
    """
    print(job)
    start, end = job
    rel = None
    with tables.openFile(args.input) as db:
        classes = db.root.classes[start:end]
        # NOTE(review): tests for a node named "rel" but reads "rels" —
        # verify which name the input file actually uses.
        if "rel" in dir(db.root): rel = db.root.rels[start:end]
        result = []
        for i in range(start,end):
            p = db.root.patches[i]
            # Random degradation parameters, bounded by the CLI settings.
            s = uniform(0.5,args.smooth)
            delta = uniform(-s*args.delta,s*args.delta)
            noise = uniform(0.0,args.noise)
            try:
                pd = degrade2(p,delta=delta,smooth=s,noise=noise,M=args.magnitude)
            except Exception:
                # Degradation can fail on degenerate patches; keep the
                # original rather than dropping the sample.  (Was a bare
                # `except:`, which also swallowed KeyboardInterrupt /
                # SystemExit inside worker processes.)
                pd = p
            if args.resize:
                pd = csnormalize(pd)
            result.append(pd)
    # Append under a file lock so concurrent workers don't corrupt the
    # shared output file.
    with flock(args.output+".lock",1):
        with tables.openFile(args.output,"r+") as odb:
            odb.root.patches.append(result)
            odb.root.classes.append(classes)
            if rel is not None: odb.root.rels.append(rel)

# Worker pool for parallel degradation; only used when --par >= 2
# (see the dispatch at the bottom of the file).
pool = multiprocessing.Pool(args.par)

# Create the output file with extensible arrays mirroring the input's
# patches/classes (and optional rel data), copying bookkeeping tables
# and logging the command line.
with tables.openFile(args.input) as db, tables.openFile(args.output,"w") as odb:
    nsamples = min(args.nsamples,len(db.root.classes))
    # Per-sample patch shape (everything after the leading sample axis).
    shape = list(db.root.patches.shape[1:])
    table_lcopy(db,odb)
    table_log(odb,str(sys.argv))
    print "creating output array"
    odb.createEArray(odb.root,"patches",tables.Float32Atom(),shape=[0]+shape,filters=tables.Filters(9))
    odb.createEArray(odb.root,"classes",tables.Int64Atom(),shape=[0],filters=tables.Filters(9))
    # NOTE(review): the output node is created under the name "rel", but
    # process() and the copy loop below append to odb.root.rels — one of
    # the two names must be wrong; verify against the file schema.
    if "rel" in dir(db.root): 
        odb.createEArray(odb.root,"rel",tables.Float32Atom(),shape=[0,3],filters=tables.Filters(9))
    if args.original:
        # Optionally copy the undegraded data into the output first,
        # in chunks to bound memory use.
        print "copying original data"
        for i,j in chunks(len(db.root.classes),1000):
            odb.root.patches.append(db.root.patches[i:j])
            odb.root.classes.append(db.root.classes[i:j])
            if "rel" in dir(db.root): 
                odb.root.rels.append(db.root.rels[i:j])

# Split the work into (start, end) index ranges.
jobs = chunks(nsamples,args.chunksize)
# NOTE(review): the assert at the top forces args.repeat == 1, so this
# multiplication is currently a no-op.
if args.repeat>0: jobs = jobs*args.repeat

# Run serially for --par < 2 (easier debugging); otherwise fan out.
if args.par<2:
    for x in jobs: process(x)
else:
    pool.map(process,jobs)
