#!/usr/bin/python
#coding:utf-8

# Copyright 2011 Nicolau Leal Werneck, Anna Helena Reali Costa and
# Universidade de São Paulo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

###############################################################################
## Open a (pinhole camera model) picture, find its orientation and
## extract the rectified image.
##
## Changes that must be performed as soon as possible: read the
## intrinsic parameters from somewhere, and make it easy to switch to
## the equirectangular model.

import sys

import time

import matplotlib

if __name__ == '__main__':
    # When invoked through the "-nox" variant of the script, switch to the
    # non-interactive Agg backend before pylab is imported below.
    if sys.argv[0].endswith('-nox.py'):
        matplotlib.use('Agg')

import plot_aux

from pylab import *

import scipy.io
import scipy.ndimage

from numpy import dot

# from camori import PicturePinhole, PictureEqrec, quaternion_to_matrix, measure_error, quaternion_product, dir_colors
from camori_harris import PictureHarris
from camori_equidistant import PictureEquidistant

import camori_aux

from quaternion import Quat, random_quaternion

import simplejson

import scipy.optimize
#from fmin_sa import Sampling
from simplex_sa import SimplexSO3

import Image

from plot_aux import *

## Print numpy arrays with 7 digits of precision (pylab brings this in).
set_printoptions(precision=7)





# Global switch: set to False to suppress all matplotlib output.
PlotStuff = True
if __name__ == '__main__':
    if PlotStuff:
        rc('text',usetex=False)

    ## Avoid zero-divide warnings...
    np.seterr(divide='ignore')

    if PlotStuff:
        ## Plot stuff immediately (interactive mode).
        ion()

    #################################################################
    ## Load image and initialize pic object

    ## Sets filename from input argument
    if len(sys.argv) < 3:
        print sys.argv[0], '<job_file.json> <frame_number>'
        raise Exception('Insufficient number of parameters')

    finput = open(sys.argv[1])
    job_params = simplejson.load(finput)
    finput.close()

    fileroot = job_params['root_directory']

    framenum = int(sys.argv[2])
    filename = fileroot+'/frames/'+job_params['filename_format']%framenum

    im = Image.open(filename)
    frame = array(im.convert('RGB'), dtype=float)
    imr = array(im.convert('RGB'), dtype=float)
    imr = imr[:,:,:3] #remove alpha channel

    ## Optional Gaussian pre-smoothing, applied channel by channel.
    ## Requires scipy.ndimage (imported at the top of the file).
    if job_params.get("gaussian_smoothing_factor", 0) > 0:
        for c in range(3):
            imr[:,:,c] = scipy.ndimage.gaussian_filter(
                imr[:,:,c], double(job_params["gaussian_smoothing_factor"]))

    ## Creates picture object.  i_param[0] is a numeric code identifying the
    ## projection model; the remaining entries are the focal distance, the
    ## principal point, and (Harris model only) the distortion coefficient.
    if not 'projection_model' in job_params:
        ## Original code raised a bare string here, which is a TypeError in
        ## modern Python 2 — raise a proper exception instead.
        raise ValueError('Missing camera model in job file')
    model = job_params['projection_model']

    ## Numeric model codes stored in i_param[0].
    model_codes = {'pinhole': 0.0,
                   'harris': 2.0,
                   'polar_equidistant': 3.0,
                   'cylindrical_equidistant': 4.0}
    if model not in model_codes:
        raise NotImplementedError

    ## Intrinsic parameters, common to every model.
    focal_distance = job_params['focal_distance']
    p_point = array(job_params['principal_point'])
    i_param = [model_codes[model], focal_distance, p_point[0], p_point[1]]
    if model == 'harris':
        i_param.append(job_params['distortion_coefficient'])
    i_param = array(i_param)

    ## NOTE(review): PictureEquidistant is instantiated for every model even
    ## though PictureHarris is imported — presumably the class dispatches on
    ## i_param[0]; confirm this is intentional.
    pic = PictureEquidistant(imr, i_param)


    ##
    ##################################################################

    ## Edgel extractor parameters
    gmethod = job_params['edge_detection_method']
    gspc = job_params['grid_spacing']
    gspc = 16
    glim = job_params['gradient_threshold']
    dec_t = job_params['decimator_threshold']
    dec_d = job_params['decimator_distance']
    dec_m = job_params['decimator_method']
    dec_l = job_params['decimator_lower']
    initial_trials = job_params['initial_trials']
    optimization_tolerance = job_params['optimization_tolerance']

    do_decimation = job_params['do_decimation']
    do_robust = job_params['do_robust']
    do_multiple_initializations = job_params['do_multiple_initializations']
    do_optimization = job_params['do_optimization']
    do_second_optimization = job_params['do_second_optimization']

    ##################################################################
    ## Extract the edgels from the image using the grid mask
    pic.extract_edgels(gspc, glim, method=gmethod)
    ##
    ##################################################################



    ## Hard-coded orientation estimate (unit quaternion) for the frame being
    ## studied; estimates for other test frames are kept commented for
    ## reference.
    #qopt = Quat(0.952486, -0.119736, 0.278987, -0.024493) #york1
    #qopt = Quat(0.942090127643, 0.00842906378463, -0.335251810631, 0.00116865335955) #york22
    qopt = Quat(0.987914062639, -0.0265540101475, 0.152704389026, -0.0014349070672) #york90
    #qopt = Quat( 0.9992,  0.0207,  0.0343,  0.0007) #york49
    #qopt = Quat( 0.97516945,  0.01860631,  0.03503033,  0.21787893) #fish 6

    ## Classify each extracted edgel against the directions predicted by
    ## qopt; fp holds the error-function parameters from the job file.
    fp = array(job_params["fp_optimization"])
    args_f = (pic.edgels, i_param, fp)
    lab = camori_aux.camori_classify(qopt.q, *args_f)


    ## Figure 1, top row: the classified edgels, one panel per reference
    ## direction.  aa gives image-style axis limits (y inverted).
    aa = ([0,pic.frame.shape[1],pic.frame.shape[0],0])
    figure(1, figsize=(15 *2 / 2.54, 15 *2/2.54 * 9/16))
    suptitle(u'Edgels classificados e direções preditas')

    scale = 7.0
    edgel_titles = [u'Edgels na direção $x$',
                    u'Edgels na direção $y$',
                    u'Edgels na direção $z$']
    ## lab[:,0] is the assigned direction label (0, 1, 2 per the titles);
    ## lab[:,1] < 1 appears to filter by a fit/quality value — confirm.
    for dd in range(3):
        eee = nonzero((lab[:,1]<1) * (lab[:,0] == dd))[0]
        subplot(2,3,1+dd)
        title(edgel_titles[dd])
        for ee in eee:
            ## Draw each edgel as a short oriented segment at its position.
            plot((pic.edgels[ee,[0,0]] - scale*np.c_[-pic.edgels[ee,3], pic.edgels[ee,3]]).T,
                 (pic.edgels[ee,[1,1]] + scale*np.c_[-pic.edgels[ee,2], pic.edgels[ee,2]]).T,
                 '-', color=dir_colors[dd])
        axis('equal')
        axis(aa)


    ## Bottom row: predicted edgel directions over the whole image for each
    ## of the three directions, under the orientation qopt.
    vdir_titles = [u'Predições na direção $x$',
                   u'Predições na direção $y$',
                   u'Predições na direção $z$']
    for dd in range(3):
        subplot(2,3,4+dd)
        title(vdir_titles[dd])
        pic.plot_vdirs(gca(), 61, qopt, labrange=[dd])
        axis('equal')
        axis(aa)