//
//  MPSCNNSoftMax.h
//  MPS
//
//  Created on 8/21/16.
//  Copyright © 2016 Apple. All rights reserved.
//

#ifndef MPSCNNSoftMax_h
#define MPSCNNSoftMax_h

#include <MPSNeuralNetwork/MPSCNNKernel.h>

#ifdef __cplusplus
extern "C" {
#endif

#pragma mark - MPSCNNSoftMax


/*!
 *  @class      MPSCNNSoftMax
 *  @dependency This depends on Metal.framework
 *  @discussion The softMax filter is a neural transfer function and is useful for classification tasks.
 *              The softMax filter is applied across feature channels and in a convolutional manner at all
 *              spatial locations. The softMax filter can be seen as the combination of an
 *              activation function (exponential) and a normalization operator.
 *              For each feature channel per pixel in an image in a feature map, the softMax filter computes the following:
 *                  result channel in pixel = exp(pixel(x,y,k))/sum(exp(pixel(x,y,0)) ... exp(pixel(x,y,N-1)))
 *                      where N is the number of feature channels
 *
 */
MPS_CLASS_AVAILABLE_STARTING( macos(10.13), ios(10.0), macCatalyst(13.0), tvos(10.0))
@interface MPSCNNSoftMax : MPSCNNKernel

@end    /* MPSCNNSoftMax */

/*!
 *  @class      MPSCNNSoftMaxGradient
 *  @dependency This depends on Metal.framework
 *  @discussion The softMax gradient filter calculates the gradient to be backpropagated.
 *              The softMax gradient, just as the softMax filter, is applied across feature channels and at all spatial locations.
 *              It computes the gradient for a given output generated by the corresponding softMax (i.e. MPSCNNSoftMax) layer and
 *              the gradient computed by the previous layer in the back-propagation pass.
 *              For each feature channel per pixel in an image in a feature map, the softMax gradient filter computes the following:
 *                  result gradient channel in pixel
 *                      outputGradient(x,y,k) = softMax(x,y,k) * (inputGradient(x,y,k) -
 *                                               sum(inputGradient(x,y,0) * softMax(x,y,0) ... inputGradient(x,y,N-1) * softMax(x,y,N-1)))
 *                      where N is the number of feature channels
 *
 *              The incoming gradient is the primary source.
 *              The original output of corresponding softMax is the secondary source.
 *
 */
MPS_CLASS_AVAILABLE_STARTING( macos(10.13.4), ios(11.3), macCatalyst(13.0), tvos(11.3))
@interface MPSCNNSoftMaxGradient : MPSCNNGradientKernel

/*!
 *  @abstract   Initializes an MPSCNNSoftMaxGradient function
 *  @param      device                          The MTLDevice on which this MPSCNNSoftMaxGradient filter will be used
 *
 *  @return     A valid MPSCNNSoftMaxGradient object or nil, if failure.
 */
-(nonnull instancetype) initWithDevice: (nonnull id <MTLDevice>) device NS_DESIGNATED_INITIALIZER;

/*! @abstract NSSecureCoding compatibility
 *  @discussion While the standard NSSecureCoding/NSCoding method
 *              -initWithCoder: should work, since the file can't
 *              know which device your data is allocated on, we
 *              have to guess and may guess incorrectly.  To avoid
 *              that problem, use initWithCoder:device instead.
 *  @param      aDecoder    The NSCoder subclass with your serialized MPSKernel
 *  @param      device      The MTLDevice on which to make the MPSKernel
 *  @return     A new MPSKernel object, or nil if failure.
 */
-(nullable instancetype) initWithCoder:(NSCoder * __nonnull)aDecoder
                                device:(nonnull id <MTLDevice>) device NS_DESIGNATED_INITIALIZER;

@end    /* MPSCNNSoftMaxGradient */

#pragma mark - MPSCNNLogSoftMax

/*!
 *  @class      MPSCNNLogSoftMax
 *  @dependency This depends on Metal.framework
 *  @discussion The logarithmic softMax filter can be achieved by taking the natural logarithm of
 *              the result of the softMax filter. The results are often used to construct a loss function to be
 *              minimized when training neural networks.
 *              For each feature channel per pixel in an image in a feature map, the logarithmic softMax filter
 *              computes the following:
 *                  result channel in pixel = pixel(x,y,k) - ln{sum(exp(pixel(x,y,0)) ... exp(pixel(x,y,N-1)))}
 *                      where N is the number of feature channels and y = ln{x} satisfies e^y = x.
 *
 */
MPS_CLASS_AVAILABLE_STARTING( macos(10.13), ios(10.0), macCatalyst(13.0), tvos(10.0))
@interface MPSCNNLogSoftMax : MPSCNNKernel

@end    /* MPSCNNLogSoftMax */


/*!
 *  @class      MPSCNNLogSoftMaxGradient
 *  @dependency This depends on Metal.framework
 *  @discussion The logSoftMax gradient filter calculates the gradient to be backpropagated.
 *              The logSoftMax gradient, just as the log softMax filter, is applied across feature channels and at all spatial locations.
 *              It computes the gradient for a given output generated by the corresponding logSoftMax (i.e. MPSCNNLogSoftMax) layer and
 *              the gradient computed by the previous layer in the back-propagation pass.
 *              For each feature channel per pixel in an image in a feature map, the logSoftMax gradient filter computes the following:
 *                  result gradient channel in pixel
 *                      outputGradient(x,y,k) = inputGradient(x,y,k) - exp(logSoftMax(x,y,k)) * sum(inputGradient(x,y,0) ... inputGradient(x,y,N-1))
 *                      where N is the number of feature channels
 *
 *              The incoming gradient is the primary source.
 *              The original output of corresponding logSoftMax is the secondary source.
 *
 */
MPS_CLASS_AVAILABLE_STARTING( macos(10.13.4), ios(11.3), macCatalyst(13.0), tvos(11.3))
@interface MPSCNNLogSoftMaxGradient : MPSCNNGradientKernel

/*!
 *  @abstract   Initializes an MPSCNNLogSoftMaxGradient function
 *  @param      device                          The MTLDevice on which this MPSCNNLogSoftMaxGradient filter will be used
 *
 *  @return     A valid MPSCNNLogSoftMaxGradient object or nil, if failure.
 */
-(nonnull instancetype) initWithDevice: (nonnull id <MTLDevice>) device NS_DESIGNATED_INITIALIZER;

/*! @abstract NSSecureCoding compatibility
 *  @discussion While the standard NSSecureCoding/NSCoding method
 *              -initWithCoder: should work, since the file can't
 *              know which device your data is allocated on, we
 *              have to guess and may guess incorrectly.  To avoid
 *              that problem, use initWithCoder:device instead.
 *  @param      aDecoder    The NSCoder subclass with your serialized MPSKernel
 *  @param      device      The MTLDevice on which to make the MPSKernel
 *  @return     A new MPSKernel object, or nil if failure.
 */
-(nullable instancetype) initWithCoder:(NSCoder * __nonnull)aDecoder
                                device:(nonnull id <MTLDevice>) device NS_DESIGNATED_INITIALIZER;

@end    /* MPSCNNLogSoftMaxGradient */

#ifdef __cplusplus
}
#endif

    
#endif /* MPSCNNSoftMax_h */
