//
//  EAGLCameraView.mm
//  OpenGLGame
//
//  Created by Nguyen Cong Huy on 11/23/12.
//  Copyright (c) 2012 UIT. All rights reserved.
//

#import "EAGLCameraView.h"

#import "AcvFileReader.h"
#include "LBGraphics.h"
#import  "OCImage.h"
#include "LBTexture.h"
#include "LBUtils.h"
#include "LBFilter.h"
#include "LBFramebuffer.h"
#include "LBWhiteBalanceFilter.h"
#include "LBMonoChromeFilter.h"
#include "LBSharpenFilter.h"
#include "LBBoxBlurFilter.h"
#include "LBAdaptiveThresholdFilter.h"
#include "LBVintageLowFilter.h"
#include "LBInstantEffectFilter.h"
#include "LBRadialGradientFilter.h"
#include "LBAutoContrastFilter.h"
#include "LBRadialGradientTextureFilter.h"
#include "LBOldFilter.h"
#include "LBColdFilter.h"
#include "LBNewAnselFilter.h"
#include "LBDianaFilter.h"
#include "LBNewInstantFilter.h"
#include "LBLakeFilter.h"
#include "LBLokoFilter.h"
#include "LBOld2Filter.h"
#include "LBBeautifulSkinFilter.h"
#include "Utils.h"
#include "LBFaceSkinDetect.h"
#include "LBVintageHighFilter.h"
#include "LBAnsenFilter.h"
#include "LBSutroFilter.h"
#include "LBXXproFilter.h"
#include "LBXLofiFilter.h"
#include "LBXWaldenFilter.h"
#include "LBXNashVilleFilter.h"
#include "LBXLordKelvinFilter.h"
#include "LBXLomoFilter.h"
#include "LBXBWFilter.h"
#include "LBXRiseFilter.h"
#include "LBNormalBlendTextureFilter.h"
#include "LBFrameFilter.h"
#include "LBNoDoFilter.h"
#include "LBXOldTimeFilter.h"

#define USE_DEPTH_BUFFER 1
#define DEGREES_TO_RADIANS(__ANGLE) ((__ANGLE) / 180.0 * M_PI)

#define TIME_PER_ROTATE 0.25f // (s)
#define DELTA_ANGLE     90/TIME_PER_ROTATE
#define MAX_TIME_SAMPLE 10
#define TIME_SAMPLE_IGNORE 0.2f
#define MAX_DELTA_TIME 0.5f
#define LIMIT_FACE 10
#define ZOOM 1

#define NUM_FRAME 12

// A class extension to declare private methods
@interface EAGLCameraView (){
    AVCaptureDevice* captureDevice;
    
    LBTexture* tex;
    LBTexture* texCapture;                   // create when capture, and delete when return capturing.
    LBTexture* blendTexture;
    LBTexture* blackBoardBlendTexture;
    
    LBTexture* frameTexture;
    
    LBFilter* filter;
    LBFrameFilter* frameFilter;
    LBBeautifulSkinFilter* beautifulSkinFilter;
    LBAutoContrastFilter* autoContrastFilter;
    LBToneCurveFilter* toneCurvesFilter;
    
    float scaleInNormal;
    float scaleInRotateOneTime;
    float scale;
    
    float angle;                                        // 0, 1, 2, 3  ~ 0, 90, 180, 270 degree
    
    // variable for smooth rotate
    float targetAngle;
    float angleChanged;         // angle changed from begin rotating point
    float angleChangedTarget;   // angle target from begin rotating point
    bool isNeedCallDidFinishRotate;
    float captureAngle;
    
    // face detection
    float *faceBoundData;       //x, y, width, height, xLeftEye, yRightEye, xRightEye, yRightEye, xMouth, yMouth
    int numFace;
    UIColor* averageSkinColor;
    
    float deltaScale;
    int histogram[256];
    // END variable for smooth rotate
    
    double endTime, beginTime, t;
    int numTimeSample;
    double times[MAX_TIME_SAMPLE];
    
    NSArray* frameNames;
    AVCaptureSession *captureSession;
    AVCaptureDeviceInput *captureInput;
    bool isEnableFrame;
    
    CVOpenGLESTextureCacheRef _videoTextureCache;
    CVOpenGLESTextureRef cvTexture;
    LBFramebuffer* captureFramebuffer;
    CMSampleBufferRef sampleBufferRef;
    
    bool isRendering;
    NSNumber* isRenderingLock;
    bool isNeedUpdateUi;
    bool isNeedCapture;
    bool isFixWidth;
    bool isDelaying;
    bool isNeedDeleteOpenGlData;
    CGPoint lastestFocusPoint;
    
    NSTimer* flashTimer;
}

@property (nonatomic, retain) EAGLContext *context;
@property (nonatomic, assign) NSTimer *animationTimer;
@property (nonatomic, retain) UIActivityIndicatorView* activityIndicator;
@property (nonatomic, retain) UIColor* averageSkinColor;
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic) bool isEnableFrame;
@property (nonatomic, assign) bool isNeedUpdateUi;
@property (nonatomic, assign) float targetAngle;

- (BOOL) createFramebuffer;
- (void) destroyFramebuffer;
- (void)prepairOpenGLAndResource;
- (void) calculateNewSizeWithTexture:(LBTexture*) texture;
- (void)faceDetection;
- (void) addActivityIndicator;
- (void) removeActivityIndicator;
//- (void) loadFrameTextures;
- (LBTexture*) createTexture:(NSString*) fileName;
- (void) inputTexture;
- (void)receiveTexture;
- (void) setIsRenderingToFalse;
- (void) captureWithFlashDelay;
- (void) setFocusPoint:(CGPoint)point;
- (void) setFocusPointValue:(NSValue*)value;
- (void) setFlashOnOff:(NSNumber*)nsIsFlashOn;
- (void) stopFirstFlash;
- (void) flashSecond;

@end


@implementation EAGLCameraView

// Synthesized accessors for the properties declared in the header and in the
// class extension.
@synthesize context;
@synthesize animationTimer;
@synthesize animationInterval;
@synthesize imageRect;
@synthesize delegate;
@synthesize inputImg;
@synthesize isUseSmoothSkin;
@synthesize averageSkinColor;
@synthesize frameIndex;
@synthesize isAllowCrop;
@synthesize isEnableFrame;
@synthesize captureSession;
@synthesize isNeedUpdateUi;
@synthesize targetAngle;
@synthesize isFlashOn;

// You must implement this method: backing the view with a CAEAGLLayer is what
// allows it to be used as an OpenGL ES render target.
+ (Class)layerClass {
    return [CAEAGLLayer class];
}

//Created GlView
// Designated initializer: configures the CAEAGLLayer for retina rendering,
// creates the OpenGL ES 2 context, seeds default state, loads resources and
// starts the capture session. Returns nil (after releasing self, MRC-style)
// when an ES2 context cannot be created or made current.
- (id)initWithFrame:(CGRect)frame {
  if ((self = [super initWithFrame:frame])) {
    // Get the layer
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
      
      // Render at native (retina) resolution.
      eaglLayer.contentsScale = [UIScreen mainScreen].scale;
      self.contentScaleFactor = [UIScreen mainScreen].scale;
    
    eaglLayer.opaque = YES;
    eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
    
    context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    
    if (!context || ![EAGLContext setCurrentContext:context]) {
        [self release];
        return nil;
    }
    
    // Target ~60 fps for the animation timer.
    animationInterval = 1.0 / 60.0;
    // Map the GL viewport to the full view, in pixels (points * scale).
    [self viewportX:0 y:0 width:self.frame.size.width*eaglLayer.contentsScale height:self.frame.size.height*eaglLayer.contentsScale andScreenSizeWidth:self.frame.size.width screenSizeHeight:self.frame.size.height];
    
      
      // Default state.
      faceBoundData = 0;
      blackBoardBlendTexture = 0;
      
      numTimeSample = 0;
      deltaScale = 0.0f;
      scale = 1.0f;
      tex = 0;
      texCapture = 0;
      effectIndex = -1;         // forces -effect: to build the first filter
      frameIndex = 0;
      frameTextures = 0;
      frameTexture = 0;
      isAllowCrop = true;
      isNeedCallDidFinishRotate = false;
      isNeedRenderFrameFilter = false;
      isEnableFrame = false;
      isRendering = false;
      isNeedUpdateUi = false;
      isNeedCapture = false;
      isFlashOn = false;
      isUseBackCamera = true;
      isUsingCamera = true;
      isDelaying = false;
      isNeedDeleteOpenGlData = true;
      lastestFocusPoint = CGPointMake(0.5, 0.5);
      
      // Border/frame overlay images, indexed by frameIndex.
      frameNames = [[NSArray alloc] initWithObjects:
                    @"f12.png",
                    @"f11.png",
                    @"lb_lofi_border.png",
                    @"lb_lord_kelvin_border.png",
                    @"lb_black_border.png",
                    @"f1.png",
                    @"lb_xpro2_border.png",
                    @"f6.png",
                    @"f7.png",
                    @"f8.png",
                    @"f9.png",
                    @"f10.png",
                    nil];
      
      [self prepairOpenGLAndResource];
      [self initCaptureSession];
      // Token object used only as the @synchronized lock for isRendering.
      isRenderingLock = [[NSNumber alloc] init];
  }
  return self;
}

// Builds the AVFoundation capture pipeline: a CoreVideo texture cache for
// zero-copy camera->GL upload, the default video device as input, and a BGRA
// video-data output delivering frames to this object on a private serial queue.
- (void) initCaptureSession{
    
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, context, NULL, &_videoTextureCache);
#else
    // NOTE(review): this branch references `_context`, but the ivar is named
    // `context` — it would not compile if this path were ever selected; confirm.
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)_context, NULL, &_videoTextureCache);
#endif
    
    if (err){
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }
    
    captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    captureInput = [AVCaptureDeviceInput
                                          deviceInputWithDevice:captureDevice
                                          error:nil];
    
    AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    // Drop frames we cannot keep up with instead of queuing them.
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    
    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);    // the output retains the queue (pre-ARC GCD semantics)
    
    // BGRA matches the GL_BGRA upload performed in -receiveTexture.
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    
    self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
    
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    
    
    // Taller (568pt) screens get the 720p preset; others get 4:3 VGA.
    if([Utils is568Height]){
        [self.captureSession setSessionPreset:AVCaptureSessionPreset1280x720];
        isFixWidth = true;
    }
    else{
        [self.captureSession setSessionPreset:AVCaptureSessionPreset640x480];
        isFixWidth = false;
    }
    // Default focus/exposure point: center of the frame.
    [self setFocusPoint:CGPointMake(0.5, 0.5)];
}

// AVCaptureVideoDataOutput delegate callback, invoked on the private
// "cameraQueue". Drops the frame when a previous one is still being rendered,
// otherwise hands the sample buffer to the main thread synchronously.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    
    @synchronized(isRenderingLock){
        
        // BUGFIX: test the drop flag BEFORE creating the autorelease pool.
        // The original allocated the pool first and then returned without
        // draining it, leaking a pool on every dropped frame.
        if(isRendering) return;
        isRendering = true;
        
        NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
        
        // Valid for the duration of this call only; safe because we block
        // (waitUntilDone:YES) until the main thread has consumed it.
        sampleBufferRef = sampleBuffer;
        
        [self performSelectorOnMainThread:@selector(receiveTexture) withObject:nil waitUntilDone:YES];
        
        [pool drain];
    }
}

// Main-thread half of the capture pipeline (called synchronously from
// captureOutput:didOutputSampleBuffer:fromConnection:). Wraps the camera frame
// in a GL texture via the CoreVideo texture cache, rotates/crops it into
// `tex`, feeds the filter chain and draws one frame. When a still capture was
// requested (isNeedCapture), also snapshots the frame into `texCapture`,
// stops the session and notifies the delegate.
- (void)receiveTexture{
    
    logMemUsage();
    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
    
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
    
    //Get information about the image
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    if (!_videoTextureCache)
    {
        NSLog(@"No video texture cache");
        // BUGFIX: the original returned here without draining the pool and
        // without re-arming isRendering, leaking the pool and permanently
        // freezing the pipeline (every later frame would be dropped).
        // setIsRenderingToFalse must be deferred: the capture queue still
        // holds isRenderingLock while waiting for us.
        [self performSelector:@selector(setIsRenderingToFalse) withObject:nil afterDelay:0.01];
        [pool drain];
        return;
    }
    
    glActiveTexture(GL_TEXTURE0);
    // Zero-copy: create a GL texture backed directly by the camera pixel buffer.
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                _videoTextureCache,
                                                                imageBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                width,
                                                                height,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &cvTexture);
    if (err){
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        // BUGFIX: on failure cvTexture is not valid; bail out instead of
        // falling through and calling CVOpenGLESTextureGetName on it.
        [self performSelector:@selector(setIsRenderingToFalse) withObject:nil afterDelay:0.01];
        [pool drain];
        return;
    }
    
    unsigned int texId = CVOpenGLESTextureGetName(cvTexture);
    LBTexture* texture = new LBTexture(texId, width, height);
    
    // Default: portrait preview (camera frames arrive landscape), mirrored.
    float rotatingAngle = -90;
    float textureRatio = self.frame.size.width/self.frame.size.height;
    
    float newTextureWidth, newTextureHeight;
    
    // Size the destination texture to the view's aspect ratio.
    if(isFixWidth){
        newTextureWidth = height/textureRatio;
        newTextureHeight = height;
    }
    else{
        newTextureWidth = width;
        newTextureHeight = width*textureRatio;
    }
    
    float xScale = -1.0f;
        
    
    if(isNeedCapture){
        // For a still capture, bake the current device orientation into the
        // rotation so the saved image comes out upright.
        [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
        [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
        
        if(deviceOrientation == UIDeviceOrientationPortrait)
            rotatingAngle = -90;
        else if(deviceOrientation == UIDeviceOrientationLandscapeRight)
            rotatingAngle = 0;
        else if(deviceOrientation == UIDeviceOrientationPortraitUpsideDown)
            rotatingAngle = 90;
        else if(deviceOrientation == UIDeviceOrientationLandscapeLeft)
            rotatingAngle = 180;
    }
    
    if(isNeedCapture == false && isUseBackCamera == false)      // flip image when capturing using front camera.
        xScale = 1.0f;
    
    // A quarter-turn rotation swaps the output width/height.
    if(rotatingAngle == 90 || rotatingAngle == -90){
        if(isFixWidth){
            newTextureHeight = height/textureRatio;
            newTextureWidth = height;
        }
        else{
            newTextureHeight = width;
            newTextureWidth = width*textureRatio;
        }
    }
    
    // create tex rotate, to right orientation
    DELETE_SAFELY(tex);
    tex = new LBTexture(newTextureWidth, newTextureHeight);
    LBFramebuffer* fbo = new LBFramebuffer(tex);
    fbo->beginRenderToTexture();
    texture->setLocationByCenter(newTextureWidth/2, newTextureHeight/2);
    texture->setAngle(rotatingAngle);
    texture->setScale(xScale, 1.0f);
    texture->renderUseDefaultProgram();
    // The GL texture id belongs to the CV texture cache, not to us.
    texture->setIsDeleteTexIdWhenDeleteObj(false);
    DELETE_SAFELY(texture);
    DELETE_SAFELY(fbo);
    
    
    // stop capture session when call capture
    if(isNeedCapture){
        [captureSession stopRunning];
        [self startAnimation];
        isNeedCapture = false;
        isUsingCamera = false;
        captureAngle = 0;
        
        //create origin texture (an unfiltered copy of the captured frame)
        DELETE_SAFELY(texCapture);
        texCapture = new LBTexture(tex->getWidth(), tex->getHeight());
        LBFramebuffer* fbo = new LBFramebuffer(texCapture);
        fbo->beginRenderToTexture();
        tex->renderUseDefaultProgram();
        DELETE_SAFELY(fbo);
        
        if(delegate && [delegate respondsToSelector:@selector(didCaptureImage)])
           [delegate didCaptureImage];
    }
    
    
    [self inputTexture];
    
    isNeedUpdateUi = true;
    
    [self loop];
    
    //clean up
    if(cvTexture != 0){
        CFRelease(cvTexture);
        cvTexture = 0;
    }
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
    
    // Deferred (not direct) so we do not try to take isRenderingLock while
    // the capture queue is still blocked inside @synchronized waiting for us.
    [self performSelector:@selector(setIsRenderingToFalse) withObject:nil afterDelay:0.01];
    
    [pool drain];
}

// Re-arms frame processing. Invoked via a delayed performSelector from
// receiveTexture so it never tries to take isRenderingLock while the capture
// queue is still blocked holding it (which would deadlock).
- (void) setIsRenderingToFalse{
    @synchronized(isRenderingLock){
        isRendering = false;
    }
}

// Forwards the screen size (points) and viewport (pixels) to the shared
// LBGraphics singleton used by the C++ rendering layer.
- (void)viewportX: (int)x y:(int)y width:(int) width height:(int) height andScreenSizeWidth:(int) widthScreen screenSizeHeight:(int)heightScreen{
    LBGraphics::getInstance()->setScreenSizeAndViewportInRealDevice(widthScreen, heightScreen, x, y, width, height);
}

// One-time GL and resource setup: blend state, the shared LBGraphics
// instance, the blend texture, and the default filters.
- (void)prepairOpenGLAndResource {
    //glEnable(GL_TEXTURE_2D);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);      // standard alpha blending
    glDisable(GL_DEPTH_TEST);
    
    /*** need ***/
    LBGraphics* g = LBGraphics::getInstance();
    g->init();
    /*** END need ***/

    OCImage* ocImg;
    
    // Overlay texture used by blending effects.
    ocImg = [[OCImage alloc] initWithFileName:@"blend.png"];
    blendTexture = new LBTexture([ocImg imageData], [ocImg width], [ocImg height], [ocImg byteInAPixel]);
    blendTexture->setLocationAndScale(0.0f, 0.0f, 1.0f, 1.0f);
    [ocImg release];
    
//    [self loadFrameTextures];
    
//    frameFilter = new LBFrameFilter();
    
    autoContrastFilter = new LBAutoContrastFilter();
    // Skin smoothing is only available from iOS 5.
    if(SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"5.0")){
        beautifulSkinFilter = new LBBeautifulSkinFilter();
    }

    // Start with effect 0 (the pass-through filter in -effect:'s default case).
    [self effect:0];
}

// Render-loop tick: smooths the frame time over a sliding window of up to
// MAX_TIME_SAMPLE samples, then advances animation state and redraws when
// flagged. Called both from the animation timer and from receiveTexture.
- (void)loop{
    
    // calculate timer
    endTime = CACurrentMediaTime();
    double tSample = endTime - beginTime;
    beginTime = CACurrentMediaTime();
    
    if(tSample < TIME_SAMPLE_IGNORE){
        if(numTimeSample < MAX_TIME_SAMPLE){
            // Window not full yet: append the new sample.
            numTimeSample++;
            times[numTimeSample - 1] = tSample;
        }
        else{
            // Window full: shift one slot left and append the newest sample.
            // BUGFIX: was memcpy(times, times + 1, sizeof(double)*numTimeSample),
            // which copied between overlapping buffers (undefined behavior for
            // memcpy) AND read one element past the end of the array; memmove
            // over MAX_TIME_SAMPLE-1 elements is the correct shift.
            memmove(times, times + 1, sizeof(double) * (MAX_TIME_SAMPLE - 1));
            times[MAX_TIME_SAMPLE - 1] = tSample;
        }
    
        // t = average of the samples currently in the window.
        t=0;
        for(int i = 0; i < numTimeSample; i++){
            t+= times[i];
        }
        t = t/numTimeSample;
    }
    else if(tSample < MAX_DELTA_TIME){
        t = tSample;
    }
    else t = MAX_DELTA_TIME;    // clamp long stalls so animations don't jump
    // END calculate timer
    
    [self update];
    
    if(isNeedUpdateUi)
        [self drawView];
    
}

// Per-tick state update: performs deferred work (frame-filter render and the
// rotate-finished delegate callback) and advances the smooth-rotation
// animation — `angle` moves toward targetAngle at DELTA_ANGLE deg/s while
// `scale` interpolates between the 0/180 and 90/270 fit scales.
- (void)update{
//    NSLog(@"Begin update");
    
    if(isNeedRenderFrameFilter){
        // NOTE(review): frameFilter is only allocated in code that is
        // commented out in prepairOpenGLAndResource; confirm it is non-null
        // before this flag can ever be set, or this dereferences null.
        frameFilter->render();
        isNeedRenderFrameFilter = false;
    }
    
    if(isNeedCallDidFinishRotate){
        [delegate didFinishRotate:self];
        isNeedCallDidFinishRotate = false;
    }
    
    if(angleChanged < angleChangedTarget){          //rotate counter clock
        self.isNeedUpdateUi = true;
        angle += DELTA_ANGLE * t;
        angleChanged += DELTA_ANGLE * t;
        
        // Scale toward the fit of the quadrant being rotated into.
        scale += deltaScale*t;
        if((angle > 0 && angle <= 90) || (angle > 180 && angle < 270)){
            deltaScale = (scaleInRotateOneTime - scaleInNormal)/TIME_PER_ROTATE;
        }
        else{
            deltaScale = (scaleInNormal - scaleInRotateOneTime)/TIME_PER_ROTATE;
        }
        if(angle > 360) angle = 1;
        if(angleChanged > angleChangedTarget){                //stop rotate: snap to the exact target
            isNeedCallDidFinishRotate = true;
            angle = targetAngle;
            scale = self.scaleFinal;
            angleChangedTarget = angleChanged = 0;
        }
    }
//    NSLog(@"End update");
    
}

// Draws the current output texture (the filtered camera frame) into the
// on-screen framebuffer and presents it. No-op until the first camera frame
// has been received.
- (void)drawView {

//    NSLog(@"Begin drawView");
    if(tex == 0) return;
    // require
    [EAGLContext setCurrentContext:context];
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    // END require
    
    // require to use LBGraphics class
    GLint defFBO;
    glGetIntegerv(GL_FRAMEBUFFER_BINDING, &defFBO);
    LBGraphics::getInstance()->setDefaulFrameBuffer(defFBO);
    // END require to use LBGraphics class
    
    // DRAW HERE
    LBGraphics* g = LBGraphics::getInstance();
    g->renderToScreen();
    g->clear(0.2f, 0.2f, 0.2f);     // dark-gray letterbox background
    
    
    LBTexture* outputTexture = [self outputTexture];
    
    // Centered, with the current rotation/scale from the smooth-rotate animation.
    if(outputTexture){
        outputTexture->setLocationByCenter(self.frame.size.width/2, self.frame.size.height/2);
        outputTexture->setAngle(angle);
        outputTexture->setScale(scale, scale);
        outputTexture->renderUseDefaultProgram();
    }

    //END DRAW HERE
  
    // require
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];                  //show the render buffer
    // END require

    isNeedUpdateUi = false;
//    NSLog(@"End drawView");
}


// Tap-to-focus: forwards the touch to the delegate, then maps the touch
// location into the capture device's normalized focus space and applies it.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    // Let the delegate observe raw touch events first (optional hook;
    // messaging nil returns NO, so the respondsToSelector: check suffices).
    if ([delegate respondsToSelector:@selector(eaglViewTouchesBegan:withEvent:)]) {
        [delegate eaglViewTouchesBegan:touches withEvent:event];
    }

    UITouch *firstTouch = [[touches allObjects] objectAtIndex:0];
    CGPoint location = [firstTouch locationInView:self];

    // Device focus coordinates: x runs along view y, y runs opposite view x
    // (the sensor is rotated 90° relative to the portrait view).
    CGPoint focus = CGPointMake(location.y / self.frame.size.height,
                                1.0f - location.x / self.frame.size.width);

    [self setFocusPoint:focus];
    lastestFocusPoint = focus;
}

// Points the camera's focus (and, when supported, exposure) at `point`,
// given in the capture device's normalized coordinate space.
- (void)setFocusPoint:(CGPoint)point{
    NSError* error = nil;
    
    // BUGFIX: check support for the mode that is actually set below
    // (continuous autofocus). The original tested AVCaptureFocusModeAutoFocus
    // but then set AVCaptureFocusModeContinuousAutoFocus; per AVFoundation,
    // setting an unsupported focusMode raises an exception, so the check must
    // match the mode being applied.
    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&
        [captureDevice isFocusPointOfInterestSupported]){
        if ([captureDevice lockForConfiguration:&error]) {
            // Set the point of interest before the mode, as recommended.
            [captureDevice setFocusPointOfInterest:point];
            [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
            
            // Exposure follows the same point when the device allows it.
            if([captureDevice isExposurePointOfInterestSupported] && [captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
                [captureDevice setExposurePointOfInterest:point];
                [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
            }
            
            [captureDevice unlockForConfiguration];
        } else {
            NSLog(@"Error : Not support focus: %@", error);
        }
    }
}

// NSValue-boxed variant so a focus point can be delivered through
// performSelector:-style APIs that only take object arguments.
- (void)setFocusPointValue:(NSValue *)value{
    [self setFocusPoint:[value CGPointValue]];
}

// Rebuilds the drawable-sized framebuffer whenever the layer is laid out,
// redraws the last frame if one exists, and (re)starts the capture session.
- (void)layoutSubviews {
    // BUGFIX: UIView subclasses should invoke super's implementation.
    [super layoutSubviews];
    [EAGLContext setCurrentContext:context];
    // The renderbuffer storage comes from the layer, so it must be recreated
    // at the new size.
    [self destroyFramebuffer];
    [self createFramebuffer];
    if(tex != 0){
        [self drawView];
    }
    [self.captureSession startRunning];
}


// Creates the on-screen framebuffer: a color renderbuffer whose storage is
// backed by the CAEAGLLayer, plus (when USE_DEPTH_BUFFER) a 16-bit depth
// renderbuffer of matching size. Returns NO if the framebuffer is incomplete.
- (BOOL)createFramebuffer {
    
    glGenFramebuffersOES(1, &viewFramebuffer);
    glGenRenderbuffersOES(1, &viewRenderbuffer);
    
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    // Storage comes from the layer, so the buffer matches the drawable size.
    [context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(CAEAGLLayer*)self.layer];
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
    
    // Cache the actual pixel dimensions for the depth buffer below.
    glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
    glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
    
    if (USE_DEPTH_BUFFER) {
        glGenRenderbuffersOES(1, &depthRenderbuffer);
        glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
        glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
        glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
    }
    
    if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
        NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
        return NO;
    }
    
    return YES;
}


// Releases the GL buffers created in createFramebuffer and zeroes the
// handles so a later create starts from a clean slate.
- (void)destroyFramebuffer {
    glDeleteFramebuffersOES(1, &viewFramebuffer);
    glDeleteRenderbuffersOES(1, &viewRenderbuffer);
    viewFramebuffer = 0;
    viewRenderbuffer = 0;

    // The depth buffer only exists when USE_DEPTH_BUFFER was enabled.
    if (depthRenderbuffer != 0) {
        glDeleteRenderbuffersOES(1, &depthRenderbuffer);
        depthRenderbuffer = 0;
    }
}


// Starts the timer-driven render loop and resets the frame-time clock.
// NOTE(review): a repeating NSTimer retains its target, so this view is kept
// alive until the timer is invalidated via stopAnimation/setAnimationTimer:.
- (void)startAnimation {
    self.animationTimer = [NSTimer scheduledTimerWithTimeInterval:animationInterval target:self selector:@selector(loop) userInfo:nil repeats:YES];
    endTime = beginTime = CACurrentMediaTime();
}


// Stops the render loop; assigning nil through the custom setter
// invalidates the running timer.
- (void)stopAnimation {
    [self setAnimationTimer:nil];
}


// Custom setter for the assign-semantics animationTimer property: always
// invalidates the previous timer before storing the new one (the timer is
// owned by its run loop, not retained here).
- (void)setAnimationTimer:(NSTimer *)newTimer {
    [animationTimer invalidate];
    animationTimer = newTimer;
}


// Stores the new tick interval; if the render loop is already running it is
// restarted so the new interval takes effect immediately.
- (void)setAnimationInterval:(NSTimeInterval)interval {
    animationInterval = interval;

    if (!animationTimer) {
        return;     // not running — the next startAnimation picks it up
    }

    [self stopAnimation];
    [self startAnimation];
}

// Getter for the render-loop tick interval, in seconds.
- (NSTimeInterval) animationInterval{
    return animationInterval;
}

// Frees every OpenGL-backed resource owned by this view. Idempotent: guarded
// by isNeedDeleteOpenGlData so a second call is a no-op.
-(void) deleteOpenGLData{
    if(isNeedDeleteOpenGlData == false)
        return;
    DELETE_SAFELY(filter);
    DELETE_SAFELY(autoContrastFilter);
    DELETE_SAFELY(tex);
    DELETE_SAFELY(blendTexture);
    DELETE_SAFELY(frameFilter);
    DELETE_SAFELY(texCapture);
    DELETE_SAFELY(beautifulSkinFilter);
    DELETE_SAFELY(blackBoardBlendTexture);
    DELETE_SAFELY_ARRAY(faceBoundData);
    
    // Delete the frame textures before the array that holds them.
    if(frameTextures!=0){
        for(int i = 0; i < NUM_FRAME; i++){
            DELETE_SAFELY(frameTextures[i]);
        }
    }
    DELETE_SAFELY_ARRAY(frameTextures);
    DELETE_SAFELY(frameTexture);
    // BUGFIX: removed a duplicate DELETE_SAFELY(texCapture) — it is already
    // deleted above.
    
    isNeedDeleteOpenGlData = false;
}

// MRC teardown: releases owned Objective-C objects, frees GL resources and
// the CoreVideo texture cache, and clears the current EAGL context if it is
// ours.
- (void)dealloc {
    // BUGFIX: CFRelease(NULL) crashes, and the cache is never created when
    // CVOpenGLESTextureCacheCreate fails in initCaptureSession — guard it.
    if (_videoTextureCache) {
        CFRelease(_videoTextureCache);
        _videoTextureCache = NULL;
    }
    // %lu matches NSUInteger on both 32- and 64-bit (was %d).
    NSLog(@"eaglView.retainCount = %lu", (unsigned long)self.retainCount);
    [inputImg release];
    [averageSkinColor release];
    // BUGFIX: these were alloc'd in initWithFrame: but never released.
    [frameNames release];
    [isRenderingLock release];
    [self deleteOpenGLData];
    
    [self stopAnimation];
    
    if ([EAGLContext currentContext] == context) {
        [EAGLContext setCurrentContext:nil];
    }
    [self destroyFramebuffer];
    [context release];
    [super dealloc];
}



// Installs the effect filter for `index`, loading any tone-curve maps or
// blend textures the effect needs. Re-selecting the current effect only
// re-renders it; any unknown index falls back to the pass-through
// LBNoDoFilter (default case).
- (void) effect:(int) index{
    // Same effect already active — just re-render with the current input.
    if(effectIndex == index  && filter != 0){
        [self renderFilter];
        return;
    }
    effectIndex  = index;
    isNeedUpdateUi = true;
    // Tear down the previous filter and its blend texture before building the new one.
    DELETE_SAFELY(filter);
    DELETE_SAFELY(blackBoardBlendTexture);
    switch (index) {
            
        default:
        {
            // Pass-through: shows the camera texture unmodified.
            filter = new LBNoDoFilter();
        }
            break;
            
        case 1:
        {
//            filter = new LBXRiseFilter();
//            OCImage* curvesImage = [[OCImage alloc] initWithFileName:@"lb_rise_curves_map.png"];
//            ((LBXRiseFilter*)filter)->setToneData([curvesImage imageData]);
//            [curvesImage release];
//            
//            OCImage* blendImage = [[OCImage alloc] initWithFileName:@"blackboard.png"];;
//            blackBoardBlendTexture = new LBTexture(blendImage.imageData, blendImage.width, blendImage.height, blendImage.byteInAPixel);
//            [blendImage release];
//            ((LBXRiseFilter*)filter)->setBlendTexture(blackBoardBlendTexture);
            filter = new LBVintageHighFilter();
            break;
        }
            
        case 2:
        {
//            filter = new LBXXproFilter();
//            OCImage* curvesImage = [[OCImage alloc] initWithFileName:@"xpro_curves_map.png"];;
//            ((LBXXproFilter*)filter)->setToneData([curvesImage imageData]);
//            [curvesImage release];
            filter = new LBLokoFilter();
            break;
        }
        case 3:
        {
            // Lo-fi: tone curve loaded from a pre-rendered curve map image.
            filter = new LBXLofiFilter();
            OCImage* lofiCurvesImage = [[OCImage alloc] initWithFileName:@"lofi_curves_map.png"];;
            ((LBXLofiFilter*)filter)->setToneData([lofiCurvesImage imageData]);
            [lofiCurvesImage release];
            break;
        }
        case 4:
        {
            filter = new LBXWaldenFilter();
            OCImage* waldenCurvesImage = [[OCImage alloc] initWithFileName:@"walden_curves_map.png"];
            ((LBXWaldenFilter*)filter)->setToneData([waldenCurvesImage imageData]);
            [waldenCurvesImage release];
            break;
        }
        case 5:
        {
            filter = new LBXNashVilleFilter();
            OCImage* nashvilleCurvesImage = [[OCImage alloc] initWithFileName:@"nash_ville_curves_map.png"];
            ((LBXNashVilleFilter*)filter)->setToneData([nashvilleCurvesImage imageData]);
            [nashvilleCurvesImage release];
            break;
        }
        case 6:
        {
            filter = new LBXLordKelvinFilter();
            OCImage* curvesImage = [[OCImage alloc] initWithFileName:@"lord_kelvin_curves_map.png"];
            ((LBXLordKelvinFilter*)filter)->setToneData([curvesImage imageData]);
            [curvesImage release];
            break;
        }
        case 7:
        {
            // Lomo: tone curve loaded from a Photoshop .acv curves file.
            filter = new LBXLomoFilter();
            AcvFileReader* acvFileReader = [[AcvFileReader alloc] initWithACV:@"xlomo_curves"];
            ((LBXLomoFilter*)filter)->setToneAcvData(acvFileReader.toneCurveByteArray);
            [acvFileReader release];
            break;
        }
            
        case 8:
        {
            // Old-time: b/w curve plus a blackboard blend texture overlay.
            filter = new LBXOldTimeFilter();
            AcvFileReader* acvFileReader = [[AcvFileReader alloc] initWithACV:@"bw_curves"];
            ((LBXOldTimeFilter*)filter)->setToneAcvData(acvFileReader.toneCurveByteArray);
            [acvFileReader release];
            
            OCImage* blendImage = [[OCImage alloc] initWithFileName:@"blackboard.png"];
            blackBoardBlendTexture = new LBTexture(blendImage.imageData, blendImage.width, blendImage.height, blendImage.byteInAPixel);
            ((LBXOldTimeFilter*)filter)->setBlendTexture(blackBoardBlendTexture);
            [blendImage release];
            break;
        }
            
        case 9:
        {
            filter = new LBXBWFilter();
            AcvFileReader* acvFileReader = [[AcvFileReader alloc] initWithACV:@"bw_curves"];
            ((LBXBWFilter*)filter)->setToneAcvData(acvFileReader.toneCurveByteArray);
            [acvFileReader release];
            break;
        }
    }
//    self.frameIndex = effectIndex - 1;
    [self renderFilter];
}

// Runs the active effect filter over its input texture: the smoothed-skin
// output when skin smoothing is enabled (iOS >= 5 only), otherwise the raw
// camera texture. No-op when there is no filter or no input yet.
- (void) renderFilter{
    if(filter == 0) return;

    LBTexture* inputTex = 0;
    if(isUseSmoothSkin && SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"5.0")){
        inputTex = beautifulSkinFilter->getOutputTexture();
    }
    else if(tex != 0){
        inputTex = tex;
    }
    else{
        return;     // no camera frame yet
    }

    filter->setTexture(inputTex);
    filter->setLocationAndScale(0, 0, 1.0f, 1.0f);
    filter->render();
}

// The texture to show on screen: the active filter's final output, or the
// raw camera texture when no filter is installed.
- (LBTexture*) outputTexture{
    return (filter != 0) ? filter->getFinalOutputTexture() : tex;
}

// Computes the aspect-fit rectangle (imageRect) for `texture` on screen and
// the resulting scale factor for the current targetAngle. At 90/270 the
// texture's width and height are swapped before fitting. The scale is stored
// in scaleInNormal (0/180) or scaleInRotateOneTime (90/270) so -update can
// interpolate between them while rotating.
- (void) calculateNewSizeWithTexture:(LBTexture*) texture{
    float widthScreen = self.frame.size.width;
    float heightScreen = self.frame.size.height;
    float widthTexture, heightTexture;
    if(targetAngle == 0 || targetAngle == 180){
        widthTexture = texture->getWidth();
        heightTexture = texture->getHeight();
    }
    else{
        // Quarter-turn: the texture appears with swapped dimensions.
        widthTexture = texture->getHeight();
        heightTexture = texture->getWidth();

    }
    float ratioScreen = (float)widthScreen/heightScreen;
    float ratioTexture = (float)widthTexture/heightTexture;
    
    
    float scaleFinal;
    if(ratioTexture > ratioScreen){
        // Texture is wider than the screen: fit width, letterbox vertically.
        imageRect.size.width = widthScreen;
        imageRect.origin.x = 0.0f;
        imageRect.size.height = imageRect.size.width/ratioTexture;
        imageRect.origin.y = (heightScreen-imageRect.size.height)/2.0f;
        scaleFinal = (float)imageRect.size.width/widthTexture;
    }
    else{
        // Texture is taller than the screen: fit height, letterbox horizontally.
        imageRect.size.height = heightScreen;
        imageRect.origin.y = 0.0f;
        imageRect.size.width = imageRect.size.height*ratioTexture;
        imageRect.origin.x = (widthScreen-imageRect.size.width)/2.0f;
        scaleFinal = (float)imageRect.size.height/heightTexture;
    }
    
    if(targetAngle == 0 || targetAngle == 180){
        scaleInNormal = scaleFinal;
    }
    else{
        scaleInRotateOneTime = scaleFinal;
    }
}

// Rotates the preview a quarter turn: advances the target/capture angles
// (wrapping at 360 back to 0), extends the animation target, and recomputes
// the on-screen fit for the new orientation.
- (void) rotate{
    float nextTarget = targetAngle + 90.0f;
    self.targetAngle = (nextTarget == 360.0f) ? 0.0f : nextTarget;

    captureAngle += 90;
    if(captureAngle == 360){
        captureAngle = 0;
    }

    angleChangedTarget += 90;

    [self calculateNewSizeWithTexture:tex];
}

// imageRect translated from view-local coordinates into the superview's
// coordinate space.
- (CGRect)imageRectByParentOfParent{
    return CGRectOffset(imageRect, self.frame.origin.x, self.frame.origin.y);
}

//rect is image real size, imageRect is size
// Crop `cropRect` (given in the superview's coordinate space, in points)
// out of the filtered output texture, rotated by targetAngle, and write
// it to filePath as a PNG.
- (void)saveImageFinalTo:(NSString*)filePath rect:(CGRect) cropRect{
    
    // Convert cropRect from screen points into texture pixels.
    CGRect imageRectByParent = [self imageRectByParentOfParent];
    CGRect rect = CGRectMake((cropRect.origin.x - imageRectByParent.origin.x) / self.scaleFinal, 
                              (cropRect.origin.y - imageRectByParent.origin.y) / self.scaleFinal,
                              cropRect.size.width/self.scaleFinal,
                              cropRect.size.height / self.scaleFinal);
    
    LBTexture* outputTexture = [self outputTexture];
    
    // Offscreen render target sized to the crop, backed by an FBO.
    LBTexture* saveTexture = new LBTexture(rect.size.width, rect.size.height);
    LBFramebuffer* fbo = new LBFramebuffer(saveTexture);
    
    fbo->beginRenderToTexture();
    LBGraphics::getInstance()->clear(1.0f, 1.0f, 1.0f);
    outputTexture->setLocationByCenter(rect.size.width/2, rect.size.height/2);
    outputTexture->setAngle(targetAngle);
    
    // Map the crop rectangle back into the unrotated texture's coordinate
    // space for each supported 90-degree orientation (width/height swap
    // for 90 and 270).
    if(targetAngle == 0)
        outputTexture->sourceRect(rect.origin.x, rect.origin.y, rect.size.width, rect.size.height);
    else if(targetAngle == 90)
        outputTexture->sourceRect(outputTexture->getWidth() - rect.origin.y - rect.size.height, rect.origin.x, rect.size.height, rect.size.width);
    else if(targetAngle == 180)
        outputTexture->sourceRect(outputTexture->getWidth() - rect.origin.x - rect.size.width, outputTexture->getHeight() - rect.origin.y - rect.size.height, rect.size.width, rect.size.height);
    else if(targetAngle == 270)
        outputTexture->sourceRect(rect.origin.y, outputTexture->getHeight() - rect.origin.x - rect.size.width, rect.size.height, rect.size.width);
    
    // NOTE(review): setLocationByCenter was already called above with the
    // same center — presumably redundant; confirm before removing.
    outputTexture->setLocationByCenter(rect.size.width/2.0f, rect.size.height/2.0f);
    // Negative Y scale flips the render — presumably to compensate for the
    // framebuffer's vertical orientation; confirm against LBTexture.
    outputTexture->setScale(1.0f, -1.0f);
    outputTexture->renderUseDefaultProgram();
    
    // Restore full-texture sampling for subsequent on-screen renders.
    outputTexture->resetSourceRect();
    
    OCImage *ocImgSave = [[OCImage alloc] initWithTexture:saveTexture];
    [ocImgSave saveToPngFile:filePath];
    [ocImgSave release];
    DELETE_SAFELY(saveTexture);
    DELETE_SAFELY(fbo);
}

// Save the full filtered output to filePath as a PNG, rotated by
// targetAngle.
- (void)saveImageFinalTo:(NSString*)filePath{
    
    LBTexture* outputTexture = [self outputTexture];
    
    // NOTE(review): these rotation-adjusted dimensions are computed but
    // never used — saveTexture below is created with the UNROTATED size.
    // Possibly a latent bug for 90/270-degree saves; confirm intent.
    int saveTextureWidth, saveTextureHeight;
    if(targetAngle == 0 || targetAngle == 180){
        saveTextureWidth = outputTexture->getWidth();
        saveTextureHeight = outputTexture->getHeight();
    }
    else{
        saveTextureWidth = outputTexture->getHeight();
        saveTextureHeight = outputTexture->getWidth();
    }
    
    // Offscreen render target backed by an FBO.
    LBTexture* saveTexture = new LBTexture(outputTexture->getWidth(), outputTexture->getHeight());
    LBFramebuffer* fbo = new LBFramebuffer(saveTexture);
    
    fbo->beginRenderToTexture();
    LBGraphics::getInstance()->clear(1.0f, 1.0f, 1.0f);
    // NOTE(review): this location is overwritten by the setLocationByCenter
    // call below before anything is rendered — presumably dead code.
    outputTexture->setLocationByCenter(outputTexture->getWidth()/2, outputTexture->getHeight());
    outputTexture->setAngle(targetAngle);
    
    
    outputTexture->setLocationByCenter(outputTexture->getWidth()/2.0f, outputTexture->getHeight()/2.0f);
    // Negative X scale mirrors the render horizontally — note this differs
    // from the Y flip used in the cropping variant; confirm which axis the
    // save pipeline expects.
    outputTexture->setScale(-1.0f, 1.0f);
    outputTexture->renderUseDefaultProgram();
    
    // Restore full-texture sampling for subsequent on-screen renders.
    outputTexture->resetSourceRect();
    
    OCImage *ocImgSave = [[OCImage alloc] initWithTexture:saveTexture];
    [ocImgSave saveToPngFile:filePath];
    [ocImgSave release];
    DELETE_SAFELY(saveTexture);
    DELETE_SAFELY(fbo);
}

// Build an autoreleased UIImage from the captured texture, rendered
// upright according to captureAngle. Returns nil when nothing has been
// captured yet.
- (UIImage*) getCaptureImage{
    if(texCapture == 0)
        return nil;
    
    // Swap dimensions for 90/270-degree captures so the output is upright.
    int saveTextureWidth, saveTextureHeight;
    if(captureAngle == 0 || captureAngle == 180){
        saveTextureWidth = texCapture->getWidth();
        saveTextureHeight = texCapture->getHeight();
    }
    else{
        saveTextureWidth = texCapture->getHeight();
        saveTextureHeight = texCapture->getWidth();
    }

    // because in begin, texture is in horizontal, and I rotate it 90
    LBTexture* saveTexture = new LBTexture(saveTextureWidth, saveTextureHeight);
    LBFramebuffer* fbo = new LBFramebuffer(saveTexture);
    
    fbo->beginRenderToTexture();
    LBGraphics::getInstance()->clear(1.0f, 1.0f, 1.0f);
    texCapture->setAngle(captureAngle);
    
    
    texCapture->setLocationByCenter(saveTextureWidth/2.0f, saveTextureHeight/2.0f);
    texCapture->setScale(1.0f, 1.0f);
    texCapture->renderUseDefaultProgram();
    
    texCapture->resetSourceRect();
    
    // OCImage is autoreleased; saveTexture/fbo are deleted before
    // createUiImage — presumably OCImage copies the pixels at init; verify
    // against OCImage's implementation.
    OCImage *ocImgSave = [[[OCImage alloc] initWithTexture:saveTexture] autorelease];
    DELETE_SAFELY(saveTexture);
    DELETE_SAFELY(fbo);
    UIImage* result = [ocImgSave createUiImage];
    return result;
}

// Full bounds of the input texture, in texture pixels.
- (CGRect)imageRectReal{
    return CGRectMake(0.0f, 0.0f, tex->getWidth(), tex->getHeight());
}

// Load a new input UIImage: reset the display rotation, rebuild the GL
// texture, recompute the histogram used by the auto-contrast filter,
// drop stale face-detection data, and re-apply the current effect.
- (void)inputImage:(UIImage *)image isFirstTime:(bool)isFirstTime{
    angle = 0;
    targetAngle = 0;
    
    self.inputImg = image;
    self.isUseSmoothSkin = false;
    OCImage* ocImg = [[OCImage alloc] initWithUIImage:self.inputImg];
    LBTexture* texture = new LBTexture([ocImg imageData], [ocImg width], [ocImg height], [ocImg byteInAPixel]);
    // Histogram feeds the auto-contrast filter below.
    createHistogram([ocImg imageData], [ocImg width]*[ocImg height], histogram);
    [ocImg release];
    
    autoContrastFilter->setHistogram(histogram);
    
    texture->setLocationAndScale(0.0f, 0.0f, 1.0f, 1.0f);
    
    // Replace the previous input texture and refit to the view.
    DELETE_SAFELY(tex);
    tex = texture;
    [self calculateNewSizeWithTexture:tex];
    scale = self.scaleFinal;
    
    // Invalidate cached face-detection results for the new image.
    numFace = 0;
    DELETE_SAFELY_ARRAY(faceBoundData);
    
    // Re-apply the currently selected effect and reset the frame overlay.
    [self effect:effectIndex];
    self.frameIndex = 0;
    if(isFirstTime)
        captureAngle = 0;
}

// Re-use the existing texture `tex` as input (e.g. right after a capture):
// reset the rotation state, refit to the view, drop stale face data, and
// re-render the filter chain.
- (void) inputTexture{
    
    //reset angle, angle after capture is rotated 90 degree
    angle = 0;
    targetAngle = 0;
    
    [self calculateNewSizeWithTexture:tex];
    scale = self.scaleFinal;
    
    // Invalidate cached face-detection results.
    numFace = 0;
    DELETE_SAFELY_ARRAY(faceBoundData);
    

    [self renderFilter];
}

// Run CoreImage face detection on the current input image (iOS 5+ only),
// filling faceBoundData with normalized (0..1) face-bound, eye, and mouth
// coordinates, and sampling an average skin color from the widest face.
- (void)faceDetection{
    if(SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"5.0")){
        if(delegate && [delegate respondsToSelector:@selector(shouldFaceDetection:)]) [delegate shouldFaceDetection:self];
        
        //face detection
        CIImage *ciimage = [[CIImage alloc] initWithCGImage:self.inputImg.CGImage options:nil];
        NSDictionary *options = [NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy];
        CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:options];
        NSArray *features = [detector featuresInImage:ciimage];
        [ciimage release];
        
        DELETE_SAFELY_ARRAY(faceBoundData);
        numFace = features.count;
        // NOTE(review): features.count is NSUInteger; %d assumes numFace is
        // a plain int — confirm the ivar's declared type.
        NSLog(@"numFace: %d", numFace);
        if(numFace > LIMIT_FACE) numFace = LIMIT_FACE;
        // 10 floats are written per face below: bound (4), eyes (4),
        // mouth (2) — presumably NUM_DATA_IN_A_FACE == 10; confirm.
        faceBoundData = new float[numFace*NUM_DATA_IN_A_FACE];
        
        CGRect bigestRect = CGRectMake(0, 0, 0, 0);
        
        int i = 0;
        for (CIFaceFeature *feature in features)
        {
            CGRect faceBound = [feature bounds];
            
            // Track the widest face; its skin color is sampled below.
            if(faceBound.size.width > bigestRect.size.width) bigestRect = faceBound;
            
            // All values normalized against the input image dimensions.
            faceBoundData[i] = faceBound.origin.x/self.inputImg.size.width;
            faceBoundData[i + 1] = faceBound.origin.y/self.inputImg.size.height;
            faceBoundData[i + 2] = faceBound.size.width / self.inputImg.size.width;
            faceBoundData[i + 3] = faceBound.size.height / self.inputImg.size.height;
            faceBoundData[i + 4] = feature.leftEyePosition.x/self.inputImg.size.width;
            faceBoundData[i + 5] = feature.leftEyePosition.y/ self.inputImg.size.height;
            faceBoundData[i + 6] = feature.rightEyePosition.x/self.inputImg.size.width;
            faceBoundData[i + 7] = feature.rightEyePosition.y/ self.inputImg.size.height;
            faceBoundData[i + 8] = feature.mouthPosition.x/self.inputImg.size.width;
            faceBoundData[i + 9] = feature.mouthPosition.y/ self.inputImg.size.height;
            
            i+=NUM_DATA_IN_A_FACE;
            // Stop once LIMIT_FACE faces have been written; the array only
            // holds numFace <= LIMIT_FACE entries.
            if(i > NUM_DATA_IN_A_FACE*LIMIT_FACE - 1) break;
        }
        
        // calcualte average skin
        // Sample a 30%-sized patch centered on the biggest face. The Y
        // coordinate is flipped — presumably converting between UIKit's
        // top-left origin and the image sampling origin; confirm against
        // [Utils averageColorOfImage:rect:].
        if(numFace > 0){
            CGRect faceBound = bigestRect;
            float newWidth = faceBound.size.width*0.3;
            float newHeight = faceBound.size.height*0.3;
            float newX = faceBound.origin.x + (faceBound.size.width - newWidth)/2;
            float newY = faceBound.origin.y + (faceBound.size.height - newHeight)/2;
            self.averageSkinColor = [Utils averageColorOfImage:self.inputImg rect:CGRectMake(newX, self.inputImg.size.height - newY - newHeight, newWidth, newHeight)];
            //self.averageSkinColor = [Utils averageColorOfImage:self.inputImg rect:CGRectMake(newX, newY, newWidth, newHeight)];
        }
        else{
            self.averageSkinColor = [UIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1.0];
        }
        
        const CGFloat* components = CGColorGetComponents(averageSkinColor.CGColor);
        NSLog(@"Red: %f, Green: %f, Blue %f", components[0], components[1], components[2]);
        beautifulSkinFilter->setAverageSkinColor(components[0], components[1], components[2]);
        // END calcualte average skin
        
        if(delegate && [delegate respondsToSelector:@selector(didFaceDetection:)]) [delegate didFaceDetection:self];
        isNeedUpdateUi = true;
    }
}

// Propagate the view's current size to the GL layer and refit the image.
- (void)updateOpenGLSize{
    float w = self.frame.size.width;
    float h = self.frame.size.height;
    LBGraphics::getInstance()->setScreenSizeAndViewportInRealDevice(w, h, 0, 0, w, h);
    [self calculateNewSizeWithTexture:tex];
}
// Texture-pixel to screen-point scale for the current orientation.
- (float)scaleFinal{
    bool isUpright = (targetAngle == 0 || targetAngle == 180);
    return isUpright ? scaleInNormal : scaleInRotateOneTime;
}

// True once faceDetection has allocated face-bound data.
- (bool)isFaceDetected{
    return faceBoundData != 0;
}

// Toggle skin smoothing (iOS 5+ only). Enabling it runs face detection
// once (lazily) and routes the skin filter's output into the main filter;
// disabling routes the raw texture back in.
- (void)setIsUseSmoothSkin:(bool)_isUseSmoothSkin{
    isUseSmoothSkin = _isUseSmoothSkin;
    if(SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"5.0")){
        // Lazily detect faces and prime the skin filter the first time
        // smoothing is enabled.
        if(isUseSmoothSkin && ![self isFaceDetected]){
            [self faceDetection];
            beautifulSkinFilter->setFaceBoundData(numFace, faceBoundData);
            if(tex != 0)
                beautifulSkinFilter->setTexture(tex);
            beautifulSkinFilter->setLocationAndScale(0, 0, 1.0f, 1.0f);
            beautifulSkinFilter->render();
        }
        // Re-render the main filter with the appropriate source texture.
        if(filter != 0 && tex != 0){
            filter->setTexture(isUseSmoothSkin ? beautifulSkinFilter->getOutputTexture() : tex);
            filter->setLocationAndScale(0, 0, 1.0f, 1.0f);
            filter->render();
        }
    }
    isNeedUpdateUi = true;
}

// Forward the standard brightness to the skin filter, if created.
- (void)setStandardBrightness:(float)standardBrightness{
    if(beautifulSkinFilter == 0) return;
    beautifulSkinFilter->setStandardBrightness(standardBrightness);
}
// Forward the blur size to the skin filter, if created.
- (void) setBlurSize:(float)blurSize{
    if(beautifulSkinFilter == 0) return;
    beautifulSkinFilter->setBlurSize(blurSize);
}

// Current blur size of the skin filter, or 0 when it is not created.
- (float) blurSizeOfBeautifulFilter{
    return (beautifulSkinFilter != 0) ? beautifulSkinFilter->getBlurSize() : 0;
}

// Forward the sharpness value to the skin filter, if created.
- (void) setSharpness:(float)sharpness{
    if(beautifulSkinFilter == 0) return;
    beautifulSkinFilter->setSharpness(sharpness);
}

// Re-run the skin filter and the downstream filter chain so the latest
// smoothing parameters take effect.
- (void) applySmoothSkinValue{
    if(beautifulSkinFilter == 0) return;
    beautifulSkinFilter->render();
    [self renderFilter];
}

// Update the frame filter's crop rectangle. cropRect arrives in the
// superview's coordinate space; it is normalized against the on-screen
// image, Y-flipped, then remapped for the current display rotation.
- (void) updateCropRect:(CGRect)cropRect{
    // Normalize cropRect against the on-screen image rectangle so every
    // component lies in [0, 1].
    CGRect imageRectByParent = [self imageRectByParentOfParent];
    CGRect tempRect = CGRectMake((cropRect.origin.x - imageRectByParent.origin.x)/imageRectByParent.size.width,
                                 (cropRect.origin.y - imageRectByParent.origin.y)/imageRectByParent.size.height,
                                 cropRect.size.width/imageRectByParent.size.width,
                                 cropRect.size.height/imageRectByParent.size.height);
    // Flip Y — presumably converting UIKit's top-left origin to the
    // texture's bottom-left origin; confirm against LBFrameFilter.
    CGRect rect = CGRectMake(tempRect.origin.x, 1.0f - tempRect.origin.y - tempRect.size.height, tempRect.size.width, tempRect.size.height);
    // Map into the unrotated texture space for each 90-degree orientation
    // (width/height swap for 90 and 270).
    if(targetAngle == 0){
        frameFilter->setFrameSize(rect.origin.x, rect.origin.y, rect.size.width, rect.size.height);
    }
    else if(targetAngle == 90){
        frameFilter->setFrameSize(rect.origin.y, 1.0f - rect.origin.x - rect.size.width, rect.size.height, rect.size.width);
    }
    else if(targetAngle == 180){
        frameFilter->setFrameSize(1.0f - rect.origin.x - rect.size.width, 1.0 - rect.origin.y - rect.size.height, rect.size.width, rect.size.height);
    }
    else if(targetAngle == 270){
        frameFilter->setFrameSize(1.0f - rect.origin.y - rect.size.height, rect.origin.x, rect.size.height, rect.size.width);
    }
    
    // Actual re-render is deferred to the draw loop via this flag.
//    [self renderFilter];
    isNeedRenderFrameFilter = true;
}

// Create an LBTexture from a bundled image file. The caller owns the
// returned texture and must delete it.
- (LBTexture*) createTexture:(NSString*) fileName{
    OCImage* image = [[OCImage alloc] initWithFileName:fileName];
    LBTexture* result = new LBTexture([image imageData], [image width], [image height], [image byteInAPixel]);
    [image release];
    result->setLocationAndScale(0.0f, 0.0f, 1.0f, 1.0f);
    return result;
}

//- (void) loadFrameTextures{
//    if(frameTextures == 0) frameTextures = new LBTexture*[NUM_FRAME];
//    
//    frameTextures[0] = [self createTexture:@"f1.png"];
//    frameTextures[1] = [self createTexture:@"lb_black_border.png"];
//    frameTextures[2] = [self createTexture:@"lb_lord_kelvin_border.png"];
//    frameTextures[3] = [self createTexture:@"lb_xpro2_border.png"];
//    frameTextures[4] = [self createTexture:@"lb_lofi_border.png"];
//    frameTextures[5] = [self createTexture:@"f6.png"];
//    frameTextures[6] = [self createTexture:@"f7.png"];
//    frameTextures[7] = [self createTexture:@"f8.png"];
//    frameTextures[8] = [self createTexture:@"f9.png"];
//    frameTextures[9] = [self createTexture:@"f10.png"];
//    frameTextures[10] = [self createTexture:@"f11.png"];
//    frameTextures[11] = [self createTexture:@"f12.png"];
//}

// set frame index, load all frame texture from begin

// Select the frame overlay by index and attach the frame filter to the
// render chain; a negative index (or frames disabled) detaches it.
-(void)setFrameIndex:(int)_frameIndex{
    frameIndex = _frameIndex;
    
    isAllowCrop = true;
    if(isEnableFrame == false || frameIndex < 0){
        // No frame: clear the main filter's target.
        if(filter != 0) filter->addTarget(0);
    }
    else{
        
        //load frame texture
        DELETE_SAFELY(frameTexture);
        frameTexture = [self createTexture:[frameNames objectAtIndex:frameIndex]];
        
        // BUG FIX: the frame filter must exist BEFORE it is attached as the
        // render target. The original called filter->addTarget(frameFilter)
        // with a null pointer on first use and never attached the filter it
        // then created. Also guard `filter` as the other branch does.
        if (frameFilter == 0) {
            frameFilter = new LBFrameFilter();
        }
        if(filter != 0) filter->addTarget(frameFilter);
        frameFilter->setFrameTexture(frameTexture);
        frameFilter->setTextureSize(0.0f, 0.0f, 1.0f, 1.0f);
    }
}

// Setter for targetAngle; requests a UI refresh unless one is already
// pending or the rotation animation timer is running.
- (void)setTargetAngle:(float)_targetAngle{
    targetAngle = _targetAngle;
    bool animating = animationTimer.isValid;
    if(!isNeedUpdateUi && !animating)
        self.isNeedUpdateUi = true;
}


// Trigger a capture. With a torch available and flash enabled, start the
// multi-stage flash sequence (the capture flag is raised later by
// captureWithFlashDelay); otherwise capture on the next frame.
- (void)capture{
    bool useFlash = [captureDevice hasTorch] && isFlashOn;
    if(useFlash){
        [self setFlashOnOff:[NSNumber numberWithBool:true]];
        // first flash
        [self performSelector:@selector(stopFirstFlash) withObject:nil afterDelay:0.2];
    }
    else{
        isNeedCapture = true;
    }
}

// Final stage of the flash capture sequence: raise the capture flag once
// the torch has been re-lit (scheduled from flashSecond).
- (void) captureWithFlashDelay{
    
    isNeedCapture = true;
}

// Turn the torch (flash LED) on or off. The flag is boxed as NSNumber so
// this can be scheduled via performSelector:withObject:afterDelay:.
- (void) setFlashOnOff:(NSNumber*)nsIsFlashOn{
    bool isTurnOnFlash = [nsIsFlashOn boolValue];
    // FIX: check the result of lockForConfiguration: — the original
    // ignored it and could mutate the torch on an unlocked device.
    if([captureDevice lockForConfiguration:nil]){
        if(isTurnOnFlash)
            [captureDevice setTorchMode:AVCaptureTorchModeOn];
        else [captureDevice setTorchMode:AVCaptureTorchModeOff];
        [captureDevice unlockForConfiguration];
    }
}

// Flash sequence stage 2: switch the torch off briefly, then schedule the
// second flash almost immediately.
- (void)stopFirstFlash{
    [self setFlashOnOff:[NSNumber numberWithBool:false]];
    [self performSelector:@selector(flashSecond) withObject:nil afterDelay:0.01];           // between flash
}

// Flash sequence stage 3: re-light the torch and schedule the actual
// capture one second later.
- (void)flashSecond{
    [self setFlashOnOff:[NSNumber numberWithBool:true]];
//    [self performSelector:@selector(setFocusPointValue:) withObject:[NSValue valueWithCGPoint:lastestFocusPoint] afterDelay:0.1];
    [self performSelector:@selector(captureWithFlashDelay) withObject:nil afterDelay:1];    // second flash
}

// Return from still-edit mode to the live camera preview: drop the
// captured texture, restart the session, stop the edit-mode animation,
// disable skin smoothing, and reset focus to the center.
-(void)backToCamera{
    DELETE_SAFELY(texCapture);
    [self.captureSession startRunning];
    [self stopAnimation];
    isUsingCamera = true;
    self.isUseSmoothSkin = false;
    
    [self setFocusPoint:CGPointMake(0.5, 0.5)];
}

// Switch between the front and back cameras. If the new input cannot be
// added to the session, the previous input is restored.
- (void)rotateCamera{
    
	if([self hasFrontCamera] == false)
        return;
	
    NSError *error;
    AVCaptureDeviceInput *newVideoInput;
    AVCaptureDevicePosition currentCameraPosition = [[captureInput device] position];
    
    // Toggle the desired position and remember which camera is active.
    if (currentCameraPosition == AVCaptureDevicePositionBack){
        currentCameraPosition = AVCaptureDevicePositionFront;
        isUseBackCamera = false;
    }
    else{
        currentCameraPosition = AVCaptureDevicePositionBack;
        isUseBackCamera = true;
    }
    
    // Find the device at the desired position.
    AVCaptureDevice *newDevice = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
	for (AVCaptureDevice *device in devices){
		if ([device position] == currentCameraPosition){
			newDevice = device;
		}
	}
    newVideoInput = [[[AVCaptureDeviceInput alloc] initWithDevice:newDevice error:&error] autorelease];
    
    if (newVideoInput != nil){
        [captureSession beginConfiguration];
        
        // Swap the session's input; on failure re-add the old one.
        [captureSession removeInput:captureInput];
        if ([captureSession canAddInput:newVideoInput]){
            [captureSession addInput:newVideoInput];
            // NOTE(review): under MRC the ivar stores an autoreleased
            // object without retaining it — it stays alive only because the
            // session retains its inputs; confirm this ownership is
            // intentional.
            captureInput = newVideoInput;
            captureDevice = newDevice;
        }
        else{
            [captureSession addInput:captureInput];
        }
        [captureSession commitConfiguration];
    }
}

// True when the device exposes a front-facing camera.
- (bool) hasFrontCamera{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]){
        if ([device position] == AVCaptureDevicePositionFront)
            return true;
    }
    return false;
}

@end
