/*
     File: AVCamViewController.m
 Abstract: A view controller that coordinates the transfer of information between the user interface and the capture manager.
  Version: 1.2
 
 Disclaimer: IMPORTANT:  This Apple software is supplied to you by Apple
 Inc. ("Apple") in consideration of your agreement to the following
 terms, and your use, installation, modification or redistribution of
 this Apple software constitutes acceptance of these terms.  If you do
 not agree with these terms, please do not use, install, modify or
 redistribute this Apple software.
 
 In consideration of your agreement to abide by the following terms, and
 subject to these terms, Apple grants you a personal, non-exclusive
 license, under Apple's copyrights in this original Apple software (the
 "Apple Software"), to use, reproduce, modify and redistribute the Apple
 Software, with or without modifications, in source and/or binary forms;
 provided that if you redistribute the Apple Software in its entirety and
 without modifications, you must retain this notice and the following
 text and disclaimers in all such redistributions of the Apple Software.
 Neither the name, trademarks, service marks or logos of Apple Inc. may
 be used to endorse or promote products derived from the Apple Software
 without specific prior written permission from Apple.  Except as
 expressly stated in this notice, no other rights or licenses, express or
 implied, are granted by Apple herein, including but not limited to any
 patent rights that may be infringed by your derivative works or by other
 works in which the Apple Software may be incorporated.
 
 The Apple Software is provided by Apple on an "AS IS" basis.  APPLE
 MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
 THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
 FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
 OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
 
 IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
 OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
 MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
 AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
 STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
 
 Copyright (C) 2011 Apple Inc. All Rights Reserved.
 
 */

#import "AVCamViewController.h"
#import "AVCamCaptureManager.h"
#import "AVCamRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import "UIImage+Resize.h"
#import "UIImageUtil.h"
#import "NSTimer+Blocks.h"
#import "SettingViewController.h"
#import "InfoViewController.h"

// KVO context token: the address of this pointer uniquely identifies the
// focus-mode observation registered in -viewDidLoad and checked in
// -observeValueForKeyPath:ofObject:change:context:.
static void *AVCamFocusModeObserverContext = &AVCamFocusModeObserverContext;

// Private class extension: gesture-recognizer delegate conformance for the
// (currently commented-out) tap-to-focus gestures.
@interface AVCamViewController () <UIGestureRecognizerDelegate>
@end

// Forward declarations for helpers implemented in the InternalMethods
// category near the bottom of this file.
@interface AVCamViewController (InternalMethods)
- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates;
- (void)tapToAutoFocus:(UIGestureRecognizer *)gestureRecognizer;
- (void)tapToContinouslyAutoFocus:(UIGestureRecognizer *)gestureRecognizer;
- (void)updateButtonStates;
@end

// Capture-manager delegate callbacks are implemented in a category at the
// bottom of the file.
@interface AVCamViewController (AVCamCaptureManagerDelegate) <AVCamCaptureManagerDelegate>
@end

@implementation AVCamViewController

// Explicit @synthesize (pre-auto-synthesis, MRC-era style); backing ivars are
// released in -dealloc and outlets are cleared in -viewDidUnload.
@synthesize roomName;
@synthesize contentView;
@synthesize captureManager;
@synthesize stillButton;
@synthesize rectView;
@synthesize lightLabel;
@synthesize statusLabel;
@synthesize recomendLabel;
@synthesize roomLabel;
@synthesize videoPreviewView;
@synthesize captureVideoPreviewLayer;

// Returns a short human-readable name for an AVCaptureFocusMode value,
// used by the focus overlay label in -viewDidLoad. Unknown modes yield
// an empty string.
- (NSString *)stringForFocusMode:(AVCaptureFocusMode)focusMode
{
	if (focusMode == AVCaptureFocusModeLocked) {
		return @"locked";
	}
	if (focusMode == AVCaptureFocusModeAutoFocus) {
		return @"auto";
	}
	if (focusMode == AVCaptureFocusModeContinuousAutoFocus) {
		return @"continuous";
	}
	return @"";
}

// MRC teardown. The KVO observation added in -viewDidLoad must be removed
// before captureManager (the observed object) is released; remaining lines
// release every retained property/ivar, then call through to super.
- (void)dealloc
{
    [_unit release];
    // Remove KVO first so no notification fires against a half-torn-down self.
    [self removeObserver:self forKeyPath:@"captureManager.videoInput.device.focusMode"];
	[captureManager release];
    [videoPreviewView release];
	[captureVideoPreviewLayer release];
    [stillButton release];	

    [rectView release];
    [lightLabel release];
    [contentView release];
    [statusLabel release];
    [recomendLabel release];
    [roomLabel release];
    [super dealloc];
}

// Classifies a measured light value against per-room illuminance thresholds,
// records the matching level in _lightType (consumed by -recommend), and
// returns the user-facing status string.
// Returns the placeholder @"状态" when no room has been selected.
- (NSString*)statusForRoomLight:(CGFloat)light
{
    if (nil == roomName) {
        return @"状态";
    }
    
    _lightType = eLightType0;
    
    // Default thresholds, used for any unrecognized room name.
    NSInteger thresHold1 = 300;
    NSInteger thresHold2 = 150;
    NSInteger thresHold3 = 100;
    NSInteger thresHold4 = 50;
    
    // Rooms sharing identical threshold bands are grouped to avoid the
    // previous copy-pasted per-room branches.
    NSArray *band300_100 = [NSArray arrayWithObjects:@"起居室", @"厨房", @"儿童房", @"玄关", nil];
    NSArray *band200_100 = [NSArray arrayWithObjects:@"卫生间", @"衣帽间", nil];
    NSArray *band150_75  = [NSArray arrayWithObjects:@"卧室", @"家庭影音室", nil];
    
    if ([band300_100 containsObject:roomName]) {
        thresHold1 = 300;
        thresHold2 = 100;
        thresHold3 = 50;
        thresHold4 = 25;
    } else if ([band200_100 containsObject:roomName]) {
        thresHold1 = 200;
        thresHold2 = 100;
        thresHold3 = 50;
        thresHold4 = 25;
    } else if ([band150_75 containsObject:roomName]) {
        thresHold1 = 150;
        thresHold2 = 75;
        thresHold3 = 50;
        thresHold4 = 25;
    } else if ([roomName isEqualToString:@"餐厅"]) {
        thresHold1 = 300;
        thresHold2 = 150;
        thresHold3 = 75;
        thresHold4 = 50;
    } else if ([roomName isEqualToString:@"书房"]) {
        thresHold1 = 500;
        thresHold2 = 300;
        thresHold3 = 200;
        thresHold4 = 100;
    } else if ([roomName isEqualToString:@"储藏间"]) {
        thresHold1 = 150;
        thresHold2 = 100;
        thresHold3 = 50;
        thresHold4 = 25;
    }
    
    if (light >= thresHold1) {
        _lightType = eLightType1;
        return @"当前光照等级: 过度明亮";
    } else if (light >= thresHold2) {
        _lightType = eLightType2;
        return @"当前光照等级: 舒适";
    } else if (light >= thresHold3) {
        // Bug fix: this band previously set eLightType2, which made the
        // eLightType3 advice branch in -recommend unreachable.
        _lightType = eLightType3;
        return @"当前光照等级: 基本满足";
    } else if (light >= thresHold4) {
        _lightType = eLightType4;
        return @"当前光照等级: 偏暗";
    }
    
    _lightType = eLightType5;
    return @"当前光照等级: 过暗";
}

// Builds the advice string for the light level most recently classified by
// -statusForRoomLight:. Falls back to the bare header when _lightType has
// no matching advice (e.g. eLightType0).
- (NSString*) recommend
{
    NSString * recStr = @"建议和意见 :";
    NSString * advice = nil;
    
    if (_lightType == eLightType1) {
        advice = @"\n请降低亮度";
    } else if (_lightType == eLightType2) {
        advice = @"\n亮度刚好，请保持";
    } else if (_lightType == eLightType3) {
        advice = @"\n可以略微提高亮度";
    } else if (_lightType == eLightType4) {
        advice = @"\n亮度偏暗，请提高室内亮度";
    } else if (_lightType == eLightType5) {
        advice = @"\n亮度过暗，有害眼睛";
    }
    
    if (advice != nil) {
        return [recStr stringByAppendingString:advice];
    }
    return recStr;
}

// Refreshes the three info labels from the last measured value (_luxLight),
// converting lux to foot-candles when the preferred unit is not "lux".
- (void) updateLabelStatus
{
    NSString *brightnessText;
    if ([_unit isEqualToString:@"lux"]) {
        brightnessText = [NSString stringWithFormat:@"亮度:%.2flux", _luxLight];
    } else {
        // 1 lux == 0.0929 foot-candles.
        brightnessText = [NSString stringWithFormat:@"亮度:%.2ffc", _luxLight*0.0929];
    }
    lightLabel.text = brightnessText;
    
    // statusForRoomLight: also updates _lightType, which -recommend reads,
    // so these two calls must stay in this order.
    statusLabel.text = [self statusForRoomLight:_luxLight];
    recomendLabel.text = [self recommend];
}

// Designated initializer: seeds the unit preference with "lux" and marks the
// last measurement as invalid (-1) until a still image has been analyzed.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Retain to balance the releases in -viewWillAppear: and -dealloc.
        // The original assigned the literal without retaining; that only
        // works because constant strings ignore release — keep MRC ownership
        // symmetric instead of relying on that.
        _unit = [@"lux" retain];
        _luxLight = -1;
    }
    return self;
}

// Re-reads the preferred unit from user defaults (it may have been changed in
// SettingViewController while this view was covered) and refreshes the labels
// if a valid measurement already exists.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    
    NSString * curUnit = [[NSUserDefaults standardUserDefaults] objectForKey:@"keyUnit"];
    BOOL unitChanged = (curUnit != nil) && ![curUnit isEqualToString:_unit];
    if (unitChanged) {
        // MRC: swap ownership from the old unit string to the new one.
        [_unit release];
        _unit = [curUnit retain];
        if (_luxLight > 0) {
            [self updateLabelStatus];
        }
    }
}

// Builds the capture pipeline on first load: creates the capture manager,
// fixes flash/focus/exposure at the measurement rect, installs the video
// preview layer beneath the UI overlays, starts the session asynchronously,
// and registers KVO for focus-mode changes (removed again in -dealloc).
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Red outline marking the region whose brightness is measured.
    rectView.layer.borderWidth = 1.0f;
    rectView.layer.borderColor = [UIColor redColor].CGColor;
    
    roomLabel.text = roomName;
    lightLabel.layer.cornerRadius = 5;
    
	if ([self captureManager] == nil) {
		AVCamCaptureManager *manager = [[AVCamCaptureManager alloc] init];
		[self setCaptureManager:manager];
		[manager release];
		
		[[self captureManager] setDelegate:self];

		if ([[self captureManager] setupSession]) 
        {
            // Pin capture behavior to the center of the measurement rect:
            // flash off, continuous focus and continuous auto-exposure there,
            // so readings are comparable between captures.
            CGPoint convertedFocusPoint = [self convertToPointOfInterestFromViewCoordinates:self.rectView.center];
            [self.captureManager flashMode:AVCaptureFlashModeOff];
            [self.captureManager continuousFocusAtPoint:convertedFocusPoint];
            [self.captureManager exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:convertedFocusPoint];
            //[self.captureManager whiteBalanceeMode:AVCaptureWhiteBalanceModeLocked];
            
            // Create video preview layer and add it to the UI
			AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:[[self captureManager] session]];
			UIView *view = [self videoPreviewView];
			CALayer *viewLayer = [view layer];
			[viewLayer setMasksToBounds:YES];
			
			CGRect bounds = [view bounds];
			[newCaptureVideoPreviewLayer setFrame:bounds];
			
			if ([newCaptureVideoPreviewLayer isOrientationSupported]) {
				[newCaptureVideoPreviewLayer setOrientation:AVCaptureVideoOrientationPortrait];
			}
			
			[newCaptureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
			
            // Insert beneath the first existing sublayer so overlay views
            // (labels, the measurement rect) stay visible above the preview.
			[viewLayer insertSublayer:newCaptureVideoPreviewLayer below:[[viewLayer sublayers] objectAtIndex:0]];
			
			[self setCaptureVideoPreviewLayer:newCaptureVideoPreviewLayer];
            [newCaptureVideoPreviewLayer release];
			
            // Start the session. This is done asychronously since -startRunning doesn't return until the session is running.
			dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
				[[[self captureManager] session] startRunning];
			});
			
            [self updateButtonStates];
			
            // Create the focus mode UI overlay
			UILabel *newFocusModeLabel = [[UILabel alloc] initWithFrame:CGRectMake(10, 10, viewLayer.bounds.size.width - 20, 20)];
			[newFocusModeLabel setBackgroundColor:[UIColor clearColor]];
			[newFocusModeLabel setTextColor:[UIColor colorWithRed:1.0 green:1.0 blue:1.0 alpha:0.50]];
			AVCaptureFocusMode initialFocusMode = [[[captureManager videoInput] device] focusMode];
			[newFocusModeLabel setText:[NSString stringWithFormat:@"focus: %@", [self stringForFocusMode:initialFocusMode]]];
			[view addSubview:newFocusModeLabel];
            // The observation is identified by AVCamFocusModeObserverContext
            // and removed in -dealloc. NOTE(review): the label itself is not
            // retained anywhere, so the observer cannot update its text later.
			[self addObserver:self forKeyPath:@"captureManager.videoInput.device.focusMode" options:NSKeyValueObservingOptionNew context:AVCamFocusModeObserverContext];
            [newFocusModeLabel release];
            
            // Add a single tap gesture to focus on the point tapped, then lock focus
//			UITapGestureRecognizer *singleTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapToAutoFocus:)];
//			[singleTap setDelegate:self];
//			[singleTap setNumberOfTapsRequired:1];
//			[view addGestureRecognizer:singleTap];
//			
//            // Add a double tap gesture to reset the focus mode to continuous auto focus
//			UITapGestureRecognizer *doubleTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapToContinouslyAutoFocus:)];
//			[doubleTap setDelegate:self];
//			[doubleTap setNumberOfTapsRequired:2];
//			[singleTap requireGestureRecognizerToFail:doubleTap];
//			[view addGestureRecognizer:doubleTap];
//			
//			[doubleTap release];
//			[singleTap release];
		}		
	}
}

// KVO callback. The focus-mode branch is intentionally recognized (so the
// notification is not forwarded to super) but currently does nothing: the
// overlay label created in -viewDidLoad is not retained, so there is nothing
// to update here. Unrecognized contexts go to super, per KVO convention.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if (context == AVCamFocusModeObserverContext) {
        // Update the focus UI overlay string when the focus mode changes
	} else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}

// Computes the average luma (BT.601 Y) of `rect` within `image`.
// The image is first normalized to 640x960 so `rect` is interpreted in that
// fixed coordinate space regardless of the source image's resolution.
// Returns 0 when the crop fails.
- (CGFloat) calImageLight:(UIImage*)image forRect:(CGRect)rect
{
    CGFloat aveY = 0;
    
    UIImage * resizeImg = [image resizedImage:CGSizeMake(640, 960) interpolationQuality:kCGInterpolationHigh];
    UIImage * cropImg = [resizeImg croppedImage:rect];
    if (cropImg) 
    {
        int width = cropImg.size.width;
        int height = cropImg.size.height;
        
        // We own the pixel buffer returned by getimageData: and free it below.
        uint32_t * origData = [UIImageUtil getimageData:cropImg];
        uint32_t * data = origData;
        CGFloat sumY = 0;
        for (int j = 0; j < height; ++j) 
        {
            CGFloat rowY = 0;
            for (int i = 0; i < width; ++i) {
                // One 32-bit pixel, addressed byte-wise. The BT.601 weights
                // (0.299 R, 0.587 G, 0.114 B) are applied to bytes 3/2/1.
                // NOTE(review): this assumes getimageData: yields pixels with
                // B,G,R in bytes 1,2,3 — confirm against UIImageUtil.
                uint8_t * eachPixel = (uint8_t*)data;
                CGFloat y = 0.299*eachPixel[3] + 0.587*eachPixel[2] + 0.114*eachPixel[1];
                rowY += y;
                ++data;
            }
            // Average each row first, then average the row means, keeping the
            // accumulator small for large crops.
            sumY += (rowY/width);
        }
        aveY = sumY/height;
        
        //NSLog(@"width = %d, height = %d, aveY = %f", width, height, aveY);
        
        free(origData);
    }
    return aveY;
}

// Converts an exposure-normalized gray value into an approximate lux reading
// via an empirically fitted cubic polynomial (coefficients from calibration).
- (CGFloat) interpolateLightValue:(CGFloat)gray
{
    return 0.000002*gray*gray*gray - 0.002099*gray*gray + 1.91*gray;
}

// Stops the measurement timer when continuous capture is being turned off.
// NOTE(review): the continuous path (isContinue == YES) currently does
// nothing — this method looks unfinished; confirm the intended behavior.
- (void) captureAndCompute:(BOOL)isContinue
{
    if (!isContinue) {
        [_timer invalidate];
        _timer = nil;
    }

}

// Cancels any pending measurement timer and deselects the capture button.
- (void) stopCapture
{
    if (_timer != nil) {
        [_timer invalidate];
        _timer = nil;
    }
    self.stillButton.selected = NO;
}


#pragma mark Toolbar Actions

// Switches between the available cameras (no-op when only one exists;
// see -updateButtonStates for the related button gating).
- (IBAction)toggleCamera:(id)sender
{
    [self.captureManager toggleCamera];
    
    // An initial refocus after switching was considered but left disabled:
    //[[self captureManager] continuousFocusAtPoint:CGPointMake(.5f, .5f)];
}

// Pops back to the previous screen in the navigation stack.
- (IBAction)back {
    [[self navigationController] popViewControllerAnimated:YES];
}

// Presents the unit-settings screen with a horizontal flip transition.
// (The new unit is picked up again in -viewWillAppear:.)
- (IBAction)settingClick
{
    SettingViewController *controller = [[[SettingViewController alloc] initWithNibName:@"SettingViewController" bundle:nil] autorelease];
    controller.modalTransitionStyle = UIModalTransitionStyleFlipHorizontal;
    [self presentModalViewController:controller animated:YES];
}

// Presents the info screen with a horizontal flip transition.
- (IBAction)showInfo {
    InfoViewController *controller = [[[InfoViewController alloc] initWithNibName:@"InfoViewController" bundle:nil] autorelease];
    controller.modalTransitionStyle = UIModalTransitionStyleFlipHorizontal;
    [self presentModalViewController:controller animated:YES];
}

// Toggles video recording: starts when idle, stops when already recording.
- (IBAction)toggleRecording:(id)sender
{
    BOOL isRecording = [[[self captureManager] recorder] isRecording];
    if (isRecording) {
        [[self captureManager] stopRecording];
    } else {
        [[self captureManager] startRecording];
    }
}

// Captures a still image and flashes the screen white as visual feedback.
// The button is disabled here and re-enabled by the capture-manager delegate
// callback once the image has been processed.
- (IBAction)captureStillImage:(id)sender
{
    [[self stillButton] setEnabled:NO];
    [[self captureManager] captureStillImage];
    
    // White overlay over the preview, faded out then removed.
    UIView *whiteOverlay = [[UIView alloc] initWithFrame:[[self videoPreviewView] frame]];
    whiteOverlay.backgroundColor = [UIColor whiteColor];
    [self.view.window addSubview:whiteOverlay];
    
    [UIView animateWithDuration:.4f
                     animations:^{
                         whiteOverlay.alpha = 0.f;
                     }
                     completion:^(BOOL finished){
                         [whiteOverlay removeFromSuperview];
                         // Balances the alloc above (MRC).
                         [whiteOverlay release];
                     }
     ];
}

// Runs a 5-second on-screen countdown, then captures a still image.
// The function-static flag prevents overlapping countdowns, and all window
// interaction is disabled until the final timer re-enables it.
- (IBAction)timerClick {
    static BOOL sCanStart = YES;
    if (sCanStart)
    {
        sCanStart = NO;
        self.view.window.userInteractionEnabled = NO;
        
        // Countdown label overlaid on the measurement rect; autoreleased and
        // retained by its superview until removed by the last timer below.
        UILabel * lab = [[[UILabel alloc] initWithFrame:self.rectView.bounds] autorelease];
        lab.backgroundColor = [UIColor colorWithRed:.2f green:.2f blue:.2f alpha:.75f];
        lab.textColor = [UIColor whiteColor];
        lab.textAlignment = UITextAlignmentCenter;
        lab.text = @"5";
        lab.font = [UIFont boldSystemFontOfSize:38];
        [self.rectView addSubview:lab];
        
        // Five one-shot timers (block API from the NSTimer+Blocks category)
        // tick the label down once per second; the last fires the capture and
        // restores interaction.
        [NSTimer scheduledTimerWithTimeInterval:1 block:^{
            lab.text = @"4";
        } repeats:NO];
        [NSTimer scheduledTimerWithTimeInterval:2 block:^{
            lab.text = @"3";
        } repeats:NO];
        [NSTimer scheduledTimerWithTimeInterval:3 block:^{
            lab.text = @"2";
        } repeats:NO];
        [NSTimer scheduledTimerWithTimeInterval:4 block:^{
            lab.text = @"1";
        } repeats:NO];
        [NSTimer scheduledTimerWithTimeInterval:5 block:^{
            [lab removeFromSuperview];
            [self captureStillImage:nil];
            self.view.window.userInteractionEnabled = YES;
            sCanStart = YES;
        } repeats:NO];
    }
}

// Releases the outlets when the view is unloaded under memory pressure
// (pre-iOS 6 lifecycle; setters release the old values under MRC).
- (void)viewDidUnload {
    self.rectView = nil;
    self.lightLabel = nil;
    self.contentView = nil;
    self.statusLabel = nil;
    self.recomendLabel = nil;
    self.roomLabel = nil;
    [super viewDidUnload];
}
@end

@implementation AVCamViewController (InternalMethods)

// Convert from view coordinates to camera coordinates, where {0,0} represents the top left of the picture area, and {1,1} represents
// the bottom right in landscape mode with the home button on the right.
// The math branches on the preview layer's videoGravity: plain resize is a
// simple scale/swap; aspect (letterbox) and aspect-fill (crop) must account
// for the bars or cropped margins. Falls back to the frame center (.5,.5)
// when the tap lands outside the visible video or no video port is found.
- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates 
{
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = [[self videoPreviewView] frame].size;
    
    // A mirrored preview (front camera) flips x before any conversion.
    if ([captureVideoPreviewLayer isMirrored]) {
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }    

    if ( [[captureVideoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize] ) {
		// Scale, switch x and y, and reverse x
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [[[self captureManager] videoInput] ports]) {
            if ([port mediaType] == AVMediaTypeVideo) {
                // The clean aperture gives the sensor's usable picture size,
                // needed to compare the video's aspect with the view's.
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;

                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;
                
                if ( [[captureVideoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect] ) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
						// If point is inside letterboxed area, do coordinate conversion; otherwise, don't change the default value returned (.5,.5)
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
							// Scale (accounting for the letterboxing on the left and right of the video preview), switch x and y, and reverse x
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
						// If point is inside letterboxed area, do coordinate conversion. Otherwise, don't change the default value returned (.5,.5)
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
							// Scale (accounting for the letterboxing on the top and bottom of the video preview), switch x and y, and reverse x
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([[captureVideoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
					// Scale, switch x and y, and reverse x
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2; // Account for cropped height
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2); // Account for cropped width
                        xc = point.y / frameSize.height;
                    }
                }
                
                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }
    
    return pointOfInterest;
}

// Performs a one-shot auto focus at the tapped location; the device locks
// focus once the operation completes. No-op when the device does not support
// a focus point of interest.
- (void)tapToAutoFocus:(UIGestureRecognizer *)gestureRecognizer
{
    if (![[[captureManager videoInput] device] isFocusPointOfInterestSupported]) {
        return;
    }
    CGPoint tapPoint = [gestureRecognizer locationInView:[self videoPreviewView]];
    [captureManager autoFocusAtPoint:[self convertToPointOfInterestFromViewCoordinates:tapPoint]];
}

// Restores continuous auto focus, centered in the frame. No-op when the
// device does not support a focus point of interest.
- (void)tapToContinouslyAutoFocus:(UIGestureRecognizer *)gestureRecognizer
{
    if ([[[captureManager videoInput] device] isFocusPointOfInterestSupported]) {
        [captureManager continuousFocusAtPoint:CGPointMake(.5f, .5f)];
    }
}

// Enables the still-capture button iff at least one camera is available.
// (The original nested branches — <2 then <1 — reduce to exactly this.)
// The UIKit update is deferred to the main run loop since this may be
// called from a non-main thread.
- (void)updateButtonStates
{
	NSUInteger cameraCount = [[self captureManager] cameraCount];
    
    CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^(void) {
        BOOL hasCamera = (cameraCount >= 1);
        [[self stillButton] setEnabled:hasCamera];
    });
}

@end

@implementation AVCamViewController (AVCamCaptureManagerDelegate)


// Delegate callback: surfaces a capture error to the user via an alert,
// scheduled on the main run loop since delegate calls may arrive elsewhere.
- (void)captureManager:(AVCamCaptureManager *)captureManager didFailWithError:(NSError *)error
{
    CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^(void) {
        UIAlertView *alert = [[[UIAlertView alloc] initWithTitle:[error localizedDescription]
                                                         message:[error localizedFailureReason]
                                                        delegate:nil
                                               cancelButtonTitle:NSLocalizedString(@"OK", @"OK button title")
                                               otherButtonTitles:nil] autorelease];
        [alert show];
    });
}

// Delegate callback: derives a lux estimate from the captured still image.
// The measured average luma is normalized by an exposure factor computed from
// the EXIF ExposureTime and FNumber, then mapped to lux via the fitted curve.
- (void)captureManagerStillImageCaptured:(AVCamCaptureManager *)captureManager capturedImage:(UIImage *)image addtionInfo:(NSDictionary *)info
{    
    NSNumber * expose = [info objectForKey:@"ExposureTime"];
    NSNumber * fNumber = [info objectForKey:@"FNumber"];
    CGFloat aperture = [fNumber floatValue];
    if (aperture <= 0) {
        // A missing or zero FNumber (nil messaging yields 0) would divide by
        // zero below and poison the result with inf/NaN; fall back to 1.
        aperture = 1;
    }
    CGFloat val = 100*[expose floatValue]/aperture;
    val = powf(10, val)/10;
    
    CGFloat light = [self calImageLight:image forRect:rectView.frame];
    _luxLight = [self interpolateLightValue:light/val];
    
    // UIKit is not thread-safe and this callback may arrive off the main
    // thread: marshal the label refresh (previously done inline here) to the
    // main run loop along with re-enabling the capture button.
    CFRunLoopPerformBlock(CFRunLoopGetMain(), kCFRunLoopCommonModes, ^(void) {
        [self updateLabelStatus];
        [[self stillButton] setEnabled:YES];
    });
}

// Delegate callback: a camera or microphone was attached/removed; refresh
// the capture button availability accordingly.
- (void)captureManagerDeviceConfigurationChanged:(AVCamCaptureManager *)captureManager
{
	[self updateButtonStates];
}

@end
