//
//  CamCaptureManager.m
//  CamH
//
//  Created by ghu on 12-7-19.
//  Copyright (c) 2012年 __MyCompanyName__. All rights reserved.
//

#import "CamCaptureManager.h"
#import "AssetsLibrary/AssetsLibrary.h"

static CamCaptureManager* theSingleton = nil;

// Class extension: private storage backing the m_bEnableVideoOutput property.
@interface CamCaptureManager()
{
    // Mirrored onto the video-data output connection's enabled flag
    // (see setEnableVideoOutput:).
    BOOL _enableVideoOutput;
}

@end

@implementation CamCaptureManager

@synthesize m_pSession;
@synthesize m_pVideoInput;
@synthesize m_pStillImageOutput;
@synthesize m_pVideoOutput;

@synthesize m_bEnableVideoOutput;
@synthesize m_orientation;

@synthesize delegate;

// Returns the process-wide capture manager, creating it on first use.
// The lazy creation is guarded so two threads racing here cannot each
// allocate an instance (dispatch_once is unsuitable because
// releaseTheSingleton resets the instance for re-creation).
+(CamCaptureManager*) singleton
{
    @synchronized (self)
    {
        if (theSingleton == nil)
        {
            theSingleton = [[CamCaptureManager alloc] init];
        }
    }
    return theSingleton;
}

// Tears down the shared instance (MRC release) so a later call to
// +singleton builds a fresh one. Safe to call when no instance exists.
+(void) releaseTheSingleton
{
    if ( theSingleton == nil )
        return;

    [theSingleton release];
    theSingleton = nil;
}

// Designated initializer: video-output delivery off, photo state idle,
// portrait orientation, automatic flash.
-(CamCaptureManager*) init
{
    if ((self = [super init]) == nil)
        return nil;

    _enableVideoOutput = NO;
    m_takePhotoState = TAKEPHOTO_STATE_IDLE;
    m_orientation = AVCaptureVideoOrientationPortrait;
    m_pFlashMode = AVCaptureFlashModeAuto;

    return self;
}

/*
 Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Supported pixel formats are
 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and kCVPixelFormatType_32BGRA,
 except on iPhone 3G, where the supported pixel formats are kCVPixelFormatType_422YpCbCr8 and kCVPixelFormatType_32BGRA.
 */
// Returns YES iff `type` appears in the output's list of supported
// CVPixelBuffer format types.
// BUG FIX: the previous implementation computed the result but then
// unconditionally returned YES, making the RGBA fallback in setupSession
// unreachable. Also compares via unsignedIntValue, since OSType
// (FourCharCode) is an unsigned 32-bit value.
- (BOOL) isVideoOutPut:(AVCaptureVideoDataOutput*) output SupportedVideoFormat:(OSType)type
{
    for (NSNumber *formatNumber in [output availableVideoCVPixelFormatTypes]) {
        if ((OSType)[formatNumber unsignedIntValue] == type) {
            return YES;
        }
    }
    return NO;
}


// Builds the capture pipeline: creates the device input, the still-image
// output and the video-data output, then creates the session and attaches
// all of them. Always returns YES (failures are silently ignored here).
-(BOOL) setupSession
{
    BOOL bSuccess = NO;
    
    // Default both cameras' flash mode to auto where the hardware supports it.
    if ( [[self backFacingCamera] hasFlash] )
    {
        if ([[self backFacingCamera] lockForConfiguration:nil]) 
        {
            if ([[self backFacingCamera] isFlashModeSupported:AVCaptureFlashModeAuto]) 
                [[self backFacingCamera] setFlashMode:AVCaptureFlashModeAuto];
            [[self backFacingCamera] unlockForConfiguration];            
        }
    }
    // NOTE(review): front cameras rarely have a flash, so this branch is
    // usually skipped; kept for symmetry with the back camera above.
    if ( [[self frontFacingCamera] hasFlash] )
    {
        if ([[self frontFacingCamera] lockForConfiguration:nil]) 
        {
            if ([[self frontFacingCamera] isFlashModeSupported:AVCaptureFlashModeAuto]) 
                [[self frontFacingCamera] setFlashMode:AVCaptureFlashModeAuto];
            [[self frontFacingCamera] unlockForConfiguration];            
        }
    }
    
    // Capture input: start on the back camera (creation errors are discarded).
    m_pVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:nil];
   
    // Still-image output, JPEG-encoded. Observe "capturingStillImage" so the
    // delegate can react while a photo is in flight (handled in
    // observeValueForKeyPath:ofObject:change:context:).
    m_pStillImageOutput= [[AVCaptureStillImageOutput alloc] init];
    [m_pStillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:nil];
    NSDictionary *outputSetting = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [m_pStillImageOutput setOutputSettings:outputSetting];
    [outputSetting release];
    
    // Video-data (preview frame) output: BGRA pixels, dropping late frames
    // instead of queueing them.
    m_pVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [m_pVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
    OSType type = kCVPixelFormatType_32BGRA;
    // NOTE(review): as written, isVideoOutPut:SupportedVideoFormat: always
    // returns YES, so this RGBA fallback is effectively unreachable.
    if (![self isVideoOutPut:m_pVideoOutput SupportedVideoFormat:type]) 
        type = kCVPixelFormatType_32RGBA;
    [m_pVideoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:type] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];    
    // Deliver sample buffers to this object on the main queue
    // (see captureOutput:didOutputSampleBuffer:fromConnection:).
    [m_pVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    [[m_pVideoOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:self.m_bEnableVideoOutput];
    
    // Create the session and connect the input and both outputs.
    m_pSession = [[AVCaptureSession alloc] init];
    // Observe "running" so the delegate is notified when the session
    // starts or stops (forwarded in observeValueForKeyPath:...).
    [m_pSession addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:nil];
    [m_pSession beginConfiguration];
    
    if ([m_pSession canAddInput:m_pVideoInput]) {
        [m_pSession addInput:m_pVideoInput];
    }
    if ([m_pSession canAddOutput:m_pStillImageOutput]) {
        [m_pSession addOutput:m_pStillImageOutput];
    }    
    if ([m_pSession canAddOutput:m_pVideoOutput]) {
        [m_pSession addOutput:m_pVideoOutput];
    }
    
    [m_pSession commitConfiguration];
    
    // Switch to the photo preset in a second configuration transaction.
    [m_pSession beginConfiguration];
    self.m_pSession.sessionPreset = AVCaptureSessionPresetPhoto;
    [m_pSession commitConfiguration];
    
//    [self OnSwitchCamera];
    bSuccess = YES;
    return bSuccess;   
}

// Returns the first video capture device at the requested position
// (front/back), or nil when no such device exists.
- (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition) position
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
    {
        if ([candidate position] == position)
            return candidate;
    }
    return nil;
}

// Convenience accessor for the front-facing camera device (nil if absent).
- (AVCaptureDevice *) frontFacingCamera
{
    AVCaptureDevice *device = [self cameraWithPosition:AVCaptureDevicePositionFront];
    return device;
}

// Convenience accessor for the back-facing camera device (nil if absent).
- (AVCaptureDevice *) backFacingCamera
{
    AVCaptureDevice *device = [self cameraWithPosition:AVCaptureDevicePositionBack];
    return device;
}

// Position (front/back) of the device currently feeding the session.
-(AVCaptureDevicePosition)getCaptureDevicePosition
{
    AVCaptureDevice *currentDevice = [m_pVideoInput device];
    return [currentDevice position];
}

// Hook invoked from toggleCamera after the capture input is swapped.
// Currently a no-op: the per-camera preset-caching logic below was
// disabled and is kept only for reference.
- (void) onSwitchCamera
{
//    AVCaptureDevicePosition pos = [self getCaptureDevicePosition];
    
//    if (AVCaptureDevicePositionBack == pos) {
//        if (self.backCameraSupportedPresets == nil) {
//            self.backCameraSupportedPresets = [ARCCamUtilities getSupportedSessionPreset:self.session];
//        }
//        self.curCameraSupportedPresets = self.backCameraSupportedPresets;
//    }
//    else {
//        if (self.frontCameraSupportedPresets == nil) {
//            self.frontCameraSupportedPresets = [ARCCamUtilities getSupportedSessionPreset:self.session];
//        }
//        self.curCameraSupportedPresets = self.frontCameraSupportedPresets;
//    }
}


// Asynchronously captures one still frame, writes it to the device's
// Saved Photos album, and notifies the delegate with the raw sample buffer.
// Capture and album-write errors are both reported through
// captureManager:didFailWithError:.
-(void)captureStillImage
{
    // Orient the still-image connection to match the current UI orientation.
    AVCaptureConnection *stillImageConnection = [CamUtility connectionWithMediaType:AVMediaTypeVideo fromConnections: self.m_pStillImageOutput.connections];
    if ([stillImageConnection isVideoOrientationSupported])
        [stillImageConnection setVideoOrientation:self.m_orientation];
    
    [self.m_pStillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) 
    {
        // Runs after the album write finishes, or directly with the capture
        // error when no buffer was produced; only errors reach the delegate.
        ALAssetsLibraryWriteImageCompletionBlock completionBlock = ^(NSURL *assetURL, NSError *error)
        {
            if (error)
            {
                if ([self.delegate respondsToSelector:@selector(captureManager:didFailWithError:)])
                    [self.delegate captureManager:self didFailWithError:error];
            }
        };
        
        if (imageDataSampleBuffer) {
            // JPEG-encode the sample buffer and save it to the album,
            // preserving the image's orientation metadata. MRC: image and
            // library are released here; the library is retained by the
            // pending write until completionBlock fires.
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [[UIImage alloc] initWithData:imageData];
            
            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
            [library writeImageToSavedPhotosAlbum:[image CGImage] orientation:(ALAssetOrientation)[image imageOrientation] completionBlock:completionBlock];
            [image release];
            [library release];
        }
        else
            completionBlock(nil, error);
        
        // Hand the raw buffer (or the capture error) to the delegate as well.
        if ([self.delegate respondsToSelector:@selector(captureManagerStillImageCaptured:didOutputSampleBuffer:withError:)])
        {
            [self.delegate captureManagerStillImageCaptured:self didOutputSampleBuffer:imageDataSampleBuffer withError:error];
        }
        // Capture round-trip complete; allow the next photo request.
        self->m_takePhotoState = TAKEPHOTO_STATE_IDLE;
    } ];
}

// KVO callback. Forwards "capturingStillImage" changes (registered on the
// still-image output in setupSession) and "running" changes (registered on
// the session) to the delegate; any other key path goes to super.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    BOOL newState = [[change objectForKey:NSKeyValueChangeNewKey] boolValue];

    if ([keyPath isEqualToString:@"capturingStillImage"])
    {
        if ([[self delegate] respondsToSelector:@selector(captureManager:changeStillImageCapturingState:)])
            [[self delegate] captureManager:self changeStillImageCapturingState:newState];
    }
    else if ([keyPath isEqualToString:@"running"])
    {
        if ([[self delegate] respondsToSelector:@selector(captureManager:changeSessionState:)])
            [[self delegate] captureManager:self changeSessionState:newState];
    }
    else
    {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}
        
// YES when the device currently feeding the session has a flash unit.
- (BOOL) hasFlash
{
    return [[self.m_pVideoInput device] hasFlash];
}

// YES when the device currently feeding the session has a torch.
- (BOOL) hasTorch
{
    return [[self.m_pVideoInput device] hasTorch];
}

// Applies the given flash mode to the current capture device. No-op when
// the device lacks a flash or does not support the mode. Lock failures are
// reported to the delegate via captureManager:didFailWithError:.
- (void) flashMode:(AVCaptureFlashMode)mode
{
    AVCaptureDevice *device = self.m_pVideoInput.device;
    if (![device hasFlash] || ![device isFlashModeSupported:mode])
        return;

    // FIX: initialize error — the failure branch reads it, and an
    // uninitialized local would be undefined if the framework did not
    // write through the out-parameter.
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        [device setFlashMode:mode];
        [device unlockForConfiguration];
        self->m_pFlashMode = mode;  // remember the last mode we applied
    }
    else if ([[self delegate] respondsToSelector:@selector(captureManager:didFailWithError:)]) {
        [[self delegate] captureManager:self didFailWithError:error];
    }
}

// Applies the given torch mode to the current capture device. No-op when
// the device lacks a torch or does not support the mode. Lock failures are
// reported to the delegate via captureManager:didFailWithError:.
- (void) torchMode:(AVCaptureTorchMode) mode
{
    AVCaptureDevice *device = self.m_pVideoInput.device;
    if (![device hasTorch] || ![device isTorchModeSupported:mode])
        return;

    // FIX: initialize error — the failure branch reads it, and an
    // uninitialized local would be undefined if the framework did not
    // write through the out-parameter.
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        [device setTorchMode:mode];
        [device unlockForConfiguration];
    }
    else if ([[self delegate] respondsToSelector:@selector(captureManager:didFailWithError:)]) {
        [[self delegate] captureManager:self didFailWithError:error];
    }
}


// Number of physical video-capture devices available on this hardware.
- (NSUInteger) cameraCount
{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    return [videoDevices count];
}

// Switches the session's input between the front and back camera.
// Returns YES only when the new input was actually attached.
// Fixes over the previous version:
//   * NSError was read uninitialized when input creation failed.
//   * success was reported even when canAddInput: rejected the new input
//     and the old one was restored.
//   * goto replaced with structured early returns.
- (BOOL) toggleCamera
{
    if ([self cameraCount] <= 1)
        return NO;

    // Pick the opposite camera; bail out on an unrecognized position.
    AVCaptureDevicePosition position = [[m_pVideoInput device] position];
    AVCaptureDevice *targetDevice = nil;
    if (position == AVCaptureDevicePositionBack)
        targetDevice = [self frontFacingCamera];
    else if (position == AVCaptureDevicePositionFront)
        targetDevice = [self backFacingCamera];
    else
        return NO;

    NSError *error = nil;  // only meaningful when input creation fails
    AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:targetDevice error:&error];
    if (newVideoInput == nil) {
        if (error && [[self delegate] respondsToSelector:@selector(captureManager:didFailWithError:)]) {
            [[self delegate] captureManager:self didFailWithError:error];
        }
        return NO;
    }

    BOOL success = NO;
    [self.m_pSession beginConfiguration];
    [self.m_pSession removeInput:self.m_pVideoInput];
    if ([self.m_pSession canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
        self.m_pSession.sessionPreset = AVCaptureSessionPresetPhoto;
    }
    if ([self.m_pSession canAddInput:newVideoInput]) {
        [self.m_pSession addInput:newVideoInput];
        self.m_pVideoInput = newVideoInput;
        success = YES;
    } else {
        // Could not attach the new input: restore the previous one.
        [self.m_pSession addInput:self.m_pVideoInput];
    }
    [self.m_pSession commitConfiguration];

    [newVideoInput release];  // MRC: the session retains the attached input

    if (success) {
        [self onSwitchCamera];
    }
    return success;
}

// AVCaptureVideoDataOutputSampleBufferDelegate callback (registered in
// setupSession, main queue): forwards each preview frame to the delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (![self.delegate respondsToSelector:@selector(captureManagerPreview:didOutputSampleBuffer:)])
        return;

    [self.delegate captureManagerPreview:self didOutputSampleBuffer:sampleBuffer];
}

// Custom getter for the m_bEnableVideoOutput property, backed by the
// _enableVideoOutput ivar declared in the class extension.
-(BOOL)m_bEnableVideoOutput
{
    return _enableVideoOutput;
}

// Enables or disables preview-frame delivery by toggling the video-data
// output's video connection, and records the flag for setupSession to
// apply when the pipeline is (re)built.
-(void)setEnableVideoOutput:(BOOL)bEnable
{
    _enableVideoOutput = bEnable;
    AVCaptureConnection *videoConnection = [self.m_pVideoOutput connectionWithMediaType:AVMediaTypeVideo];
    [videoConnection setEnabled:_enableVideoOutput];
}



@end











