#import "PYCaptureSessionManager.h"
#import "Constants.h"

@implementation PYCaptureSessionManager

// MRC-era explicit @synthesize mapping properties to underscore-suffixed
// backing ivars. `delegate` uses an implicit ivar of the same name; its
// ownership semantics are declared in the header.
@synthesize imageToCaptureCount = imageToCaptureCount_;
@synthesize images = images_;
@synthesize delegate;

// Lazily-created shared singleton (see +instance).
static PYCaptureSessionManager *instance_;
// Serial queue used to drain and persist captured sample buffers off the main
// thread. NOTE(review): not declared static, so it has external linkage —
// presumably unintentional; confirm no other translation unit relies on it.
dispatch_queue_t picture_saving_queue_;

#pragma mark - Initialization

// Designated initializer: loads the per-tap capture count from user defaults,
// creates the operation/dispatch queues, the assets library, the CMBufferQueue
// that holds captured stills, and starts the AV capture pipeline.
- (id)init
{
    if (!(self = [super init]))
        return nil;

    // Log the sandbox Documents directories (diagnostic only).
    NSArray *documentsDirectories = [[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask];

    for (NSURL *documentsDir in documentsDirectories)
    {
        NSLog(@"Found document directory: %@\n", documentsDir);
    }

    effectiveScale = 1.0;

    NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults];
    int32_t captureCount = (int32_t)[userDefaults integerForKey:kCaptureCount];

    self.imageToCaptureCount = captureCount > 0 ? captureCount : kDefaultPicturesToCapturePerTap; // The default number of images we want to capture
    queueOperationsInProgressCount_ = 0;
    pictureTakingOperationQueue_ = [[NSOperationQueue alloc] init];
    picture_saving_queue_ = dispatch_queue_create("picture_saving_queue", DISPATCH_QUEUE_SERIAL);
    // FIX: +array returns an autoreleased object; storing it straight into the
    // ivar under MRC left a dangling pointer once the pool drained. Allocate an
    // owned instance instead.
    images_ = [[NSMutableArray alloc] init];
    // FIX: -[ALAssetsLibrary init] already returns a +1 (owned) reference; the
    // extra -retain leaked the library. One ownership reference is enough.
    assetLibrary_ = [[ALAssetsLibrary alloc] init];

    // FIX: the struct was previously only partially assigned, leaving fields
    // such as makeDataReady as uninitialized stack garbage that CMBufferQueue
    // could try to call. Zero-initialize the whole struct, then set only the
    // callback we actually implement (version 0 layout).
    CMBufferCallbacks callbacks = {0};
    callbacks.getDuration = &sampleBufferDurationCallback;

    OSStatus bufferCreateResult = CMBufferQueueCreate(kCFAllocatorDefault, kMaxPicturesToCapturePerTap, &callbacks, &sampleBufferQueue_);
    // OSStatus is a 32-bit int; cast to long to match %ld on all architectures.
    NSLog(@"CMBufferQueueCreate returned OSStatus %ld\n", (long)bufferCreateResult);

    [self setupAVCapture];

    return self;
}

// Returns the shared capture-session manager, creating it on first use.
// FIX: the previous bare nil-check raced if two threads called this
// concurrently (two instances, one leaked). dispatch_once makes the lazy
// creation thread-safe and is the canonical singleton idiom.
+(PYCaptureSessionManager *)instance
{
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        instance_ = [[PYCaptureSessionManager alloc] init];
    });

    return instance_;
}

// Builds the capture pipeline: photo-preset session, default video device as
// input, a JPEG still-image output, then starts the session running and caches
// the still-image connection. On any NSError the `bail:` path shows an alert
// and tears the pipeline down. NOTE(review): `session`/`stillImageOutput` are
// created with +new (owned under MRC) but no matching release is visible in
// -teardownAVCapture or -dealloc — confirm they are released elsewhere.
- (void)setupAVCapture
{
    NSError *error = nil;
	
	session = [AVCaptureSession new];
	[session setSessionPreset:AVCaptureSessionPresetPhoto];
	
    // Select a video device, make an input
	AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
	AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
	// AssertMacros require(): jumps to bail: if deviceInput creation failed.
	require( error == nil, bail );
	
	if ([session canAddInput:deviceInput])
		[session addInput:deviceInput];
	
    // Make a still image output
	stillImageOutput = [AVCaptureStillImageOutput new];
	if ([session canAddOutput:stillImageOutput])
		[session addOutput:stillImageOutput];
    
    [session startRunning];
    
    currentDevice = [UIDevice currentDevice];
    // Cache the video connection; effectiveScale was set to 1.0 in -init.
    stillImageConnection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillImageConnection setVideoScaleAndCropFactor:effectiveScale];
    [stillImageOutput setOutputSettings:[NSDictionary dictionaryWithObject:AVVideoCodecJPEG forKey:AVVideoCodecKey]];
    
bail:

	// Reached both on the success path (error == nil, alert skipped) and via
	// the require() jump above.
	if (error) 
    {
		UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"Failed with error %d", (int)[error code]]
															message:[error localizedDescription]
														   delegate:nil 
												  cancelButtonTitle:@"Dismiss" 
												  otherButtonTitles:nil];
		[alertView show];
        [alertView release];
		[self teardownAVCapture];
    }
}

// Installs an autoreleased preview layer for the capture session into the
// given view's layer tree and returns it so the caller can hold a reference.
-(AVCaptureVideoPreviewLayer*)setVideoPreviewView:(UIView*)previewView
{
    CALayer *hostLayer = [previewView layer];
    [hostLayer setMasksToBounds:YES];

    AVCaptureVideoPreviewLayer *layer =
        [[[AVCaptureVideoPreviewLayer alloc] initWithSession:session] autorelease];
    [layer setFrame:[hostLayer bounds]];
    [hostLayer addSublayer:layer];

    return layer;
}

#pragma mark - Queue Processing

// Enqueues one capture operation per requested still. The supplied block is
// copied to the heap and invoked later (with a thumbnail) when the buffer
// queue has fully drained — see -processBufferQueueWithCompletion:.
-(void)takePicturesWith:(pictureTaken)pictureTakenDelegate
{
    // FIX: release the block copied by any previous call before overwriting
    // the ivar — each burst otherwise leaked the prior heap copy.
    if (picture_taken_delegate_)
        Block_release(picture_taken_delegate_);
    picture_taken_delegate_ = Block_copy(pictureTakenDelegate);

    // Both counters are decremented atomically from capture/save callbacks.
    imageRemainingToCaptureCount_ = self.imageToCaptureCount;
    imageRemainingToSaveCount_ = self.imageToCaptureCount;

    for (uint32_t i = 0; i < self.imageToCaptureCount; i++)
    {
        [pictureTakingOperationQueue_ addOperationWithBlock:^(void)
        {
            [self takePictureWithIndex:i];
        }];
    }

    NSLog(@"Finished enqueueing image capture\n");
}

// Orients the still-image connection to match the current device orientation,
// then fires one asynchronous still capture; the completion handler forwards
// the sample buffer (or error) to -imageBufferWasCaptured:::.
// NOTE(review): multiple queued operations mutate the shared
// stillImageConnection orientation concurrently — confirm the operation queue
// serializes these, otherwise orientations can interleave.
-(void)takePictureWithIndex:(uint32_t)imageIndex
{
    UIDeviceOrientation deviceOrientation = [currentDevice orientation];
    AVCaptureVideoOrientation avcaptureOrientation = [self avOrientationForDeviceOrientation:deviceOrientation];
	[stillImageConnection setVideoOrientation:avcaptureOrientation];
    NSLog(@"Set video orientation to %d\n", avcaptureOrientation);
    
	[stillImageOutput 
     captureStillImageAsynchronouslyFromConnection:stillImageConnection
     completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
     {
         // Runs on an AVFoundation-owned queue, not the main thread.
         [self imageBufferWasCaptured:imageDataSampleBuffer :imageIndex :error];
     }];
}

// Completion path for one still capture: enqueues the sample buffer for later
// saving (the CMBufferQueue retains it), or on error just burns down the
// save counter. When the LAST capture completes, triggers -doneShooting.
-(void)imageBufferWasCaptured:(CMSampleBufferRef)imageDataSampleBuffer:(uint32_t)imageIndex:(NSError*)error
{
    NSLog(@"Received sample buffer with %ld samples. First sample size: %zd\n", (long)CMSampleBufferGetNumSamples(imageDataSampleBuffer), CMSampleBufferGetSampleSize(imageDataSampleBuffer, 0));
    
    if (error)
    {
        // This image will never reach the save pipeline.
        OSAtomicDecrement32(&imageRemainingToSaveCount_);
        NSLog(@"Image capture failed with error: %@\n", error);
    }
    else 
    {
        OSStatus enqueuResult = CMBufferQueueEnqueue(sampleBufferQueue_, imageDataSampleBuffer); // The queue retains the reference for us
        
        if(enqueuResult != 0)
            NSLog(@"CMBufferQueueEnqueue resulted in OSStatus %ld for image at index %d\n", (long)enqueuResult, imageIndex);
    }
    
    // FIX: use the value RETURNED by the atomic decrement instead of
    // re-reading the ivar afterwards. Two completion handlers racing could
    // otherwise both observe a stale count, skipping -doneShooting entirely
    // or invoking it twice.
    if (OSAtomicDecrement32(&imageRemainingToCaptureCount_) == 0)
        [self doneShooting];
}

// The block is only supposed to be used by the app delegate when processing in the background
-(void)processBufferQueueWithCompletion:(pictureProcessingCompleted)block
{
    if(CMBufferQueueGetBufferCount(sampleBufferQueue_) <= 0)
    {
        NSLog(@"CMBufferQueue is empty. Nothing to do, so exiting.");
        return;
    }
    
    int operationsInProgress = queueOperationsInProgressCount_;
    
    if(operationsInProgress >= kMaxConcurrentBufferOperations)
        return;
    
    if(OSAtomicCompareAndSwap32(operationsInProgress, operationsInProgress + 1, &queueOperationsInProgressCount_) > kMaxConcurrentBufferOperations)
        return;
    
    CMSampleBufferRef sampleBuffer = (CMSampleBufferRef)CMBufferQueueDequeueAndRetain(sampleBufferQueue_);
    //NSLog(@"Dequeued sample buffer %p\n", sampleBuffer);
    
    /* Cocoa Processing */
    [self processSampleBufferAsCocoa:sampleBuffer withCompletion:^(void)
     {
         OSAtomicDecrement32(&queueOperationsInProgressCount_);
         OSAtomicDecrement32(&imageRemainingToSaveCount_);
         
         if(self.delegate && [self.delegate respondsToSelector:@selector(imageRemainingToSaveCountChangedTo:)])
             [self.delegate imageRemainingToSaveCountChangedTo:imageRemainingToSaveCount_];
         
         if(CMBufferQueueGetBufferCount(sampleBufferQueue_) <= 0 && picture_taken_delegate_)
         {
             NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
             CGImageRef thumbnail = thumbnailCreateFromData2(jpegData, 79);
             picture_taken_delegate_(thumbnail);
             CGImageRelease(thumbnail);
             
             block();
             
             CFRelease(sampleBuffer);
         }
         else
         {
             CFRelease(sampleBuffer);
             
             // Keep processing buffer until done
             dispatch_async(picture_saving_queue_, ^(void)
                            {
                                [self processBufferQueueWithCompletion:^(void){}];
                            });

         }
     }];
    
    //NSLog(@"No more items in Queue\n");
}

// Extracts the JPEG payload and its propagatable metadata attachments from a
// captured sample buffer, hands both to the shared PYFileManager for
// persistence, then invokes the caller's completion block.
-(void)processSampleBufferAsCocoa:(CMSampleBufferRef)sampleBuffer withCompletion:(pictureProcessingCompleted)block
{
    NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
    CFDictionaryRef attachments =
        CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);

    [[PYFileManager instance] saveImageData:jpegData attachments:(id)attachments];

    // CMCopyDictionaryOfAttachments follows the Copy rule; balance it here.
    if (attachments != NULL)
        CFRelease(attachments);

    block();
}

// CMBufferQueue getDuration callback: reports a fixed one-hour duration for
// every queued sample buffer, regardless of its contents.
CMTime sampleBufferDurationCallback(CMBufferRef buffer, void* refcon)
{
    const int64_t oneHourInSeconds = 60 * 60;
    return CMTimeMake(oneHourInSeconds, 1);
}

// Invoked once the final still has been captured: notifies the delegate (if it
// implements the optional callback) and kicks off asynchronous draining of the
// sample-buffer queue on the saving queue.
-(void)doneShooting
{
    NSLog(@"Done shooting\n");

    BOOL delegateWantsCallback =
        delegate && [delegate respondsToSelector:@selector(imageCaptureCompleted)];
    if (delegateWantsCallback)
        [delegate imageCaptureCompleted];

    dispatch_async(picture_saving_queue_, ^{
        [self processBufferQueueWithCompletion:^{}];
    });
}

// End-of-processing hook; currently only logs (the delegate callback below is
// disabled).
-(void)doneProcessingPictures
{
    // FIX: removed stray ':' that followed the newline in the log format.
    NSLog(@"Done processing pictures\n");
    //pictureProcessingCompletedDelegate_();
}

#pragma mark - Core Graphics Image Processing

// Encodes cgImage as an in-memory JPEG (quality 0.85) and writes it, together
// with `metadata`, to the saved-photos album asynchronously. Returns NO if the
// in-memory encode could not be set up or finalized; `block` runs later from
// the asset library's completion handler.
// CF ownership note: destinationData is created at +1, bumped to +2 by the
// explicit CFRetain before the async write; the completion block and the
// unconditional release after bail: each consume one reference, so both the
// success and failure paths balance.
- (BOOL)writeCGImageToCameraRoll:(CGImageRef)cgImage withMetadata:(NSDictionary *)metadata withCompletionBlock:(pictureProcessingCompleted)block
{
	CFMutableDataRef destinationData = CFDataCreateMutable(kCFAllocatorDefault, 0);
	CGImageDestinationRef destination = CGImageDestinationCreateWithData(destinationData, CFSTR("public.jpeg"), 1, NULL);
	BOOL success = (destination != NULL);
	require(success, bail);
    
	const float JPEGCompQuality = 0.85f; // JPEGHigherQuality
	CFMutableDictionaryRef optionsDict = NULL;
	CFNumberRef qualityNum = NULL;
	
	qualityNum = CFNumberCreate(0, kCFNumberFloatType, &JPEGCompQuality);    
	
    // Build the destination options (lossy compression quality) only if the
    // CFNumber could be created; encoding proceeds either way.
    if (qualityNum)
    {
		optionsDict = CFDictionaryCreateMutable(0, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
		if (optionsDict)
			CFDictionarySetValue(optionsDict, kCGImageDestinationLossyCompressionQuality, qualityNum);
		CFRelease(qualityNum);
	}
	
	CGImageDestinationAddImage( destination, cgImage, optionsDict );

	// Finalize flushes the encoded JPEG bytes into destinationData.
	success = CGImageDestinationFinalize( destination );
    
	if (optionsDict)
		CFRelease(optionsDict);
	
	require(success, bail);
	
	// Extra retain keeps destinationData alive until the async completion
	// block below releases it (see ownership note above).
	CFRetain(destinationData);
	[assetLibrary_ writeImageDataToSavedPhotosAlbum:(id)destinationData metadata:metadata completionBlock:^(NSURL *assetURL, NSError *error)
    {
		if (destinationData)
			CFRelease(destinationData);
        
        block();
	}];

    
bail:
	// Reached on success and via both require() jumps; releases the original
	// +1 references from the Create calls above.
	if (destinationData)
    {
        CFRelease(destinationData);
    }
	if (destination)
    {
        CFRelease(destination);
    }
    
	return success;
}
// CGDataProvider-style release callback: unlocks and releases the pixel
// buffer passed as the info pointer. NOTE(review): no caller is visible in
// this file — presumably it balances a CVPixelBufferLockBaseAddress/+retain
// done where the buffer's base address is handed to CoreGraphics; confirm
// before removing.
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size);
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size) 
{	
	CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)pixel;
	CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
	CVPixelBufferRelease( pixelBuffer );
}

#pragma mark - Cocoa Image Processing

// Writes JPEG data (with its metadata attachments) to the camera roll via the
// shared ALAssetsLibrary; logs the outcome, surfaces failures through
// -displayErrorOnMainQueue:withMessage:, and always invokes `block`.
-(void)saveNSDataImageToCameraRoll:(NSData*)jpegData:(NSDictionary*)attachments withCompletion:(pictureProcessingCompleted)block
{
    NSLog(@"About to save image to camera roll\n");

    ALAssetsLibraryWriteImageCompletionBlock onWritten = ^(NSURL *assetURL, NSError *error)
    {
        if (error == nil)
        {
            NSLog(@"Saved image to camera roll\n");
        }
        else
        {
            NSLog(@"Image save failed with error %@\n", error);
            [self displayErrorOnMainQueue:error withMessage:@"Save to camera roll failed"];
        }

        block();
    };

    [assetLibrary_ writeImageDataToSavedPhotosAlbum:jpegData metadata:attachments completionBlock:onWritten];
}

// Maps a UIDeviceOrientation to the AVCaptureVideoOrientation to apply to the
// still-image connection (back camera is mounted landscape-right, hence the
// left/right swap — see QA1744 below).
- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch (deviceOrientation)
    {
        // From Technical Q&A QA1744 - Setting the orientation of video captured with AV Foundation (http://developer.apple.com/library/ios/#qa/qa1744/_index.html#//apple_ref/doc/uid/DTS40011134):
        //  The iPod touch, iPhone 4 and iPad 2 front facing camera is mounted AVCaptureVideoOrientationLandscapeLeft, and the back-facing camera is mounted AVCaptureVideoOrientationLandscapeRight.
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        
        case UIDeviceOrientationLandscapeRight:
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
            return AVCaptureVideoOrientationLandscapeLeft;
            
        case UIDeviceOrientationPortrait:
            return AVCaptureVideoOrientationPortrait;
            
        case UIDeviceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        
        // FIX: the enum's valid values start at 1 (Portrait); returning 0 for
        // UIDeviceOrientationUnknown produced an invalid orientation that
        // -setVideoOrientation: does not accept. Fall back to portrait.
        default:
            return AVCaptureVideoOrientationPortrait;
    }
}

// utility routine to display error alert if  fails
- (void)displayErrorOnMainQueue:(NSError *)error withMessage:(NSString *)message
{
    return;
	dispatch_async(dispatch_get_main_queue(), ^(void) {
		UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[NSString stringWithFormat:@"%@ (%d)", message, (int)[error code]]
															message:[error localizedDescription]
														   delegate:nil 
												  cancelButtonTitle:@"Dismiss" 
												  otherButtonTitles:nil];
		[alertView show];
        [alertView release];
	});
}

// Tears down capture-related observation. NOTE(review): no matching
// addObserver:forKeyPath:@"isCapturingStillImage" is visible anywhere in this
// file — if it is not registered elsewhere, this removeObserver: call throws
// NSRangeException. Confirm the observer is actually added, or guard this.
// NOTE(review): session/stillImageOutput are not stopped or released here —
// verify that is intentional.
-(void)teardownAVCapture
{
    [stillImageOutput removeObserver:self forKeyPath:@"isCapturingStillImage"];
    //[previewLayer removeFromSuperlayer];
}

// MRC dealloc: releases owned resources, then calls super LAST.
-(void)dealloc
{
    [self teardownAVCapture];
    
    if(pictureTakingOperationQueue_)
        [pictureTakingOperationQueue_ release];
    
    [assetLibrary_ release];
    
    // Balance the Block_copy made in -takePicturesWith: (nil if never called).
    if(picture_taken_delegate_)
        Block_release(picture_taken_delegate_);
    
    // FIX: CFRelease(NULL) crashes; the queue is NULL if CMBufferQueueCreate
    // failed in -init.
    if(sampleBufferQueue_)
        CFRelease(sampleBufferQueue_);
    
    // FIX: [super dealloc] was previously the FIRST statement — NSObject's
    // dealloc frees the instance, so every ivar access after it was a
    // use-after-free. It must always be the last statement in an MRC dealloc.
    [super dealloc];
}

@end
