//
//  CWMediaStreamCapture.m
//  CWDevelop
//
//  Created by LittoCats on 7/7/14.
//  Copyright (c) 2014 Littocats. All rights reserved.
//

#import "CWMediaStreamCapture.h"


@interface CWMediaStreamCapture () <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

// View the preview is rendered into (weak: the capture does not own its preview).
@property (nonatomic, weak) UIView *preview;

// Option mask selecting which streams (video bit 0, audio bit 1) are captured.
@property (nonatomic) CWMediaStreamCaptureOptions options;

// Session management. All session mutation happens on sessionQueue.
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureConnection *audioConnection;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) dispatch_queue_t sessionQueue;

@property (nonatomic, strong) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic, strong) AVCaptureDeviceInput *audioDeviceInput;

@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDeviceOutput;
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioDeviceOutput;

//Utilities
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic) BOOL isPaused;

// Per-sample-buffer callback. Declared `copy` (not `strong`): block properties
// should be copied off the stack; `copy` documents the ownership semantics.
@property (nonatomic, copy) void (^dataProcessor)(CMSampleBufferRef sampleBufferRef, NSString *mediaType);
@end

@implementation CWMediaStreamCapture

// Convenience factory: creates a capture configured by `options`, forwarding
// every captured sample buffer (and its media type) to dataProcessor.
// dataProcessor may be nil, in which case buffers are simply dropped.
+ (CWMediaStreamCapture *)captureWithOptions:(CWMediaStreamCaptureOptions)options
                         dataProcessor:(void (^)(CMSampleBufferRef sampleBufferRef, NSString *mediaType))dataProcessor
{
    // Pass the processor straight through. The previous wrapper block added an
    // allocation per capture and, when dataProcessor was nil, stored a non-nil
    // no-op block that defeated the nil check in the sample-buffer callback
    // (an empty block was invoked for every frame).
    return [[CWMediaStreamCapture alloc] initWithOptions:options dataProcessor:dataProcessor];
}
// Begin running the session. -startRunning is a blocking call, so it is
// dispatched onto the session queue to keep the caller responsive.
- (void)start
{
    dispatch_async(self.sessionQueue, ^{
        [self.captureSession startRunning];
    });
}

// Toggle frame delivery without tearing the session down; while paused the
// sample-buffer callback drops incoming buffers.
- (void)pause:(BOOL)isPause
{
    _isPaused = isPause;
}

// Stop the session on the session queue, mirroring -start: per the note in
// -initWithOptions:, the session must not be mutated from multiple threads,
// and -stopRunning can block the calling thread.
- (void)stop
{
    dispatch_async(_sessionQueue, ^{
        [_captureSession stopRunning];
    });
}



#pragma mark- data output
// Shared delegate callback for both the video and the audio data outputs.
// Identifies which connection produced the buffer and hands it to the
// configured processor unless capture is paused.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Keep the buffer alive for the duration of processing.
    CFRetain(sampleBuffer);
    NSString *mediaType = nil;
    if (connection == _videoConnection) {
        mediaType = AVMediaTypeVideo;
    } else if (connection == _audioConnection) {
        mediaType = AVMediaTypeAudio;
    }
    if (mediaType && !_isPaused && _dataProcessor) {
        _dataProcessor(sampleBuffer, mediaType);
    }
    CFRelease(sampleBuffer);
}

#pragma mark- camera setup

// Designated initializer. `options` selects audio/video streams; dataProcessor
// (may be nil) receives every captured sample buffer with its media type.
- (id)initWithOptions:(CWMediaStreamCaptureOptions)options dataProcessor:(void (^)(CMSampleBufferRef sampleBufferRef, NSString *mediaType))dataProcessor
{
    self = [super init];
    if (self) {
        // Assign state only after the nil check (the original wrote through
        // self before testing it), and copy the block off the stack.
        _options = options;
        _dataProcessor = [dataProcessor copy];

        // In general it is not safe to mutate an AVCaptureSession or any of its
        // inputs, outputs, or connections from multiple threads at the same time.
        // -[AVCaptureSession startRunning] is a blocking call which can take a
        // long time, so session work is dispatched to sessionQueue to keep the
        // main queue (and the UI) responsive.
        _sessionQueue = dispatch_queue_create("CWMediaCapture session queue", DISPATCH_QUEUE_SERIAL);

        [self setupCaptureSession];
    }
    return self;
}
// Tear down the session: stop it and detach every input/output we attached.
- (void)dealloc
{
    [_captureSession stopRunning];
    // Guard each removal: setup may have skipped a stream (option mask) or
    // failed, and AVCaptureSession raises on a nil input/output argument.
    if (_videoDeviceOutput) [_captureSession removeOutput:_videoDeviceOutput];
    if (_audioDeviceOutput) [_captureSession removeOutput:_audioDeviceOutput];
    if (_videoDeviceInput)  [_captureSession removeInput:_videoDeviceInput];
    if (_audioDeviceInput)  [_captureSession removeInput:_audioDeviceInput];
}

// Build the capture session and attach the input/output pipelines selected by
// the option mask (bit 0 = video, bit 1 = audio).
- (void)setupCaptureSession
{
    self.captureSession = [[AVCaptureSession alloc] init];

    // iOS 7 introduced explicit user authorization for capture devices.
    if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 7.0)
        [self checkDeviceAuthorizationStatus];

    // Plain mask tests replace the original `(_options & (1 << 1)) >> 1`:
    // any nonzero result is truthy, the shift added nothing.
    if (_options & (1 << 1)) {
        [self setupAudioPipeline];
    }

    if (_options & 1) {
        [self setupVideoPipeline];
    }
}

// Attach the default microphone input and an audio data output to the session.
- (void)setupAudioPipeline
{
    NSError *audioError = nil;
    AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    self.audioDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&audioError];
    // Check the returned object, not the error pointer: the NSError is only
    // meaningful when initialization actually failed.
    if (!_audioDeviceInput) {
        NSLog(@"audioIn init error : %@",audioError);
    } else if ([_captureSession canAddInput:_audioDeviceInput]) {
        [_captureSession addInput:_audioDeviceInput];
    }

    self.audioDeviceOutput = [[AVCaptureAudioDataOutput alloc] init];
    [_audioDeviceOutput setSampleBufferDelegate:self queue:_sessionQueue];
    if ([_captureSession canAddOutput:_audioDeviceOutput])
        [_captureSession addOutput:_audioDeviceOutput];
    _audioConnection = [_audioDeviceOutput connectionWithMediaType:AVMediaTypeAudio];
}

// Attach the back camera input and a BGRA video data output to the session.
- (void)setupVideoPipeline
{
    NSError *videoError = nil;
    self.videoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack] error:&videoError];
    if (!_videoDeviceInput) {
        NSLog(@"videoIn init error : %@",videoError);
    } else if ([_captureSession canAddInput:_videoDeviceInput]) {
        [_captureSession addInput:_videoDeviceInput];
    }

    self.videoDeviceOutput = [[AVCaptureVideoDataOutput alloc] init];
    /*
     This prefers to discard late video frames early in the capture pipeline,
     since processing can take longer than real-time on some platforms.
     Clients whose image processing is faster than real-time should consider
     setting alwaysDiscardsLateVideoFrames to NO.
     */
    [_videoDeviceOutput setAlwaysDiscardsLateVideoFrames:YES];
    [_videoDeviceOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];
    [_videoDeviceOutput setSampleBufferDelegate:self queue:_sessionQueue];
    if ([_captureSession canAddOutput:_videoDeviceOutput])
        [_captureSession addOutput:_videoDeviceOutput];
    _videoConnection = [_videoDeviceOutput connectionWithMediaType:AVMediaTypeVideo];
}

// Returns the first capture device of mediaType at the requested position,
// falling back to the first available device when no position matches.
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *candidates = [AVCaptureDevice devicesWithMediaType:mediaType];
    for (AVCaptureDevice *candidate in candidates) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return [candidates firstObject];
}

// On iOS 7 and later, this method must be called to request camera access.
// Request camera access from the user, recording the result in
// deviceAuthorized and alerting the user when access is denied.
- (void)checkDeviceAuthorizationStatus
{
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        if (granted) {
            // Granted access to the camera.
            [self setDeviceAuthorized:YES];
            return;
        }
        // Denied: point the user at the privacy settings. UI work must run on
        // the main queue (this completion handler runs on an arbitrary thread).
        dispatch_async(dispatch_get_main_queue(), ^{
            [[[UIAlertView alloc] initWithTitle:@"AVCam!"
                                        message:@"AVCam doesn't have permission to use Camera, please change privacy settings"
                                       delegate:self
                              cancelButtonTitle:@"OK"
                              otherButtonTitles:nil] show];
            [self setDeviceAuthorized:NO];
        });
    }];
}

#pragma mark- camera control
// Swap the session's video input for a camera at the requested position,
// re-applying the flash mode and installing a one-shot observer that resets
// focus/exposure the first time the subject area changes.
- (void)setCameraPosition:(AVCaptureDevicePosition)cameraPosition
{
    AVCaptureDevice *videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:cameraPosition];
    NSError *error = nil;
    AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (!videoDeviceInput)
    {
        // Original passed error:nil and silently proceeded with a nil input.
        NSLog(@"%@", error);
        return;
    }

    [_captureSession beginConfiguration];

    [_captureSession removeInput:[self videoDeviceInput]];
    if ([_captureSession canAddInput:videoDeviceInput])
    {
        [self setFlashMode:self.flashMode];

        // Block-based observers must be removed using the token returned by
        // -addObserverForName:... — `removeObserver:self` has no effect on
        // them, so the original leaked one observer per camera switch and its
        // block kept firing. Capture the token and remove it on first delivery.
        __block id subjectAreaObserver = nil;
        subjectAreaObserver =
            [[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureDeviceSubjectAreaDidChangeNotification
                                                              object:videoDevice
                                                               queue:[NSOperationQueue mainQueue]
                                                          usingBlock:^(NSNotification *note) {
                                                              CGPoint devicePoint = CGPointMake(.5, .5);
                                                              [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus
                                                                   exposeWithMode:AVCaptureExposureModeContinuousAutoExposure
                                                                    atDevicePoint:devicePoint
                                                         monitorSubjectAreaChange:NO];
                                                              if (subjectAreaObserver) {
                                                                  [[NSNotificationCenter defaultCenter] removeObserver:subjectAreaObserver];
                                                                  subjectAreaObserver = nil;
                                                              }
                                                          }];

        [_captureSession addInput:videoDeviceInput];
        [self setVideoDeviceInput:videoDeviceInput];
    }
    else
    {
        // Could not add the new input: restore the one we removed.
        [_captureSession addInput:[self videoDeviceInput]];
    }

    [_captureSession commitConfiguration];
}
// Position (front/back) of the camera currently feeding the session.
- (AVCaptureDevicePosition)cameraPosition
{
    return self.videoDeviceInput.device.position;
}

// TODO: not implemented — the fps argument is ignored. A working version would
// presumably lock the video device and adjust its frame duration; confirm the
// intended behavior before implementing.
- (void)setFPS:(NSInteger)fps
{
    
}

// TODO: not implemented — pointInPreview is ignored. A working version would
// presumably convert the preview point to device coordinates and call
// -focusWithMode:exposeWithMode:atDevicePoint:monitorSubjectAreaChange:.
- (void)focusAtPreviewPoint:(CGPoint)pointInPreview
{
}

// Lock the current video device and apply the given focus/exposure modes at a
// device-space point of interest; optionally enable subject-area monitoring.
- (void)focusWithMode:(AVCaptureFocusMode)focusMode
       exposeWithMode:(AVCaptureExposureMode)exposureMode
        atDevicePoint:(CGPoint)point
monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    AVCaptureDevice *device = self.videoDeviceInput.device;
    NSError *lockError = nil;
    if (![device lockForConfiguration:&lockError]) {
        NSLog(@"%@", lockError);
        return;
    }
    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode]) {
        device.focusMode = focusMode;
        device.focusPointOfInterest = point;
    }
    if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode]) {
        device.exposureMode = exposureMode;
        device.exposurePointOfInterest = point;
    }
    device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
    [device unlockForConfiguration];
}

// Apply flashMode to the current camera, if it has a flash supporting it.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode
{
    AVCaptureDevice *device = self.videoDeviceInput.device;
    if (![device hasFlash] || ![device isFlashModeSupported:flashMode]) {
        return;
    }
    NSError *lockError = nil;
    if ([device lockForConfiguration:&lockError]) {
        device.flashMode = flashMode;
        [device unlockForConfiguration];
    } else {
        NSLog(@"%@", lockError);
    }
}
// Flash mode of the camera currently feeding the session.
- (AVCaptureFlashMode)flashMode
{
    return self.videoDeviceInput.device.flashMode;
}

@end

@interface CWMediaStreamCapturePreview ()

// The view's backing layer re-typed as a preview layer (see +layerClass).
// NOTE(review): `retain` on a readonly redeclaration of UIView's layer is
// unusual under ARC — confirm this is intentional.
@property(nonatomic, readonly, retain) AVCaptureVideoPreviewLayer *layer;

// Capture whose session this preview renders.
// NOTE(review): the custom setter stores into _mediaStreamcapture (declared
// elsewhere), not the auto-synthesized _capture ivar, so the synthesized
// getter will not reflect -setCapture: — verify this duplication is intended.
@property (nonatomic, strong) CWMediaStreamCapture *capture;
@end
@implementation CWMediaStreamCapturePreview

// Back this view with an AVCaptureVideoPreviewLayer instead of a plain CALayer.
+ (Class)layerClass
{
    return AVCaptureVideoPreviewLayer.class;
}

// Factory: builds a preview of the given size wired to capture's session.
+ (instancetype)previewWithMediaStreamCapture:(CWMediaStreamCapture *)capture size:(CGSize)size
{
    CWMediaStreamCapturePreview *preview = [[CWMediaStreamCapturePreview alloc] init];
    preview.frame = (CGRect){CGPointZero, size};
    preview.capture = capture;
    return preview;
}

#pragma mark- tap to focus
// Convert the tap location into capture-device coordinates and one-shot
// focus/expose there, monitoring for subsequent subject-area changes.
- (void)tapToFocus:(UITapGestureRecognizer *)tap
{
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    CGPoint tapPoint = [tap locationInView:tap.view];
    CGPoint devicePoint = [previewLayer captureDevicePointOfInterestForPoint:tapPoint];
    [_mediaStreamcapture focusWithMode:AVCaptureFocusModeAutoFocus
                        exposeWithMode:AVCaptureExposureModeAutoExpose
                         atDevicePoint:devicePoint
              monitorSubjectAreaChange:YES];
}
#pragma mark-
// Wire the preview to a capture: remember it, point the backing preview layer
// at its session, and install tap-to-focus.
// NOTE(review): calling this twice adds a second tap recognizer, and it writes
// _mediaStreamcapture (declared elsewhere) rather than the synthesized _capture
// ivar — confirm both are intended.
- (void)setCapture:(CWMediaStreamCapture *)capture
{
    _mediaStreamcapture = capture;
    [(AVCaptureVideoPreviewLayer *)self.layer setSession:capture.captureSession];
    [self setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [self addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapToFocus:)]];
}

// Forward the gravity setting to the backing preview layer.
- (void)setVideoGravity:(NSString *)videoGravity
{
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    previewLayer.videoGravity = videoGravity;
}
// Gravity currently applied by the backing preview layer.
- (NSString *)videoGravity
{
    return ((AVCaptureVideoPreviewLayer *)self.layer).videoGravity;
}
#pragma mark- delloc
// Detach the session so the layer stops rendering when this view goes away.
- (void)dealloc
{
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    previewLayer.session = nil;
}
@end

// CGDataProvider release callback: balances the base-address lock and the
// retain taken when the provider was created over the pixel buffer.
void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size)
{
    CVPixelBufferRef buffer = (CVPixelBufferRef)pixel;
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    CVPixelBufferRelease(buffer);
}

// Wraps an uncompressed 32-bit ARGB/BGRA pixel buffer in a CGImage without
// copying pixel data: the buffer is retained and its base address kept locked
// until the data provider is destroyed (see ReleaseCVPixelBuffer).
// Returns NULL for unsupported pixel formats or on creation failure.
// The caller owns the returned image (release with CGImageRelease).
CGImageRef CGImageCreateWithCVPixelBuffer(CVPixelBufferRef pixelBuffer)
{
	CGBitmapInfo bitmapInfo;
	OSType sourcePixelFormat = CVPixelBufferGetPixelFormatType( pixelBuffer );
	if ( kCVPixelFormatType_32ARGB == sourcePixelFormat )
		bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipFirst;
	else if ( kCVPixelFormatType_32BGRA == sourcePixelFormat )
		bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst;
	else
		return NULL; // only uncompressed pixel formats

	size_t sourceRowBytes = CVPixelBufferGetBytesPerRow( pixelBuffer );
	size_t width = CVPixelBufferGetWidth( pixelBuffer );
	size_t height = CVPixelBufferGetHeight( pixelBuffer );

	CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
	void *sourceBaseAddr = CVPixelBufferGetBaseAddress( pixelBuffer );

	// The provider's release callback balances this retain and the lock above.
	CVPixelBufferRetain( pixelBuffer );
	CGDataProviderRef provider = CGDataProviderCreateWithData( (void *)pixelBuffer, sourceBaseAddr, sourceRowBytes * height, ReleaseCVPixelBuffer );
	if ( !provider ) {
		// Original leaked the retain and the lock on this path; when the
		// provider is never created its release callback never runs.
		CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
		CVPixelBufferRelease( pixelBuffer );
		return NULL;
	}

	CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
	CGImageRef image = CGImageCreate(width, height, 8, 32, sourceRowBytes, colorspace, bitmapInfo, provider, NULL, true, kCGRenderingIntentDefault);

	// Releasing the provider here is safe: CGImageCreate retains it on
	// success, and on failure this release triggers ReleaseCVPixelBuffer to
	// clean up. (The original's `err`/`bail:` path was dead code — err was
	// never set to anything but noErr.)
	CGDataProviderRelease( provider );
	CGColorSpaceRelease( colorspace );
	return image;
}

// Returns a copy of sampleBuffer whose decode and presentation timestamps are
// shifted earlier by timeOffset. The caller owns the returned buffer
// (CFRelease). Returns NULL if the copy cannot be created.
CMSampleBufferRef CMSampleBufferCreateCopyWithTimeOffset(CMSampleBufferRef sampleBuffer, CMTime timeOffset)
{
    CMItemCount count = 0;
    // First call with a NULL array only queries the number of timing entries.
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &count);

    CMSampleTimingInfo *timingInfo = NULL;
    if (count > 0) {
        timingInfo = malloc(sizeof(CMSampleTimingInfo) * count);
        if (!timingInfo) {
            return NULL; // original dereferenced an unchecked malloc
        }
        CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, count, timingInfo, &count);

        for (CMItemCount i = 0; i < count; i++) {
            timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, timeOffset);
            timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, timeOffset);
        }
    }

    // Initialize to NULL so a failed copy returns NULL instead of garbage
    // (the original returned an uninitialized pointer on failure).
    CMSampleBufferRef newSampleBuffer = NULL;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sampleBuffer, count, timingInfo, &newSampleBuffer);
    free(timingInfo);
    return newSampleBuffer;
}