//
//  CWMediaWriter.m
//  CWDevelop
//
//  Created by LittoCats on 7/8/14.
//  Copyright (c) 2014 Littocats. All rights reserved.
//

#import "CWMediaWriter.h"
#import <AVFoundation/AVFoundation.h>
#import <MobileCoreServices/MobileCoreServices.h>

// Returns a copy of sampleBuffer whose decode/presentation timestamps have been
// shifted earlier by timeOffset. The returned buffer is retained (+1); the
// caller must CFRelease it when done. Returns NULL on allocation or CoreMedia
// failure (the original returned an UNINITIALIZED pointer when
// CMSampleBufferCreateCopyWithNewTiming failed, and wrote through an
// unchecked malloc result).
static CMSampleBufferRef CMSampleBufferAdapteTimeOffset(CMSampleBufferRef sampleBuffer, CMTime timeOffset);
static CMSampleBufferRef CMSampleBufferAdapteTimeOffset(CMSampleBufferRef sampleBuffer, CMTime timeOffset)
{
    // First call with 0 entries only queries how many timing entries exist.
    CMItemCount count = 0;
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &count);

    CMSampleTimingInfo *pInfo = NULL;
    if (count > 0) {
        pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
        if (pInfo == NULL) {
            return NULL; // out of memory — don't let CoreMedia write through NULL
        }
        CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, count, pInfo, &count);

        // Shift every sample's timestamps back by the accumulated offset.
        for (CMItemCount i = 0; i < count; i++) {
            pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, timeOffset);
            pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, timeOffset);
        }
    }

    // NULL-initialize so a failed create yields NULL instead of garbage.
    CMSampleBufferRef newSampleBuffer = NULL;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sampleBuffer, count, pInfo, &newSampleBuffer);
    free(pInfo); // free(NULL) is a safe no-op
    return newSampleBuffer;
}

@interface CWMediaWriter ()

// Writer management. All writer state below is mutated on movieWritingQueue.
@property (nonatomic, strong) AVAssetWriter *assetWriter;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterAudioIn;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterVideoIn;
// Serial queue that orders all writing/control work.
@property (nonatomic, strong) dispatch_queue_t movieWritingQueue;

// Utilities
// Progress callback; block properties must be `copy` (was `strong`) so a
// stack block passed by the caller is copied to the heap.
@property (nonatomic, copy) void (^progressProcessor)(NSTimeInterval time);

// Destination file URL of the current recording session.
@property (nonatomic, strong) NSURL *mediaURL;

@property (nonatomic) BOOL isRunning;          // YES while recording and not paused
@property (nonatomic) BOOL readyToRecordAudio; // audio input configured and added
@property (nonatomic) BOOL readyToRecordVideo; // video input configured and added

// Accumulated time to subtract from sample timestamps; grows by the length
// of each pause so written media stays contiguous.
@property (nonatomic) CMTime SampleBufferTimeStampOffset;
// Wall-clock time (since reference date) at which the current pause began.
@property (nonatomic) NSTimeInterval pauseTime;
@end

@implementation CWMediaWriter

// Shared writer instance. Returns nil when the shared writer is already
// recording on behalf of another caller.
+ (instancetype)defaultWriter
{
    static CWMediaWriter *sharedWriter = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedWriter = [[CWMediaWriter alloc] init];
    });
    if (!sharedWriter.isRunning) {
        return sharedWriter;
    }
    NSLog(@"CWMediaWriter is in use by another process .");
    return nil;
}

// Designated initializer. No eager setup: queue, writer, and inputs are all
// created lazily by -start and the first appended sample buffers.
- (id)init
{
    if ((self = [super init])) {
        // Intentionally empty.
    }
    return self;
}

// Registers a progress callback, invoked on the main queue with the
// presentation time (in seconds) of each written sample. Pass nil to clear.
- (void)progress:(void (^)(NSTimeInterval))progress
{
    if (!progress) {
        self.progressProcessor = nil;
        return;
    }
    // Wrap the caller's block so it always runs on the main queue.
    self.progressProcessor = ^(NSTimeInterval time) {
        dispatch_async(dispatch_get_main_queue(), ^{
            progress(time);
        });
    };
}

#pragma mark- control
// Prepares a fresh recording session on the serial writing queue: picks a
// temporary output URL, resets all per-session state, and creates the
// AVAssetWriter. Must be balanced by -stop:.
- (void)start
{
    if (!_movieWritingQueue) {
        self.movieWritingQueue = dispatch_queue_create("Movie Writing Queue", DISPATCH_QUEUE_SERIAL);
    }
    dispatch_async(_movieWritingQueue, ^{
        _mediaURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), [[NSDate date] description]]];
        // Remove the file if one with the same name already exists
        [self removeFile:_mediaURL];

        // Reset per-session state so the writer can be reused after -stop:.
        // Previously the readyToRecord* flags survived a restart, so the new
        // AVAssetWriter never got any inputs attached.
        self.readyToRecordAudio = NO;
        self.readyToRecordVideo = NO;
        self.assetWriterAudioIn = nil;
        self.assetWriterVideoIn = nil;

        // init SampleBufferTimeStampOffset (zero, nanosecond timescale)
        self.SampleBufferTimeStampOffset = CMTimeMake(0, 1000000000);

        // Create an asset writer. Check the returned object — not the error
        // pointer — to decide success; the error out-param is only meaningful
        // when initialization actually fails.
        NSError *error = nil;
        _assetWriter = [[AVAssetWriter alloc] initWithURL:_mediaURL fileType:(NSString *)kUTTypeMPEG4 error:&error];
        if (!_assetWriter) {
            [self showError:error];
        } else {
            self.isRunning = YES;
        }
    });
}
// Pauses (pause == YES) or resumes (pause == NO) recording. While paused,
// incoming sample buffers are dropped; on resume, the wall-clock time spent
// paused is folded into the timestamp offset so output stays contiguous.
- (void)pause:(BOOL)pause
{
    dispatch_async(_movieWritingQueue, ^{
        if (pause) {
            if (!_isRunning) return; // already paused/stopped
            self.isRunning = NO;
            // Remember when this pause began.
            self.pauseTime = [NSDate timeIntervalSinceReferenceDate];
        } else {
            if (_isRunning) return; // already running
            self.isRunning = YES;
            NSTimeInterval resumeTime = [NSDate timeIntervalSinceReferenceDate];
            // Length of the pause, expressed with a nanosecond timescale.
            CMTime pausedInterval = CMTimeMake((resumeTime - _pauseTime) * 1000000000, 1000000000);
            _SampleBufferTimeStampOffset = CMTimeAdd(_SampleBufferTimeStampOffset, pausedInterval);
        }
    });
}
// Finishes writing and invokes doneProcessor on the main queue with the output
// file URL. Also clears isRunning — previously it was never reset, so after one
// recording +defaultWriter reported the shared writer as "in use" forever.
- (void)stop:(void (^)(NSURL *))doneProcessor
{
    dispatch_async(_movieWritingQueue, ^{
        // The session is over regardless of whether finishing succeeds.
        self.isRunning = NO;
        self.readyToRecordAudio = NO;
        self.readyToRecordVideo = NO;

        AVAssetWriter *writer = _assetWriter;
        if (!writer) {
            // -stop: without a matching -start (or called twice): messaging nil
            // would silently drop the completion, so report back directly.
            if (doneProcessor) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    doneProcessor(_mediaURL);
                });
            }
            return;
        }
        [writer finishWritingWithCompletionHandler:^{
            _assetWriter = nil;
            if (doneProcessor) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    doneProcessor(_mediaURL);
                });
            }
        }];
    });
}
#pragma mark Recording
// Appends one sample buffer (audio or video) to the file, shifting its
// timestamps by the accumulated pause offset. Lazily starts the writer session
// and configures the matching input from the first buffer of each media type.
// May be called from any thread; all writer work happens on the writing queue.
- (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
    // Retained, time-adjusted copy. This is also the buffer we append below:
    // the original code created it but then appended the UNADJUSTED buffer
    // (so pause/resume left gaps in the output) and captured that original
    // buffer across dispatch_async without retaining it (use-after-free risk).
    CMSampleBufferRef adjustedBuffer = CMSampleBufferAdapteTimeOffset(sampleBuffer, _SampleBufferTimeStampOffset);
    if (!adjustedBuffer) {
        return; // adaptation failed; nothing safe to write
    }
    CFRetain(formatDescription);
    dispatch_async(_movieWritingQueue, ^{
        if (!_isRunning) {
            // Paused or stopped: drop the sample.
            CFRelease(adjustedBuffer);
            CFRelease(formatDescription);
            return;
        }
        if (_assetWriter.status == AVAssetWriterStatusUnknown) {
            if ([_assetWriter startWriting]) {
                // The session timeline must match the (offset-adjusted)
                // buffers that are actually appended.
                [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(adjustedBuffer)];
            } else {
                [self showError:[_assetWriter error]];
            }
        }
        // Compare string contents, not pointers, in case the caller passes a
        // copy of the AVMediaType constant.
        if ([mediaType isEqualToString:AVMediaTypeVideo]) {
            // Initialize the video input if this is not done yet.
            if (!_readyToRecordVideo)
                _readyToRecordVideo = [self setupAssetWriterVideoInput:formatDescription];
            // Write video data to file (single readiness check; the original
            // tested readyForMoreMediaData twice in a row).
            if (_readyToRecordVideo && _assetWriterVideoIn.readyForMoreMediaData) {
                if (![_assetWriterVideoIn appendSampleBuffer:adjustedBuffer])
                    [self showError:[_assetWriter error]];
            }
        } else if ([mediaType isEqualToString:AVMediaTypeAudio]) {
            // Initialize the audio input if this is not done yet.
            if (!_readyToRecordAudio)
                _readyToRecordAudio = [self setupAssetWriterAudioInput:formatDescription];
            // Write audio data to file.
            if (_readyToRecordAudio && _assetWriterAudioIn.readyForMoreMediaData) {
                if (![_assetWriterAudioIn appendSampleBuffer:adjustedBuffer])
                    [self showError:[_assetWriter error]];
            }
        }

        if (_progressProcessor) {
            CMTime time = CMSampleBufferGetPresentationTimeStamp(adjustedBuffer);
            _progressProcessor(CMTimeGetSeconds(time));
        }

        CFRelease(adjustedBuffer);
        CFRelease(formatDescription);
    });
}

// Creates and attaches the AAC audio writer input, matching the sample rate
// and channel layout of the incoming format description. Returns YES on
// success; logs and returns NO otherwise.
- (BOOL)setupAssetWriterAudioInput:(CMFormatDescriptionRef)currentFormatDescription
{
    const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(currentFormatDescription);

    size_t layoutSize = 0;
    const AudioChannelLayout *channelLayout = CMAudioFormatDescriptionGetChannelLayout(currentFormatDescription, &layoutSize);

    // AVChannelLayoutKey must be specified, but if we don't know any better
    // give an empty data and let AVAssetWriter decide.
    NSData *channelLayoutData = (channelLayout && layoutSize > 0)
        ? [NSData dataWithBytes:channelLayout length:layoutSize]
        : [NSData data];

    NSDictionary *audioCompressionSettings = @{
        AVFormatIDKey: @((NSInteger)kAudioFormatMPEG4AAC),
        AVSampleRateKey: @((float)asbd->mSampleRate),
        AVEncoderBitRatePerChannelKey: @64000,
        AVNumberOfChannelsKey: @((NSInteger)asbd->mChannelsPerFrame),
        AVChannelLayoutKey: channelLayoutData,
    };

    if (![_assetWriter canApplyOutputSettings:audioCompressionSettings forMediaType:AVMediaTypeAudio]) {
        NSLog(@"Couldn't apply audio output settings.");
        return NO;
    }

    _assetWriterAudioIn = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
    _assetWriterAudioIn.expectsMediaDataInRealTime = YES;

    if (![_assetWriter canAddInput:_assetWriterAudioIn]) {
        NSLog(@"Couldn't add asset writer audio input.");
        return NO;
    }
    [_assetWriter addInput:_assetWriterAudioIn];
    return YES;
}

// Creates and attaches the H.264 video writer input sized from the incoming
// format description, with a bitrate heuristic based on pixel count.
// Returns YES on success; logs and returns NO otherwise.
- (BOOL)setupAssetWriterVideoInput:(CMFormatDescriptionRef)currentFormatDescription
{
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(currentFormatDescription);
    int pixelCount = dimensions.width * dimensions.height;

    // Assume that lower-than-SD resolutions are intended for streaming and use
    // a lower bitrate: 4.05 bits/pixel matches AVCaptureSessionPresetMedium/Low,
    // 11.4 matches AVCaptureSessionPresetHigh.
    float bitsPerPixel = (pixelCount < 640 * 480) ? 4.05f : 11.4f;
    int bitsPerSecond = pixelCount * bitsPerPixel;

    NSDictionary *videoCompressionSettings = @{
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: @(dimensions.width),
        AVVideoHeightKey: @(dimensions.height),
        AVVideoCompressionPropertiesKey: @{
            AVVideoAverageBitRateKey: @(bitsPerSecond),
            AVVideoMaxKeyFrameIntervalKey: @30,
        },
    };

    if (![_assetWriter canApplyOutputSettings:videoCompressionSettings forMediaType:AVMediaTypeVideo]) {
        NSLog(@"Couldn't apply video output settings.");
        return NO;
    }

    _assetWriterVideoIn = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
    _assetWriterVideoIn.expectsMediaDataInRealTime = YES;

    if (![_assetWriter canAddInput:_assetWriterVideoIn]) {
        NSLog(@"Couldn't add asset writer video input.");
        return NO;
    }
    [_assetWriter addInput:_assetWriterVideoIn];
    return YES;
}

#pragma mark Error Handling

// Presents the error to the user on the main thread.
// Uses dispatch_async instead of CFRunLoopPerformBlock: the latter only
// enqueues the block, and without a matching CFRunLoopWakeUp an idle main
// run loop may not execute it (the original never woke the run loop).
- (void)showError:(NSError *)error
{
    dispatch_async(dispatch_get_main_queue(), ^{
        UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:[error localizedDescription]
                                                            message:[error localizedFailureReason]
                                                           delegate:nil
                                                  cancelButtonTitle:@"OK"
                                                  otherButtonTitles:nil];
        [alertView show];
    });
}

// Deletes the file at fileURL if one exists, surfacing any deletion failure
// through -showError:. A missing file is not an error.
- (void)removeFile:(NSURL *)fileURL
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSString *filePath = fileURL.path;
    if (![fileManager fileExistsAtPath:filePath]) {
        return;
    }
    NSError *error = nil;
    if (![fileManager removeItemAtPath:filePath error:&error]) {
        [self showError:error];
    }
}

@end
