//
//  VideoRecording.m
//  VideoRecording
//
//  Created by sunny188 on 13-4-8.
//  Copyright (c) 2013年 sunny188 All rights reserved.
//

/* ×××××××××××××××××××××××××××
**  Version: V0.0.1
**  Author : sunny188
* ××××××××××××××××××××××××××*/

#import "VideoRecording.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <AudioToolbox/AudioToolbox.h>

static NSString* const kFileName = @"VideoRecording.mov";

@interface VideoRecording(){

    AVAssetWriter *videoWriter;
    AVAssetWriterInput *videoWriterInput;
    AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
    
    BOOL           _recording;     //正在录制中
    BOOL           _writing;       //正在将帧写入文件

    NSDate         *startedAt;     //录制的开始时间
    CGContextRef   context;        //绘制layer的context
    NSTimer        *timer;         //按帧率写屏的定时器
    
    CALayer *_captureLayer;
    NSUInteger  _frameRate;
}

- (BOOL) setUpWriter;        //设置图像写入
- (void) cleanupWriter;      //清除写入数据
- (void) completeRecordingSession;  //录像完成写入文件
- (void) drawFrame;  //绘制每帧

@end


@implementation VideoRecording

@synthesize frameRate = _frameRate;
@synthesize captureLayer = _captureLayer;
@synthesize screenWidth,screenHeight;
@synthesize videoPath;



/*
**  Initializer: defaults the capture frame rate to 20 fps.
*/

- (id)init
{
    if ((self = [super init])) {
        _frameRate = 20; // default frame rate is 20 fps
    }
    return self;
}




/*
**  Start video recording.
**  No-op if already recording or if no capture layer has been set.
*/

- (void)startRecording
{
    if (!_recording && _captureLayer) {
        if ([self setUpWriter]) {
            startedAt = [[NSDate date] retain];
            _recording = YES;
            _writing = NO;

            // Per-frame drawing timer: fires immediately, then every 1/_frameRate s.
            timer = [[NSTimer alloc] initWithFireDate:[NSDate date]
                                             interval:1.0 / _frameRate
                                               target:self
                                             selector:@selector(drawFrame)
                                             userInfo:nil
                                              repeats:YES];

            NSRunLoop *runLoop = [NSRunLoop currentRunLoop];
            [runLoop addTimer:timer forMode:NSRunLoopCommonModes];
            // The run loop retains the timer; the ivar keeps a non-owning
            // reference that stays valid until -invalidate (see -stopRecording).
            [timer release];

            // BUG FIX: -run was previously called unconditionally. On the main
            // thread the run loop is already running, so that trapped the
            // caller in a nested run loop until the timer was invalidated.
            // Only spin the loop on background threads, where nothing else
            // would service the timer.
            if (![NSThread isMainThread]) {
                [runLoop run];
            }
        }
    }
}





/*
**  Stop recording: invalidate the frame timer, finish the movie file
**  and tear down all writer state.
*/

- (void)stopRecording
{
    if (!_recording) {
        return;
    }
    _recording = false;

    // Messaging a nil timer is a harmless no-op.
    [timer invalidate];
    timer = nil;

    [self completeRecordingSession];
    [self cleanupWriter];
}




/*
**  Render one frame: draw the capture layer into the bitmap context and
**  append the pixels to the movie at the elapsed-time timestamp.
**  The _writing flag drops a timer tick while the previous frame is
**  still being written.
*/

- (void)drawFrame
{
    if (_writing) {
        return; // previous frame still in flight; skip this tick
    }
    _writing = true;

    size_t width  = CGBitmapContextGetWidth(context);
    size_t height = CGBitmapContextGetHeight(context);

    // If the context looks undersized (layer not fully laid out when it
    // was created), fall back to the screen dimensions.
    if (width < screenWidth * 0.7) {
        width  = screenWidth;
        height = screenHeight;
    }

    CGContextClearRect(context, CGRectMake(0, 0, width, height));
    [self.captureLayer renderInContext:context];
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    if (_recording) {
        // Presentation time = milliseconds elapsed since recording started,
        // matching the 1/1000 timescale used in -setUpWriter.
        float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
        CMTime time = CMTimeMake((int)millisElapsed, 1000);

        if (![videoWriterInput isReadyForMoreMediaData]) {
            NSLog(@"Not ready for video data");
        }
        else {
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

            CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                                 avAdaptor.pixelBufferPool,
                                                                 &pixelBuffer);
            if (status != kCVReturnSuccess || pixelBuffer == NULL) {
                // BUG FIX: the buffer was previously locked and written into
                // even when creation failed, dereferencing a NULL buffer.
                NSLog(@"Error creating pixel buffer:  status=%d", (int)status);
            }
            else {
                CVPixelBufferLockBaseAddress(pixelBuffer, 0);
                uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);

                // BUG FIX: copy row by row — the pixel buffer's bytes-per-row
                // may be padded and differ from the bitmap's row stride, so a
                // single contiguous copy could skew or overrun the buffer.
                const uint8_t *srcPixels = CFDataGetBytePtr(image);
                size_t srcBytesPerRow = CGImageGetBytesPerRow(cgImage);
                size_t dstBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
                size_t rowCount = MIN(CGImageGetHeight(cgImage),
                                      CVPixelBufferGetHeight(pixelBuffer));
                size_t rowBytes = MIN(srcBytesPerRow, dstBytesPerRow);
                for (size_t row = 0; row < rowCount; row++) {
                    memcpy(destPixels + row * dstBytesPerRow,
                           srcPixels + row * srcBytesPerRow,
                           rowBytes);
                }

                BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
                if (!success)
                    NSLog(@"Warning:  Unable to write buffer to video");

                // clean up
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                CVPixelBufferRelease(pixelBuffer);
            }
            CFRelease(image);
        }
    }

    CGImageRelease(cgImage);

    _writing = false;
}



/*
**  Path of the temporary movie file inside the Caches directory.
*/

- (NSString*)tempFilePath {

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    // The appended path is already autoreleased; the previous
    // retain/autorelease pair was redundant.
    return [[paths objectAtIndex:0] stringByAppendingPathComponent:kFileName];
}





/*
**  Create and configure the asset writer, its video input, the pixel
**  buffer adaptor and the bitmap context used to rasterize the layer.
**  Returns YES on success, NO on any setup failure.
*/
- (BOOL) setUpWriter {

    CGSize size = self.captureLayer.frame.size;

    // Fall back to the full screen size if the layer has not reached its
    // final frame yet.
    if (size.width < screenWidth * 0.7) {
        size.width  = screenWidth;
        size.height = screenHeight;
    }

    // Remove any movie file left over from a previous session.
    NSError  *error = nil;
    NSString *filePath = [self tempFilePath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:filePath]) {
        if (![fileManager removeItemAtPath:filePath error:&error]) {
            return NO;
        }
    }

    // Configure videoWriter.
    NSURL *fileUrl = [NSURL fileURLWithPath:filePath];
    videoWriter = [[AVAssetWriter alloc] initWithURL:fileUrl fileType:AVFileTypeQuickTimeMovie error:&error];
    if (videoWriter == nil) {
        // BUG FIX: previously only NSParameterAssert (stripped from release
        // builds) guarded this; a nil writer would crash later on.
        NSLog(@"Error creating AVAssetWriter: %@", error);
        return NO;
    }

    // Configure videoWriterInput: H.264, average bitrate ~1 bit/pixel/s.
    NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:size.width * size.height], AVVideoAverageBitRateKey,
                                           nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;

    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    avAdaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes] retain];

    // Add input and start the session at t = 0 with a millisecond
    // timescale (matching the timestamps produced in -drawFrame).
    [videoWriter addInput:videoWriterInput];
    if (![videoWriter startWriting]) {
        // BUG FIX: a failed start was previously ignored.
        NSLog(@"Error starting AVAssetWriter: %@", videoWriter.error);
        return NO;
    }
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    // Create the bitmap context the capture layer is rendered into.
    // -cleanupWriter releases it and resets it to NULL, so each session
    // gets a context sized to the current layer.
    if (context == NULL) {
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        context = CGBitmapContextCreate(NULL,
                                        size.width,
                                        size.height,
                                        8,              // bits per component
                                        size.width * 4, // bytes per row
                                        colorSpace,
                                        kCGImageAlphaNoneSkipFirst);
        CGColorSpaceRelease(colorSpace);
        if (context == NULL) {
            return NO;
        }
        CGContextSetAllowsAntialiasing(context, NO);
        // Flip vertically: CoreGraphics uses a bottom-left origin while
        // layers render with a top-left origin.
        CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, size.height);
        CGContextConcatCTM(context, flipVertical);
    }

    return YES;
}




/*
**  Finish the movie file. Waits for the writer to leave the Unknown
**  state, finalizes the file and saves it to the Saved Photos album.
*/
- (void) completeRecordingSession {

    [videoWriterInput markAsFinished];

    // Busy-wait until the writer has actually started; its status stays
    // AVAssetWriterStatusUnknown until writing begins.
    int status = videoWriter.status;
    while (status == AVAssetWriterStatusUnknown)
    {
        [NSThread sleepForTimeInterval:0.5f];
        status = videoWriter.status;
    }

    NSString *filePath = [self tempFilePath];

    BOOL success = [videoWriter finishWriting];
    if (success) {
        if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(filePath)) {  // save to the photo album
            UISaveVideoAtPathToSavedPhotosAlbum(filePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
        }
    }
    else {
        // BUG FIX: a failed finishWriting was previously silent, leaving
        // no trace of why the movie never appeared in the album.
        NSLog(@"Error finishing video file: %@", videoWriter.error);
    }
}



/*
**  UISaveVideoAtPathToSavedPhotosAlbum completion callback; logs the
**  error description when saving failed.
*/

- (void)video:(NSString *)videoPath
            didFinishSavingWithError:(NSError *)error
            contextInfo:(void *)contextInfo {
    if (error != nil) {
        NSLog(@"%@", [error localizedDescription]);
    }
}





/*
**  Suggested video file name, formatted as yyyyMMddHHmmss.mp4
**  (zero-padded timestamp; the extension can be changed as required).
**
**  BUG FIX: the old implementation passed NSInteger values to "%i"
**  (undefined on 64-bit, where NSInteger is long), produced unpadded,
**  ambiguous names (e.g. 2013-1-5 vs 20130105), and allocated an unused
**  NSDateFormatter plus a discarded NSDateComponents instance.
*/

- (NSString *)videoPath{

    NSDateFormatter *formatter = [[[NSDateFormatter alloc] init] autorelease];
    [formatter setDateFormat:@"yyyyMMddHHmmss"];
    return [[formatter stringFromDate:[NSDate date]] stringByAppendingString:@".mp4"];
}




/*
**  Release every object created by -setUpWriter so the next recording
**  session starts from a clean slate.
*/

- (void) cleanupWriter {

    [avAdaptor release];
    avAdaptor = nil;

    [videoWriterInput release];
    videoWriterInput = nil;

    [videoWriter release];
    videoWriter = nil;

    [startedAt release];
    startedAt = nil;

    // Forces -setUpWriter to build a fresh, correctly sized context on
    // the next session.
    CGContextRelease(context);
    context = NULL;
}


@end