//
//  LocalRecorder.m
//  LocalRecorderDemo
//
//  Created by x on 13-7-25.
//  Copyright (c) 2013. All rights reserved.
//

#import "LocalRecorder.h"

// Target frame interval in seconds (~11 fps). Parenthesized so the macro
// expands safely inside larger expressions (e.g. `2 * recordFPS`); the
// unparenthesized form `1.0f/11.0f` would bind incorrectly there.
#define recordFPS (1.0f/11.0f)
// Maximum number of frames buffered while awaiting encoding.
#define frameBuffer (10)
// Application frame of the main screen (portrait-oriented coordinates).
#define MainFrame [[UIScreen mainScreen] applicationFrame]
// MainFrame with width and height swapped, for landscape layouts.
#define MainFrameLandscape CGRectMake(0.0f, 0.0f, MainFrame.size.height, MainFrame.size.width)

// Private interface for LocalRecorder.
@interface LocalRecorder ()

// One-time setup invoked from -init. (Implementation not visible in this
// chunk of the file.)
-(void)readyInit;

@end

@implementation LocalRecorder

// Explicit synthesis (pre-auto-synthesis style) for the public properties.
@synthesize directoryPath;
@synthesize ParentID, startRecordFlag;
@synthesize videoPath, videoFileName;
@synthesize thumbnailImage;

// Designated initializer: recording starts in the stopped state, video mode
// is enabled by default, and one-time setup runs via -readyInit.
- (id)init
{
    self = [super init];
    if (self)
    {
        startRecordFlag = NO;
        isVideo = YES;
        [self readyInit];
    }
    return self;
}

// Returns `str` with occurrences of the search string replaced.
// NOTE(review): as written, both the search and replacement literals appear
// to be a plain ASCII space, which makes this a no-op. The original intent
// was presumably to normalize some special whitespace character (e.g. a
// non-breaking or full-width space) to a regular space — confirm which
// character was meant before changing it.
-(NSString *)returnFormatString:(NSString *)str
{
    return [str stringByReplacingOccurrencesOfString:@" " withString:@" "];
}

// Generate a thumbnail image for the video.
// Synchronously extracts a still frame from the video at `videoURL` at
// `time` seconds and stores it in self.thumbnailImage. If extraction fails,
// an empty UIImage is stored instead. Manual-retain-release: every object
// alloc'd or copied here is released before returning.
- (void)thumbnailImageForVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time {
    
    // A nil URL would produce an invalid asset; bail out early.
    if (!videoURL) {
        return;
    }
    
    NSLog(@"thumbnailImageForVideo, videoURL=%@", videoURL);
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    NSParameterAssert(asset);
    AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // Honor the track's preferred transform so rotated video yields an
    // upright thumbnail.
    assetImageGenerator.appliesPreferredTrackTransform = YES;
    assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    CGImageRef thumbnailImageRef = NULL;
    CFTimeInterval thumbnailImageTime = time;
    NSError *thumbnailImageGenerationError = nil;
    // Blocking frame grab at t = time seconds (timescale 1). The "copy" in
    // the name means we own thumbnailImageRef and must release it below.
    thumbnailImageRef = [assetImageGenerator copyCGImageAtTime:CMTimeMake(thumbnailImageTime, 1) actualTime:NULL error:&thumbnailImageGenerationError];
    if (!thumbnailImageRef)
        NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
    
    // Fall back to an empty UIImage on failure so the property is always set.
    UIImage *image = thumbnailImageRef ? [[UIImage alloc] initWithCGImage:thumbnailImageRef] : [[UIImage alloc] init];
    
    self.thumbnailImage = image;
    
    // Balance the alloc above; the property now owns the image.
    [image release];
    // CGImageRelease is NULL-safe, so this is fine on the failure path too.
    CGImageRelease(thumbnailImageRef);
    [assetImageGenerator release];
    [asset release];
}

// Writes `image` as a heavily-compressed JPEG (quality 0.1) to `filePath`.
// Previously the write result was discarded, so disk-full / bad-path errors
// vanished silently; now failures are logged. Selector kept as-is (including
// the unnamed second argument) so existing callers are unaffected.
- (void)saveImageToFile:(UIImage *)image :(NSString *)filePath {
    
    // Guard against a nil image or empty path — UIImageJPEGRepresentation(nil)
    // returns nil and the write would silently do nothing.
    if (!image || filePath.length == 0) {
        NSLog(@"saveImageToFile: missing image or file path, skipping");
        return;
    }
    
    NSData *imgData = UIImageJPEGRepresentation(image, 0.1f);
    
    if (![imgData writeToFile:filePath atomically:YES]) {
        NSLog(@"saveImageToFile: failed to write %@", filePath);
    }
}

// Saves the video file at `currentVideoPath` to the user's Camera Roll.
// Completion (success or failure) is reported asynchronously via
// -video:didFinishSavingWithError:contextInfo: on this object.
- (void)saveVideoToCameraRoll:(NSString *)currentVideoPath {
    NSLog(@"saveVideoToCameraRoll  currentVideoPath:%@", currentVideoPath);
    UISaveVideoAtPathToSavedPhotosAlbum(currentVideoPath,
                                        self,
                                        @selector(video:didFinishSavingWithError:contextInfo:),
                                        nil);
}


// UISaveVideoAtPathToSavedPhotosAlbum completion callback (selector shape is
// dictated by UIKit). Fixes: the error is an NSError object, so compare
// against nil (not NULL), and log the error itself instead of discarding it —
// previously a failure printed no diagnostic at all.
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo: (void *)contextInfo
{
    if (error != nil)
    {
        // Include the error so save failures are actually diagnosable.
        NSLog(@"SAVE FAILED: %@", error);
    }
    else
    {
        NSLog(@"SAVE SUCCEEDED");
    }
}

// Starts an H.264 recording session fed from the `imagePixelBuffer` ivar:
// a background writer loop appends a frame whenever `isNewImagePixelBuffer`
// is set. Recording stops when other code sets `isVideo = NO`; the file is
// then finalized, a thumbnail (.t.jpg) is saved next to it, and ParentID is
// notified on the main queue (recordCompleted / recordCompleted2:).
// Output: <directoryPath>/<fileName>.mp4 — NOTE(review): written with
// AVFileTypeQuickTimeMovie despite the .mp4 extension; confirm intentional
// (the sibling startRecordH264Image__ uses AVFileTypeMPEG4).
-(void)startRecord__:(NSString *)fileName VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    
    NSString *thumbnailImageFileName = [NSString stringWithFormat:@"%@.t.jpg", fileName];
    NSString *recordFileName = [NSString stringWithFormat:@"%@.mp4", fileName];
    NSString *thumbnailImagePath = [self.directoryPath stringByAppendingPathComponent:thumbnailImageFileName];
    ;
    NSString *recordPath = [self.directoryPath stringByAppendingPathComponent:recordFileName];
    
    NSLog(@"thumbnailImageFileName=%@", thumbnailImageFileName);
    NSLog(@"recordFileName=%@", recordFileName);
    NSLog(@"thumbnailImagePath=%@", thumbnailImagePath);
    NSLog(@"recordPath=%@", recordPath);
    
    // NOTE(review): the re-entrancy guard is commented out, so calling this
    // while startRecordFlag is already YES starts a second writer loop —
    // confirm whether that is intended.
    if (self.startRecordFlag) {
        //[self startRecordFrame:adaptor0 didReceiveRawDataFrame:imgData VideoWidth:width VideoHeight:height];
        
        //return;
    }
    
    self.startRecordFlag = YES;
    
    // isVideo doubles as the run flag for the writer loop below; setting it
    // to NO elsewhere requests a stop.
    isVideo = YES;
    
    if([[NSFileManager defaultManager] fileExistsAtPath:recordPath])
    {
        //remove the old one
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    }
    
    //for clearing all image
    [imageArr removeAllObjects];
    
    if (isVideo == YES)
    {
        printf("=========== isVideo == YES\n");
        
        NSError *error = nil;
        
        // Belt-and-braces: also unlink any stale file before the writer opens it.
        unlink([recordPath UTF8String]);
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:recordPath]
                                                               fileType:AVFileTypeQuickTimeMovie
                                                                  error:&error];
        NSParameterAssert(videoWriter);
        if(error)
            NSLog(@"error >>>>");
        
        //NSLog(@"error = %@", [error localizedDescription]);
        
        // H.264 video input at the caller-supplied dimensions.
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:height], AVVideoHeightKey, nil];
        AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        // The adaptor expects 32BGRA pixel buffers from the capture side.
        NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
        
        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                         assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        
        if ([videoWriter canAddInput:writerInput])
            NSLog(@"ok");
        else
            NSLog(@"……");
        
        [videoWriter addInput:writerInput];
        
        // Session timeline starts at zero; frame timestamps below are
        // wall-clock milliseconds since startTime.
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        
        dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
        // NOTE(review): `frame` is declared __block but never incremented in
        // this method (unlike the sibling methods) — likely vestigial.
        int __block frame = 0;
        
        startTime = CFAbsoluteTimeGetCurrent();
        
        // Writer loop: runs on dispatchQueue whenever the input can accept
        // more data; polls the shared frame flag roughly every 10 ms.
        [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
            printf("writerInput is->>>>>>>>>%i\n",[writerInput isReadyForMoreMediaData]);
            while ([writerInput isReadyForMoreMediaData])
            {
                //printf("imageArr->%d,isVideo ---->%i ", [imageArr count], isVideo);
                // Stop requested: finalize the file and notify the parent.
                if(isVideo == NO)
                {
                    isVideo = YES;
                    [writerInput markAsFinished];
                    //[videoWriter finishWriting];
                    [videoWriter finishWritingWithCompletionHandler:^{
                        
                        /*
                         printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                         if ([ParentID respondsToSelector:@selector(recordCompleted)])
                         {
                         printf("=========== ParentID performSelectorOnMainThread\n");
                         [ParentID performSelectorOnMainThread:@selector(recordCompleted) withObject:nil waitUntilDone:YES];
                         }
                         */
                        
                        
                        /*
                         printf("=========== albumPath UISaveVideoAtPathToSavedPhotosAlbum\n");
                         NSString *albumPath = [[NSString alloc] init];
                         NSString * filePath = [albumPath stringByAppendingPathComponent:moviePath];
                         UISaveVideoAtPathToSavedPhotosAlbum(filePath, self,
                         @selector(video:didFinishSavingWithError:contextInfo:), nil);
                         [albumPath release];
                         */
                        
                        // MRC: balances the alloc of videoWriter above, once
                        // writing has fully completed.
                        [videoWriter release];
                        
                        printf("=========== videoWriter save thumbnailImage \n");
                        
                        
                        // Grab a thumbnail from the finished file at t = 1 s
                        // and persist it alongside the video.
                        [self thumbnailImageForVideo:[NSURL fileURLWithPath:[self.directoryPath stringByAppendingPathComponent:recordFileName]] atTime:1];
                        if (self.thumbnailImage) {
                            [self saveImageToFile:self.thumbnailImage :thumbnailImagePath];
                        }
                        
                        NSLog(@"self.thumbnailImage.size.width=%f, self.thumbnailImage.size.height=%f", self.thumbnailImage.size.width, self.thumbnailImage.size.height);
                        NSData *dataObj = UIImageJPEGRepresentation(self.thumbnailImage, 0.1);
                        NSLog(@"self.thumbnailImage dataObj.length=%d", dataObj.length);
                        
                        printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                        // Notify the parent on the main queue; which callback
                        // fires is chosen at compile time.
                        dispatch_async(dispatch_get_main_queue(), ^{
                            
#if defined(LOCAL_SOURCE2)
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted2:)])
                                [self.ParentID recordCompleted2:fileName];
#else
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted)])
                                [self.ParentID recordCompleted];
#endif
                        });
                    }];
                    //[videoWriter release];
                    
                    
                    break;
                }
                
                // No fresh frame from the producer yet — keep polling.
                if (!isNewImagePixelBuffer) {
                    NSLog(@"isNewImagePixelBuffer=%d, waiting!", isNewImagePixelBuffer);
                }
                else
                {
                    if (imagePixelBuffer)
                    {
                        // Timestamp = elapsed wall-clock time in ms, at a
                        // 1000 Hz timescale, so playback matches real time.
                        CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - startTime) * 1000;
                        CMTime currentSampleTime = CMTimeMake((int)interval, 1000);
                        
                        //if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 25)])
                        if(![adaptor appendPixelBuffer:imagePixelBuffer withPresentationTime:currentSampleTime])
                            printf("FAIL");
                        else
                        {
                            //printf(" |imageArr.count=%d|, removing buffer……", imageArr.count);
                            
                            NSLog(@" >>>>>>>>>>>>>>>>>>> appendPixelBuffer >>>>>>>>>>>>>>>>>>>");
                            
                            // Mark the shared buffer as consumed so the same
                            // frame isn't appended twice.
                            isNewImagePixelBuffer = NO;
                            
                            dispatch_async(dispatch_get_main_queue(), ^{
                                
                                if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordRunning)])
                                    [self.ParentID recordRunning];
                            });
                        }
                    }
                    
                }
                //printf(" total frame %d\n", frame);
                
                // ~10 ms poll interval to avoid spinning the CPU.
                usleep(10*1000);
            }
        }];
        
        printf("=========== writerInput requestMediaDataWhenReadyOnQueue\n");
    }
}

// Starts an H.264 recording session fed from the `imageArr` queue of
// UIImages: a background writer loop pops the head image, converts it to a
// pixel buffer via -pixelBufferFromCGImage21:..., and appends it. Recording
// stops when `isVideo` is set NO AND the queue has drained; the file is then
// finalized (AVFileTypeMPEG4, matching the .mp4 extension), a thumbnail
// (.t.jpg) is saved next to it, and ParentID is notified on the main queue.
-(void)startRecordH264Image__:(NSString *)fileName VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    
    NSString *thumbnailImageFileName = [NSString stringWithFormat:@"%@.t.jpg", fileName];
    NSString *recordFileName = [NSString stringWithFormat:@"%@.mp4", fileName];
    NSString *thumbnailImagePath = [self.directoryPath stringByAppendingPathComponent:thumbnailImageFileName];
    ;
    NSString *recordPath = [self.directoryPath stringByAppendingPathComponent:recordFileName];
    
    NSLog(@"thumbnailImageFileName=%@", thumbnailImageFileName);
    NSLog(@"recordFileName=%@", recordFileName);
    NSLog(@"thumbnailImagePath=%@", thumbnailImagePath);
    NSLog(@"recordPath=%@", recordPath);
    
    // NOTE(review): the re-entrancy guard is commented out, so calling this
    // while startRecordFlag is already YES starts a second writer loop —
    // confirm whether that is intended.
    if (self.startRecordFlag) {
        //[self startRecordFrame:adaptor0 didReceiveRawDataFrame:imgData VideoWidth:width VideoHeight:height];
        
        //return;
    }
    
    self.startRecordFlag = YES;
    
    // isVideo doubles as the run flag for the writer loop below; setting it
    // to NO elsewhere requests a stop (effective once imageArr drains).
    isVideo = YES;
    
    if([[NSFileManager defaultManager] fileExistsAtPath:recordPath])
    {
        //remove the old one
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    }
    
    //for clearing all image
    [imageArr removeAllObjects];
    
    if (isVideo == YES)
    {
        printf("=========== isVideo == YES\n");
        
        NSError *error = nil;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:recordPath]
                                                               fileType:AVFileTypeMPEG4
                                                                  error:&error];
        if (error) {
            NSLog(@"error >>>>");
            
            //NSLog(@"error = %@", [error localizedDescription]);
        }
        NSParameterAssert(videoWriter);
        
        NSLog(@"NSParameterAssert(videoWriter)");
        
        
        // H.264 video input at the caller-supplied dimensions.
        NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
                                        AVVideoWidthKey: [NSNumber numberWithInt:width],
                                        AVVideoHeightKey: [NSNumber numberWithInt:height]};
        
        AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                             outputSettings:videoSettings];
        
        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                         sourcePixelBufferAttributes:nil];
        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        [videoWriter addInput:writerInput];
        
        //Start a session:
        // Session timeline starts at zero; frame timestamps below are
        // wall-clock milliseconds since startTime.
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        
        // NOTE(review): this pool-created buffer is shadowed by the
        // block-local `buffer` below and is apparently never appended or
        // released — looks like a leak / dead code; confirm and remove.
        CVPixelBufferRef buffer;
        CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
        
        //CMTime presentTime = CMTimeMake(0, fps);
        
        NSLog(@"CVPixelBufferPoolCreatePixelBuffer");
        
        //CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
        
        dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
        int __block frame = 0;
        
        startTime = CFAbsoluteTimeGetCurrent();
        
        // Writer loop: runs on dispatchQueue whenever the input can accept
        // more data; drains imageArr roughly every 10 ms.
        [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
            printf("writerInput is->>>>>>>>>%i\n",[writerInput isReadyForMoreMediaData]);
            // Block-local pending frame; non-NULL between conversion and a
            // successful append (so a failed append retries the same frame).
            CVPixelBufferRef buffer = NULL;
            while ([writerInput isReadyForMoreMediaData])
            {
                //printf("imageArr->%d,isVideo ---->%i ", [imageArr count], isVideo);
                // Stop requested and queue drained: finalize and notify.
                if([imageArr count] == 0 && isVideo == NO)
                {
                    isVideo = YES;
                    [writerInput markAsFinished];
                    //[videoWriter finishWriting];
                    [videoWriter finishWritingWithCompletionHandler:^{
                        
                        /*
                         printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                         if ([ParentID respondsToSelector:@selector(recordCompleted)])
                         {
                         printf("=========== ParentID performSelectorOnMainThread\n");
                         [ParentID performSelectorOnMainThread:@selector(recordCompleted) withObject:nil waitUntilDone:YES];
                         }
                         */
                        
                        
                        /*
                         printf("=========== albumPath UISaveVideoAtPathToSavedPhotosAlbum\n");
                         NSString *albumPath = [[NSString alloc] init];
                         NSString * filePath = [albumPath stringByAppendingPathComponent:moviePath];
                         UISaveVideoAtPathToSavedPhotosAlbum(filePath, self,
                         @selector(video:didFinishSavingWithError:contextInfo:), nil);
                         [albumPath release];
                         */
                        
                        // MRC: balances the alloc of videoWriter above, once
                        // writing has fully completed.
                        [videoWriter release];
                        
                        printf("=========== videoWriter save thumbnailImage \n");
                        
                        
                        // Grab a thumbnail from the finished file at t = 1 s
                        // and persist it alongside the video.
                        [self thumbnailImageForVideo:[NSURL fileURLWithPath:[self.directoryPath stringByAppendingPathComponent:recordFileName]] atTime:1];
                        if (self.thumbnailImage) {
                            [self saveImageToFile:self.thumbnailImage :thumbnailImagePath];
                        }
                        
                        NSLog(@"self.thumbnailImage.size.width=%f, self.thumbnailImage.size.height=%f", self.thumbnailImage.size.width, self.thumbnailImage.size.height);
                        NSData *dataObj = UIImageJPEGRepresentation(self.thumbnailImage, 0.1);
                        NSLog(@"self.thumbnailImage dataObj.length=%d", dataObj.length);
                        
                        printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                        // Notify the parent on the main queue; which callback
                        // fires is chosen at compile time.
                        dispatch_async(dispatch_get_main_queue(), ^{
                            
#if defined(LOCAL_SOURCE2)
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted2:)])
                                [self.ParentID recordCompleted2:fileName];
#else
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted)])
                                [self.ParentID recordCompleted];
#endif
                            
#if defined(P2PPETWANT)
                            // This build variant also exports to the Camera Roll.
                            [self saveVideoToCameraRoll:[self.directoryPath stringByAppendingPathComponent:recordFileName]];
#endif
                        });
                    }];
                    //[videoWriter release];
                    
                    
                    // Release any pending frame that was never appended.
                    if (buffer)
                    {
                        CFRelease(buffer);
                        buffer = NULL;
                    }
                    
                    break;
                }
                
                // Queue empty but still recording — nothing to do this pass.
                if ([imageArr count] == 0 && isVideo == YES)
                {
                    //[self captureImage];
                }
                else
                {
                    
                    // Convert the head of the queue to a pixel buffer, but
                    // only if the previous frame was successfully appended.
                    if (buffer==NULL)
                    {
                        UIImage *image = (UIImage *)[imageArr objectAtIndex:0];
                        CGImageRef cgImageRef = [image CGImage];
                        buffer = [self pixelBufferFromCGImage21:cgImageRef VideoWidth:width VideoHeight:height];
                        
                        //NSLog(@"buffer 1 :%@", buffer);
                    }
                    
                    if (buffer)
                    {
                        // Timestamp = elapsed wall-clock time in ms, at a
                        // 1000 Hz timescale, so playback matches real time.
                        CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - startTime) * 1000;
                        CMTime currentSampleTime = CMTimeMake((int)interval, 1000);
                        
                        //if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 25)])
                        if(![adaptor appendPixelBuffer:buffer withPresentationTime:currentSampleTime])
                            printf("FAIL");
                        else
                        {
                            
                            ++frame;
                            
                            // Frame consumed: drop it from the queue and
                            // release the converted buffer.
                            if (imageArr.count > 0) {
                                //printf(" |imageArr.count=%d|, removing buffer……", imageArr.count);
                                [imageArr removeObjectAtIndex:0];
                            }
                            
                            CFRelease(buffer);
                            buffer = NULL;
                            
                            
                            dispatch_async(dispatch_get_main_queue(), ^{
                                
                                if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordRunning)])
                                    [self.ParentID recordRunning];
                            });
                        }
                    }
                    
                }
                //printf(" total frame %d\n", frame);
                
                // ~10 ms poll interval to avoid spinning the CPU.
                usleep(10*1000);
            }
        }];
        
        printf("=========== writerInput requestMediaDataWhenReadyOnQueue\n");
    }
}


-(void)startRecordH264__:(NSString *)fileName didReceiveRawDataFrame:(const char *)imgData VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    
    NSString *thumbnailImageFileName = [NSString stringWithFormat:@"%@.t.jpg", fileName];
    NSString *recordFileName = [NSString stringWithFormat:@"%@.mp4", fileName];
    NSString *thumbnailImagePath = [self.directoryPath stringByAppendingPathComponent:thumbnailImageFileName];
    ;
    NSString *recordPath = [self.directoryPath stringByAppendingPathComponent:recordFileName];
    
    NSLog(@"thumbnailImageFileName=%@", thumbnailImageFileName);
    NSLog(@"recordFileName=%@", recordFileName);
    NSLog(@"thumbnailImagePath=%@", thumbnailImagePath);
    NSLog(@"recordPath=%@", recordPath);
    
    if (self.startRecordFlag) {
        //[self startRecordFrame:adaptor0 didReceiveRawDataFrame:imgData VideoWidth:width VideoHeight:height];
        
        //return;
    }
    
    self.startRecordFlag = YES;
    
    isVideo = YES;
    
    if([[NSFileManager defaultManager] fileExistsAtPath:recordPath])
    {
        //remove the old one
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    }
    
    //for clearing all image
    [imageArr removeAllObjects];
    
    if (isVideo == YES)
    {
        printf("=========== isVideo == YES\n");
        
        NSError *error = nil;
        
        unlink([recordPath UTF8String]);
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:recordPath]
                                                               fileType:AVFileTypeQuickTimeMovie
                                                                  error:&error];
        NSParameterAssert(videoWriter);
        if(error)
            NSLog(@"error >>>>");
        
        //NSLog(@"error = %@", [error localizedDescription]);
        
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:height], AVVideoHeightKey, nil];
        AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
        
        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                         assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        
        if ([videoWriter canAddInput:writerInput])
            NSLog(@"ok");
        else
            NSLog(@"……");
        
        [videoWriter addInput:writerInput];
        
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        
        dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
        int __block frame = 0;
        
        startTime = CFAbsoluteTimeGetCurrent();
        
        [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
            printf("writerInput is->>>>>>>>>%i\n",[writerInput isReadyForMoreMediaData]);
            CVPixelBufferRef buffer = NULL;
            while ([writerInput isReadyForMoreMediaData])
            {
                //printf("imageArr->%d,isVideo ---->%i ", [imageArr count], isVideo);
                if([imageArr count] == 0 && isVideo == NO)
                {
                    isVideo = YES;
                    [writerInput markAsFinished];
                    //[videoWriter finishWriting];
                    [videoWriter finishWritingWithCompletionHandler:^{
                        
                        /*
                         printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                         if ([ParentID respondsToSelector:@selector(recordCompleted)])
                         {
                         printf("=========== ParentID performSelectorOnMainThread\n");
                         [ParentID performSelectorOnMainThread:@selector(recordCompleted) withObject:nil waitUntilDone:YES];
                         }
                         */
                        
                        
                        /*
                         printf("=========== albumPath UISaveVideoAtPathToSavedPhotosAlbum\n");
                         NSString *albumPath = [[NSString alloc] init];
                         NSString * filePath = [albumPath stringByAppendingPathComponent:moviePath];
                         UISaveVideoAtPathToSavedPhotosAlbum(filePath, self,
                         @selector(video:didFinishSavingWithError:contextInfo:), nil);
                         [albumPath release];
                         */
                        
                        [videoWriter release];
                        
                        printf("=========== videoWriter save thumbnailImage \n");
                        
                        
                        [self thumbnailImageForVideo:[NSURL fileURLWithPath:[self.directoryPath stringByAppendingPathComponent:recordFileName]] atTime:1];
                        if (self.thumbnailImage) {
                            [self saveImageToFile:self.thumbnailImage :thumbnailImagePath];
                        }
                        
                        NSLog(@"self.thumbnailImage.size.width=%f, self.thumbnailImage.size.height=%f", self.thumbnailImage.size.width, self.thumbnailImage.size.height);
                        NSData *dataObj = UIImageJPEGRepresentation(self.thumbnailImage, 0.1);
                        NSLog(@"self.thumbnailImage dataObj.length=%d", dataObj.length);
                        
                        printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                        dispatch_async(dispatch_get_main_queue(), ^{
                            
#if defined(LOCAL_SOURCE2)
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted2:)])
                                [self.ParentID recordCompleted2:fileName];
#else
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted)])
                                [self.ParentID recordCompleted];
#endif
                            
                            
                            
                        });
                    }];
                    //[videoWriter release];
                    
                    
                    if (buffer)
                    {
                        CFRelease(buffer);
                        buffer = NULL;
                    }
                    
                    break;
                }
                
                if ([imageArr count] == 0 && isVideo == YES)
                {
                    //[self captureImage];
                }
                else
                {
                    
                    if (buffer==NULL)
                    {
                        UIImage *image = (UIImage *)[imageArr objectAtIndex:0];
                        CGImageRef cgImageRef = [image CGImage];
                        buffer = [self pixelBufferFromCGImage2:cgImageRef VideoWidth:width VideoHeight:height];
                        
                        //NSLog(@"buffer 1 :%@", buffer);
                    }
                    
                    if (buffer)
                    {
                        CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - startTime) * 1000;
                        CMTime currentSampleTime = CMTimeMake((int)interval, 1000);
                        
                        //if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 25)])
                        if(![adaptor appendPixelBuffer:buffer withPresentationTime:currentSampleTime])
                            printf("FAIL");
                        else
                        {
                            
                            ++frame;
                            
                            if (imageArr.count > 0) {
                                //printf(" |imageArr.count=%d|, removing buffer……", imageArr.count);
                                [imageArr removeObjectAtIndex:0];
                            }
                            
                            CFRelease(buffer);
                            buffer = NULL;
                            
                            
                            dispatch_async(dispatch_get_main_queue(), ^{
                                
                                if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordRunning)])
                                    [self.ParentID recordRunning];
                            });
                        }
                    }
                    
                }
                //printf(" total frame %d\n", frame);
                
                usleep(10*1000);
            }
        }];
        
        printf("=========== writerInput requestMediaDataWhenReadyOnQueue\n");
    }
}



// Starts an MP4 recording session named "<fileName>.mp4" inside directoryPath.
// imgData/size carry one JPEG-encoded frame; its decoded dimensions seed the
// H.264 output size. Queued frames in imageArr are drained asynchronously on
// a serial queue until -stopRecord sets isVideo = NO, after which the writer
// is finalized, a thumbnail ("<fileName>.t.jpg") is extracted and saved, and
// the delegate (ParentID) is notified on the main thread.
// NOTE(review): heavy async/MRC ownership interplay — code left untouched,
// comments only.
-(void)startRecordJPEG__:(NSString *)fileName didReceiveRawDataFrame:(const char *)imgData DataSize:(NSInteger)size
{
    // Derived artifact names: thumbnail sits next to the movie file.
    NSString *thumbnailImageFileName = [NSString stringWithFormat:@"%@.t.jpg", fileName];
    NSString *recordFileName = [NSString stringWithFormat:@"%@.mp4", fileName];
    NSString *thumbnailImagePath = [self.directoryPath stringByAppendingPathComponent:thumbnailImageFileName];
    ;
    NSString *recordPath = [self.directoryPath stringByAppendingPathComponent:recordFileName];
    
    NSLog(@"thumbnailImageFileName=%@", thumbnailImageFileName);
    NSLog(@"recordFileName=%@", recordFileName);
    NSLog(@"thumbnailImagePath=%@", thumbnailImagePath);
    NSLog(@"recordPath=%@", recordPath);
    
    // NOTE(review): the re-entrancy guard is disabled (both statements are
    // commented out), so a second call while already recording falls through
    // and rebuilds the writer — confirm this is intended.
    if (self.startRecordFlag) {
        //[self startRecordFrame:adaptor0 didReceiveRawDataFrame:imgData VideoWidth:width VideoHeight:height];
        
        //return;
    }
    
    self.startRecordFlag = YES;
    
    // Replace any previous recording that used the same file name.
    if([[NSFileManager defaultManager] fileExistsAtPath:recordPath])
    {
        //remove the old one
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    }
    
    //for clearing all image
    [imageArr removeAllObjects];
    
    if (isVideo == YES)
    {
        // Decode the first frame just to learn the movie's pixel dimensions.
        NSData *data = [[NSData alloc] initWithBytes:imgData length:size];
        UIImage *img = [[UIImage alloc] initWithData:data];
        
        NSInteger width = img.size.width;
        NSInteger height = img.size.height;
        
        NSError *error = nil;
        
        // Remove any stale file at the target path before the writer opens it.
        unlink([recordPath UTF8String]);
        // Ownership note: this +1 writer is released inside the
        // finishWritingWithCompletionHandler block below.
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:recordPath]
                                                               fileType:AVFileTypeMPEG4
                                                                  error:&error];
        NSParameterAssert(videoWriter);
        if(error)
            NSLog(@"error >>>>");
        
        //NSLog(@"error = %@", [error localizedDescription]);
        
        // H.264 output sized to the first frame's decoded dimensions.
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:height], AVVideoHeightKey, nil];
        AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        // Source buffers are 32-bit ARGB, matching pixelBufferFromCGImage4.
        NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
        
        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                         assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        
        if ([videoWriter canAddInput:writerInput])
            NSLog(@"ok");
        else
            NSLog(@"……");
        
        [videoWriter addInput:writerInput];
        
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];
        
        // NOTE(review): under MRC (pre-iOS 6 deployment) this queue should be
        // dispatch_release'd eventually — confirm deployment target.
        dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
        int __block frame = 0;
        
        // Wall-clock origin used to compute each frame's presentation time.
        startTime = CFAbsoluteTimeGetCurrent();
        
        [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
            printf("writerInput is->>>>>>>>>%i\n",[writerInput isReadyForMoreMediaData]);
            CVPixelBufferRef buffer = NULL;
            while ([writerInput isReadyForMoreMediaData])
            {
                //printf("imageArr->%d,isVideo ---->%i ", [imageArr count], isVideo);
                // Stop condition: queue drained AND -stopRecord has flipped
                // isVideo to NO. Finalize the movie and clean up.
                if([imageArr count] == 0 && isVideo == NO)
                {
                    isVideo = YES;
                    [writerInput markAsFinished];
                    //[videoWriter finishWriting];
                    [videoWriter finishWritingWithCompletionHandler:^{
                        
                        /*
                         printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                         if ([ParentID respondsToSelector:@selector(recordCompleted)])
                         {
                         printf("=========== ParentID performSelectorOnMainThread\n");
                         [ParentID performSelectorOnMainThread:@selector(recordCompleted) withObject:nil waitUntilDone:YES];
                         }
                         */
                        
                        
                        /*
                         printf("=========== albumPath UISaveVideoAtPathToSavedPhotosAlbum\n");
                         NSString *albumPath = [[NSString alloc] init];
                         NSString * filePath = [albumPath stringByAppendingPathComponent:moviePath];
                         UISaveVideoAtPathToSavedPhotosAlbum(filePath, self,
                         @selector(video:didFinishSavingWithError:contextInfo:), nil);
                         [albumPath release];
                         */
                        
                        // Balances the alloc/init of videoWriter above.
                        [videoWriter release];
                        
                        printf("=========== videoWriter save thumbnailImage \n");
                        
                        
                        // Grab a frame at t=1s as the thumbnail and persist it.
                        [self thumbnailImageForVideo:[NSURL fileURLWithPath:[self.directoryPath stringByAppendingPathComponent:recordFileName]] atTime:1];
                        if (self.thumbnailImage) {
                            [self saveImageToFile:self.thumbnailImage :thumbnailImagePath];
                        }
                        
                        NSLog(@"self.thumbnailImage.size.width=%f, self.thumbnailImage.size.height=%f", self.thumbnailImage.size.width, self.thumbnailImage.size.height);
                        NSData *dataObj = UIImageJPEGRepresentation(self.thumbnailImage, 0.1);
                        // NOTE(review): %d paired with NSUInteger length — mismatched on 64-bit.
                        NSLog(@"self.thumbnailImage dataObj.length=%d", dataObj.length);
                        
                        printf("=========== videoWriter finishWritingWithCompletionHandler\n");
                        // Notify the delegate on the main thread; which callback
                        // fires depends on the LOCAL_SOURCE2 build flag.
                        dispatch_async(dispatch_get_main_queue(), ^{
                            
#if defined(LOCAL_SOURCE2)
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted2:)])
                                [self.ParentID recordCompleted2:fileName];
#else
                            if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordCompleted)])
                                [self.ParentID recordCompleted];
#endif
                        });
                    }];
                    //[videoWriter release];
                    
                    
                    // Drop any pixel buffer left over from an unappended frame.
                    if (buffer)
                    {
                        CFRelease(buffer);
                        buffer = NULL;
                    }
                    
                    break;
                }
                // Queue empty but still recording: nothing to append this pass.
                if ([imageArr count] == 0 && isVideo == YES)
                {
                    //[self captureImage];
                }
                else
                {
                    // Convert the oldest queued UIImage to an ARGB pixel buffer.
                    if (buffer==NULL)
                    {
                        buffer = [self pixelBufferFromCGImage4:[[imageArr objectAtIndex:0] CGImage]];
                        //NSLog(@"buffer 1 :%@", buffer);
                        
                    }
                    
                    if (buffer)
                    {
                        // Presentation time = wall-clock ms since recording start
                        // on a 1/1000 timescale (variable frame rate).
                        CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - startTime) * 1000;
                        CMTime currentSampleTime = CMTimeMake((int)interval, 1000);
                        //NSLog(@"buffer 2");
                        //if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 25)])
                        if(![adaptor appendPixelBuffer:buffer withPresentationTime:currentSampleTime])
                            printf("FAIL");
                        else
                        {
                            ++frame;
                            
                            // Pop the frame we just encoded.
                            if (imageArr.count > 0) {
                                //printf(" |imageArr.count=%d|, removing buffer……", imageArr.count);
                                [imageArr removeObjectAtIndex:0];
                            }
                            
                            CFRelease(buffer);
                            buffer = NULL;
                            
                            // Progress callback on the main thread.
                            dispatch_async(dispatch_get_main_queue(), ^{
                                
                                if (self.ParentID && [self.ParentID respondsToSelector:@selector(recordRunning)])
                                    [self.ParentID recordRunning];
                            });
                        }
                    }
                    
                }
                //printf(" total frame %d\n", frame);
                
                // Throttle the drain loop (~100 Hz max).
                usleep(10*1000);
            }
        }];
        
        // Release the probe data/image used only for dimension discovery.
        [data release];
        [img release];
        data = nil;
        img = nil;
    }
}


// Decodes one JPEG frame from raw bytes and enqueues it for the recorder.
// imgData: pointer to JPEG-encoded bytes; size: byte count (<= 0 is ignored).
- (void)setJPEGImgFrame:(const char *)imgData DataSize:(NSInteger)size
{
    // Reject empty or invalid payloads up front.
    if (imgData == NULL || size <= 0) {
        return;
    }
    
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    
    NSData *data = [[NSData alloc] initWithBytes:imgData length:size];
    UIImage *img = [[UIImage alloc] initWithData:data];
    
    // A freshly allocated NSLock per call can never contend with another
    // thread, so it synchronized nothing; serialize on the shared array
    // instead. Also guard against nil (undecodable JPEG) — addObject:nil throws.
    if (img) {
        @synchronized (imageArr) {
            [imageArr addObject:img];
        }
    }
    
    [data release];
    [img release];
    
    [pool release];
}


// Converts a CVImageBuffer (BGRA) into a UIImage, appends it to the frame
// queue, and returns it. The caller does not own the returned image.
- (UIImage *) displayImage:(CVImageBufferRef)imageBuffer
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    
    CVImageBufferRef buffer = imageBuffer;
    
    CVPixelBufferLockBaseAddress(buffer, 0);
    
    // Read the raw pixel geometry from the CVImageBuffer.
    uint8_t *base = CVPixelBufferGetBaseAddress(buffer);
    size_t width = CVPixelBufferGetWidth(buffer);
    size_t height = CVPixelBufferGetHeight(buffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
    
    // Wrap the pixel data in a bitmap context (little-endian, premultiplied
    // first alpha, i.e. BGRA).
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef cgContext = CGBitmapContextCreate (base, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    
    // Snapshot the context into a UIImage.
    CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(cgContext);
    
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    
    NSLog(@"displayImage imageArr.count:%lu, image.size.width:%f, image.size.height:%f",
          (unsigned long)imageArr.count, image.size.width, image.size.height);
    
    // A per-call NSLock never contends; serialize on the shared array.
    @synchronized (imageArr) {
        [imageArr addObject:image];
    }
    
    // BUG FIX: `image` was autoreleased into the local pool and only kept
    // alive by imageArr — which the writer queue drains concurrently. Retain
    // it across the pool drain and hand it back autoreleased so the returned
    // pointer stays valid for the caller.
    [image retain];
    [pool release];
    
    return [image autorelease];
}

/*
 - (UIImage *) displayImage2:(const char *)imgData VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
 {
 
 NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
 NSLock *aLock = [NSLock new];
 [aLock lock];
 
 
 
 //利用取得影像細部資訊格式化 CGContextRef
 CGColorSpaceRef colorSpace;
 CGContextRef cgContext;
 colorSpace = CGColorSpaceCreateDeviceRGB();
 cgContext = CGBitmapContextCreate (base, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
 
 CGColorSpaceRelease(colorSpace);
 
 //透過 CGImageRef 將 CGContextRef 轉換成 UIImage
 CGImageRef cgImage;
 UIImage *image;
 cgImage = CGBitmapContextCreateImage(cgContext);
 image = [UIImage imageWithCGImage:cgImage];
 CGImageRelease(cgImage);
 CGContextRelease(cgContext);
 
 
 NSLog(@"displayImage imageArr.count:%lu, image.size.width:%f, image.size.height:%f",
 (unsigned long)imageArr.count, image.size.width, image.size.height);
 
 [imageArr addObject:image];
 
 
 //        NSString *fileName = [Utilities documentsPath:[NSString stringWithFormat:@"image%i.png",tmp]];
 //    tmp ++;
 //    [UIImagePNGRepresentation(image) writeToFile:fileName atomically:YES];
 //    NSError *error;
 //    NSFileManager *fileMgr = [NSFileManager defaultManager];
 //    NSLog(@"Documents directory: %@", [fileMgr contentsOfDirectoryAtPath:fileName error:&error]);
 
 //成功轉換成 UIImage
 //    self.imageView.image = [UIImage imageNamed:@"image3"];
 //    [self.imageView setImage:image];
 
 [aLock unlock];
 [aLock release];
 [pool release];
 
 return image;
 }
 */

// Builds a UIImage from a packed 24-bit RGB buffer (no alpha) and enqueues it.
// imgData must hold at least width*height*3 bytes and remain valid for the
// duration of this call (the CFData wrapper does not copy it).
- (void)setImgFrame:(const char *)imgData VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Reject NULL data and degenerate dimensions.
    if (imgData == NULL || width <= 0 || height <= 0) {
        return;
    }
    
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    
    // No-copy wrapper around the caller's buffer (kCFAllocatorNull: never freed by CF).
    CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, (const unsigned char *) imgData, width * height * 3,kCFAllocatorNull);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // 8 bits/channel, 24 bits/pixel, row stride = width*3.
    CGImageRef imgRef = CGImageCreate(width, height, 8, 24, width * 3, colorSpace, kCGBitmapByteOrderDefault, provider, NULL, YES, kCGRenderingIntentDefault);
    
    // Only enqueue when image creation succeeded; also fixes the useless
    // per-call NSLock by synchronizing on the shared array instead.
    if (imgRef) {
        UIImage *img = [[UIImage alloc] initWithCGImage:imgRef];
        @synchronized (imageArr) {
            [imageArr addObject:img];
        }
        [img release];
        CGImageRelease(imgRef);
    }
    
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    CFRelease(data);
    
    [pool release];
}

// Variant of setImgFrame: that wraps the caller's packed 24-bit RGB buffer in
// a CGDataProvider directly (still no copy — imgData must outlive this call).
- (void)setImgFrame0:(const char *)imgData VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Reject NULL data and degenerate dimensions.
    if (imgData == NULL || width <= 0 || height <= 0) {
        return;
    }
    
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, imgData, width * height * 3, NULL);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef imgRef = CGImageCreate(width, height, 8, 24, width * 3, colorSpace, kCGBitmapByteOrderDefault, provider, NULL, true,  kCGRenderingIntentDefault);
    
    // Only enqueue on success; a freshly allocated NSLock per call guarded
    // nothing, so use a shared monitor on the array instead.
    if (imgRef) {
        UIImage *img = [[UIImage alloc] initWithCGImage:imgRef];
        @synchronized (imageArr) {
            [imageArr addObject:img];
        }
        [img release];
        CGImageRelease(imgRef);
    }
    
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    
    [pool release];
}

// Enqueues an already-decoded UIImage frame for the recorder.
// width/height act only as a sanity gate; the image is stored as-is.
- (void)setImgFrame1:(UIImage *)img VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Guard against invalid dimensions and nil (addObject:nil throws).
    if (img == nil || width <= 0 || height <= 0) {
        return;
    }
    
    // A per-call NSLock can never contend with another caller; synchronize
    // on the shared array instead. (The autorelease pool was dropped — this
    // method creates no autoreleased objects.)
    @synchronized (imageArr) {
        [imageArr addObject:img];
    }
}

// Variant of setImgFrame0: (packed 24-bit RGB input, no copy).
- (void)setImgFrame2:(const char *)imgData VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Reject NULL data and degenerate dimensions.
    if (imgData == NULL || width <= 0 || height <= 0) {
        return;
    }
    
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, imgData, width * height * 3, NULL);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef imgRef = CGImageCreate(width, height, 8, 24, width * 3, colorSpace, kCGBitmapByteOrderDefault, provider, NULL, true,  kCGRenderingIntentDefault);
    
    if (imgRef != NULL) {
        // BUG FIX: the original sent -release to the AUTORELEASED result of
        // +imageWithCGImage:, over-releasing the image (eventual crash when
        // the pool drained). Use alloc/init so the explicit release balances.
        UIImage *img = [[UIImage alloc] initWithCGImage:imgRef];
        // Per-call NSLock synchronized nothing; serialize on the array.
        @synchronized (imageArr) {
            [imageArr addObject:img];
        }
        [img release];
        CGImageRelease(imgRef);
    }
    
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    
    [pool release];
}


// Returns the queued frame at `index`, or nil when out of range.
// The caller does not own the returned image.
- (UIImage *)imgFrame:(NSInteger)index
{
    UIImage *img = nil;
    
    // %lu/(unsigned long) — the original %d mismatched NSUInteger on 64-bit.
    NSLog(@"imageArr.count=%lu", (unsigned long)[imageArr count]);
    // BUG FIX: the original only checked count > 0, so any index >= count
    // (or negative) raised an NSRangeException. Bounds-check the index itself.
    if (index >= 0 && index < (NSInteger)[imageArr count]) {
        img = [imageArr objectAtIndex:index];
        
        NSLog(@"img.size.width=%f, img.size.height=%f", img.size.width, img.size.height);
    }
    
    return img;
}


// Renders `image` into a freshly created 32-bit ARGB CVPixelBuffer of the
// requested video dimensions. The caller owns the returned buffer and must
// CFRelease it. Uses the cached `options` dictionary set up in -readyGo:.
-(CVPixelBufferRef)pixelBufferFromCGImage2:(CGImageRef)image VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Allocate the destination buffer.
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pixelBuffer);
    NSParameterAssert(result == kCVReturnSuccess && pixelBuffer != NULL);
    
    // Draw directly into the buffer's backing store via a bitmap context.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    NSParameterAssert(baseAddress != NULL);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef bitmapContext = CGBitmapContextCreate(baseAddress, width, height, 8, 4*width, colorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(bitmapContext);
    
    // Draw at the image's own size, anchored at the origin.
    CGContextDrawImage(bitmapContext, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    
    CGContextRelease(bitmapContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
    return pixelBuffer;
}

// Like pixelBufferFromCGImage2:, but builds its own compatibility options and
// centers the source image inside the (possibly larger) target frame.
// Caller owns the returned buffer and must CFRelease it.
-(CVPixelBufferRef)pixelBufferFromCGImage21:(CGImageRef)image VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Flags allowing the buffer to back both CGImage and bitmap-context use.
    NSDictionary *bufferOptions = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
                                    (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
    
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef) bufferOptions,
                                          &pixelBuffer);
    NSParameterAssert(result == kCVReturnSuccess && pixelBuffer != NULL);
    
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    NSParameterAssert(baseAddress != NULL);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef bitmapContext = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                       4*width, colorSpace,
                                                       kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(bitmapContext);
    
    // Center the source image within the target frame.
    CGRect drawRect = CGRectMake((width - CGImageGetWidth(image))/2,
                                 (height - CGImageGetHeight(image))/2,
                                 CGImageGetWidth(image),
                                 CGImageGetHeight(image));
    CGContextDrawImage(bitmapContext, drawRect, image);
    
    CGContextRelease(bitmapContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
    return pixelBuffer;
}

// Converts a CGImage into a 32-bit ARGB CVPixelBuffer sized to the image
// itself (unlike the *2/*21 variants, which take explicit dimensions).
// Uses the cached `options` dictionary from -readyGo:. Caller owns the
// returned buffer and must CFRelease it.
- (CVPixelBufferRef) pixelBufferFromCGImage4: (CGImageRef) image
{
    // Target dimensions come straight from the source image.
    size_t imgWidth = CGImageGetWidth(image);
    size_t imgHeight = CGImageGetHeight(image);
    
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, imgWidth, imgHeight,
                                          kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
                                          &pixelBuffer);
    NSParameterAssert(result == kCVReturnSuccess && pixelBuffer != NULL);
    
    // Render the image into the buffer's backing memory.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef bitmapContext = CGBitmapContextCreate(baseAddress, imgWidth, imgHeight,
                                                       8, 4*imgWidth, colorSpace,
                                                       kCGImageAlphaNoneSkipFirst);
    
    CGContextDrawImage(bitmapContext, CGRectMake(0, 0, imgWidth, imgHeight), image);
    
    CGContextRelease(bitmapContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
    return pixelBuffer;
}


// Stashes the latest pixel buffer for the recording loop and flags it as new.
// NOTE(review): the buffer is assigned WITHOUT CVPixelBufferRetain, so the
// caller must keep it alive until it is consumed — confirm against callers.
// width/height are currently unused; no synchronization guards this write.
- (void)setImagePixelBufferFrame:(CVPixelBufferRef)buffer VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height {
    
    imagePixelBuffer = buffer;
    isNewImagePixelBuffer = YES;
}

// Enqueues an already-decoded UIImage frame (same contract as setImgFrame1:).
- (void)setUIImageFrame:(UIImage *)img VideoWidth:(NSInteger)width VideoHeight:(NSInteger)height
{
    // Guard against invalid dimensions and nil (addObject:nil throws).
    if (img == nil || width <= 0 || height <= 0) {
        return;
    }
    
    // A freshly allocated NSLock per call never contends and thus guarded
    // nothing; synchronize on the shared array instead. The autorelease pool
    // was dropped — nothing here is autoreleased.
    @synchronized (imageArr) {
        [imageArr addObject:img];
    }
}



// Requests the recording to stop: the writer's drain loop finalizes the
// movie on its next pass once the frame queue is empty and isVideo is NO.
-(void)stopRecord
{
    printf("stopRecord is called\n");
    
    // Picked up asynchronously by the requestMediaDataWhenReadyOnQueue block.
    isVideo = NO;
}

// Records the view to capture from and caches the pixel-buffer compatibility
// options once, so the pixelBufferFromCGImage* helpers don't rebuild the
// dictionary on every frame.
-(void)readyGo:(UIView *)aView
{
    // Not retained here — the original assigned without ownership.
    targetView = aView;
    
    // alloc/init yields the same +1 ownership the original achieved with
    // an autoreleased dictionary plus an explicit retain.
    options = [[NSDictionary alloc] initWithObjectsAndKeys:
               [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
               [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
}

// One-time setup called from -init: creates the queue that buffers incoming
// frames until the writer drains them.
-(void)readyInit
{
    imageArr = [NSMutableArray new];
}

// MRC teardown. Releases owned state exactly once.
-(void)dealloc
{
    printf("dealloc in %s\n", [[[self class] description] UTF8String]);
    //[self.directoryPath release];
    //self.directoryPath = nil;
    
    // Clear the delegate back-reference (not owned).
    ParentID = nil;
    // BUG FIX: a BOOL property is reset with NO, not nil.
    startRecordFlag = NO;
    
    // BUG FIX: the original did "[self.x release]; self.x = nil;" — with a
    // retain/copy setter the nil assignment releases the old value a second
    // time (over-release/crash). Release the backing ivars directly, once,
    // which is also the recommended pattern in dealloc.
    [videoPath release];
    videoPath = nil;
    [videoFileName release];
    videoFileName = nil;
    [thumbnailImage release];
    thumbnailImage = nil;
    
    [imageArr release];
    [options release];
    [super dealloc];
}






@end
