//
//  EpointAVCallViewController.m
//  EpointFrame5
//
//  Created by 李亚春 on 14-1-23.
//  Copyright (c) 2014年 Epoint. All rights reserved.
//

#import "EpointAVCallViewController.h"

@interface EpointAVCallViewController ()

@end

@implementation EpointAVCallViewController

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // A nib-instantiated controller never runs -init, so seed the same
        // capture defaults here. Without this, producerFps stays 0 and
        // CMTimeMake(1, producerFps) in -startVideoCapture would be invalid.
        firstFrame = YES;
        producerFps = 50;
    }
    return self;
}

// Programmatic initializer: seeds the video-capture defaults.
- (id)init
{
    self = [super init];
    if (self) {
        firstFrame = YES;  // first delegate callback not yet received
        producerFps = 50;  // requested capture frame rate, frames per second
    }
    return self;
}


// Implement loadView to create a view hierarchy programmatically, without using a nib.
// Implement loadView to create a view hierarchy programmatically, without using a nib.
// NOTE(review): Apple's docs say a custom -loadView should create the view itself
// and NOT call super; calling super here makes UIKit build the default view first,
// which works but means this could equally live in -viewDidLoad. Confirm intent.
- (void)loadView {
    [super loadView];
    [self createControl];
}

// Legacy lifecycle hook (deprecated and no longer called as of iOS 6);
// kept for compatibility with older iOS versions this code targeted.
- (void)viewDidUnload {
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

#pragma mark -
#pragma mark createControl
#pragma mark -
#pragma mark createControl
// Builds the UI: a status label, Start/Stop buttons, and the local preview view.
- (void)createControl
{
    self.view.backgroundColor = [UIColor grayColor];

    // Status label used by the capture methods to report progress/errors.
    labelState = [[UILabel alloc] initWithFrame:CGRectMake(10, 20, 220, 30)];
    labelState.backgroundColor = [UIColor clearColor];
    [self.view addSubview:labelState];

    // Start button. (Title fixed: was the typo @"Star".)
    btnStartVideo = [[UIButton alloc] initWithFrame:CGRectMake(20, 350, 80, 50)];
    [btnStartVideo setTitle:@"Start" forState:UIControlStateNormal];
    [btnStartVideo setBackgroundImage:[UIImage imageNamed:@"img_man2"] forState:UIControlStateNormal];
    [btnStartVideo addTarget:self action:@selector(startVideoCapture) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btnStartVideo];

    // Stop button.
    UIButton *stop = [[UIButton alloc] initWithFrame:CGRectMake(120, 350, 80, 50)];
    [stop setTitle:@"Stop" forState:UIControlStateNormal];
    [stop setBackgroundImage:[UIImage imageNamed:@"img_man2"] forState:UIControlStateNormal];
    [stop addTarget:self action:@selector(stopVideoCapture:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:stop];

    // Container view that hosts the AVCaptureVideoPreviewLayer.
    localView = [[UIView alloc] initWithFrame:CGRectMake(40, 50, 200, 300)];
    [self.view addSubview:localView];
}
#pragma mark -
#pragma mark VideoCapture
// Returns the front-facing camera if one exists, otherwise the system's
// default video capture device (may be nil on camera-less hardware).
- (AVCaptureDevice *)getFrontCamera
{
    for (AVCaptureDevice *camera in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (camera.position == AVCaptureDevicePositionFront) {
            return camera;
        }
    }
    // No front camera found — fall back to whatever the default video device is.
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
// Opens the (front) camera, configures an AVCaptureSession with a video data
// output delivering frames to this controller on a serial queue, and installs
// a preview layer into localView. Status is reported through labelState.
- (void)startVideoCapture
{
    [labelState setText:@"Starting Video stream"];
    if (avCaptureDevice || avCaptureSession) {
        // A session is already live; don't build a second one.
        [labelState setText:@"Already capturing"];
        return;
    }

    if ((avCaptureDevice = [self getFrontCamera]) == nil) {
        [labelState setText:@"Failed to get valid capture device"];
        return;
    }

    NSError *error = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:avCaptureDevice error:&error];
    if (!videoInput) {
        [labelState setText:@"Failed to get video input"];
        avCaptureDevice = nil;
        return;
    }

    avCaptureSession = [[AVCaptureSession alloc] init];
    avCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
    // Guard with canAddInput:; an unconditional addInput: raises on failure.
    if (![avCaptureSession canAddInput:videoInput]) {
        [labelState setText:@"Failed to get video input"];
        avCaptureSession = nil;
        avCaptureDevice = nil;
        return;
    }
    [avCaptureSession addInput:videoInput];

    // Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Recommended pixel format choices are
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
    // On iPhone 3G, the recommended pixel format choices are kCVPixelFormatType_422YpCbCr8 or kCVPixelFormatType_32BGRA.
    //
    // NOTE(review): per the comment above, width/height keys are not supported
    // by AVCaptureVideoDataOutput.videoSettings — the session preset controls
    // resolution. Kept as-is to preserve existing behavior; confirm and remove.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
                              //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
                              [NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
                              [NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
                              nil];
    avCaptureVideoDataOutput.videoSettings = settings;

    // Deprecated since iOS 5 (use AVCaptureConnection.videoMinFrameDuration);
    // kept for the older deployment target this code was written for.
    avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, producerFps);

    // Serial queue so sample buffers are delivered and processed in order,
    // off the main thread.
    dispatch_queue_t queue = dispatch_queue_create("org.doubango.idoubs", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    if ([avCaptureSession canAddOutput:avCaptureVideoDataOutput]) {
        [avCaptureSession addOutput:avCaptureVideoDataOutput];
    }

    // Live preview of the camera inside localView.
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:avCaptureSession];
    previewLayer.frame = localView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [localView.layer addSublayer:previewLayer];

    firstFrame = YES;
    [avCaptureSession startRunning];

    [labelState setText:@"Video capture started"];
}
// Stops the running capture session (if any) and tears down the preview.
- (void)stopVideoCapture:(id)arg
{
    if (avCaptureSession) {
        [avCaptureSession stopRunning];
        avCaptureSession = nil;
        [labelState setText:@"Video capture stopped"];
    }
    avCaptureDevice = nil;

    // The preview was installed as a CALayer *sublayer* of localView.layer in
    // -startVideoCapture, not as a subview, so it must be detached from the
    // layer tree — removing subviews alone never removed it (old bug).
    // Enumerate copies: removing mutates the collections being walked.
    for (CALayer *layer in [localView.layer.sublayers copy]) {
        [layer removeFromSuperlayer];
    }
    for (UIView *view in [localView.subviews copy]) {
        [view removeFromSuperview];
    }
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
// AVCaptureVideoDataOutputSampleBufferDelegate callback. Runs on the serial
// capture queue (NOT the main thread) for every frame. Currently only logs
// the negotiated pixel format once, on the first frame; the per-frame data
// itself is not consumed here.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the pixel buffer before touching its contents; bail out on failure.
    if (CVPixelBufferLockBaseAddress(pixelBuffer, 0) != kCVReturnSuccess) {
        return;
    }

    if (firstFrame) {
        // Report the capture format the session actually negotiated.
        // OSType (a FourCC), not int, is the correct type for this code.
        OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
        switch (pixelFormat) {
            case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_nv12; // iPhone 3GS or 4
                NSLog(@"Capture pixel format=NV12");
                break;
            case kCVPixelFormatType_422YpCbCr8:
                //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_uyvy422; // iPhone 3
                NSLog(@"Capture pixel format=UYVY422");
                break;
            default:
                //TMEDIA_PRODUCER(producer)->video.chroma = tmedia_rgb32;
                NSLog(@"Capture pixel format=RGB32");
                break;
        }
        firstFrame = NO;
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

// Standard memory-warning pass-through; nothing extra is released here.
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
