//
//  CameraViewController.m
//  VedioEncoder
//
//  Created by mac on 2017/6/21.
//  Copyright © 2017年 mac. All rights reserved.
//
#import "PushStream.h"
#import "H264Encoder.h"
#import <AVFoundation/AVFoundation.h>
#import "CameraViewController.h"

@interface CameraViewController ()<AVCaptureAudioDataOutputSampleBufferDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,H264EncoderDelegate>
{
    dispatch_queue_t audioQueue;
    dispatch_queue_t videoQueue;
    NSFileHandle *fileHandle;
    NSString *h264File;
}

@property (nonatomic,strong)AVCaptureSession *session;

//@property (nonatomic,strong)AVCaptureDeviceInput *frontCamera;
@property (nonatomic,strong)AVCaptureDeviceInput *backCamera;
@property (nonatomic,strong)AVCaptureDeviceInput *microPhone;

@property (nonatomic,strong)AVCaptureAudioDataOutput *autoOutput;
@property (nonatomic,strong)AVCaptureVideoDataOutput *videoOutput;

@property (nonatomic,strong)AVCaptureConnection *autoConnection;
@property (nonatomic,strong)AVCaptureConnection *videoConnection;


@property (nonatomic,strong)AVCaptureVideoPreviewLayer *preLayer;



@end

@implementation CameraViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Prepare a local .h264 dump file (Documents/11.h264) so the raw
    // elementary stream can be inspected alongside the live push.
    h264File = [[NSHomeDirectory() stringByAppendingPathComponent:@"Documents"]
                stringByAppendingPathComponent:@"11.h264"];
    [[NSFileManager defaultManager] createFileAtPath:h264File contents:nil attributes:nil];
    fileHandle = [NSFileHandle fileHandleForWritingAtPath:h264File];

    // One serial queue per output keeps sample-buffer callbacks ordered.
    audioQueue = dispatch_queue_create("audio", DISPATCH_QUEUE_SERIAL);
    videoQueue = dispatch_queue_create("video", DISPATCH_QUEUE_SERIAL);

    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetHigh;

    // Surface device-setup failures instead of silently passing error:nil.
    NSError *inputError = nil;
    self.backCamera = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCameraDevice]
                                                             error:&inputError];
    if (self.backCamera == nil) {
        NSLog(@"Failed to create back-camera input: %@", inputError);
    }
    inputError = nil;
    self.microPhone = [[AVCaptureDeviceInput alloc] initWithDevice:[self microphoneDevice]
                                                             error:&inputError];
    if (self.microPhone == nil) {
        NSLog(@"Failed to create microphone input: %@", inputError);
    }

    self.preLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    // Track the actual view size rather than a hard-coded 375x667 (iPhone 6/7 only).
    self.preLayer.frame = self.view.bounds;
    //self.preLayer.transform = CATransform3DMakeRotation(M_PI/2.0, 0, 0, 0);
    [self.view.layer addSublayer:self.preLayer];

    // Guard each add: AVCaptureSession raises an exception if an input/output
    // is rejected (e.g. nil input after a failed device lookup).
    if (self.backCamera != nil && [self.session canAddInput:self.backCamera]) {
        [self.session addInput:self.backCamera];
    }
    if (self.microPhone != nil && [self.session canAddInput:self.microPhone]) {
        [self.session addInput:self.microPhone];
    }

    self.autoOutput = [[AVCaptureAudioDataOutput alloc] init];
    [self.autoOutput setSampleBufferDelegate:self queue:audioQueue];

    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.videoSettings =
        @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    [self.videoOutput setSampleBufferDelegate:self queue:videoQueue];

    if ([self.session canAddOutput:self.autoOutput]) {
        [self.session addOutput:self.autoOutput];
    }
    if ([self.session canAddOutput:self.videoOutput]) {
        [self.session addOutput:self.videoOutput];
    }

    // Cache the connections so the shared delegate callback can tell
    // audio and video buffers apart.
    self.autoConnection = [self.autoOutput connectionWithMediaType:AVMediaTypeAudio];
    self.videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([self.videoConnection isVideoOrientationSupported]) {
        [self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    }

    H264Encoder *encoder = [H264Encoder defaultEncoder];
    [encoder initConfiguration];
    [encoder setDelegate:self];

    // NOTE(review): -startRunning blocks; Apple recommends calling it off the
    // main queue. Kept here to preserve existing startup ordering.
    [self.session startRunning];
}

/// Returns the first back-facing video capture device, or nil when the
/// hardware has none (e.g. Simulator).
- (AVCaptureDevice *)backCameraDevice
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == AVCaptureDevicePositionBack) {
            return candidate;
        }
    }
    return nil;
}

/// Returns the system's default audio capture device (microphone), or nil
/// when none is available. Uses the dedicated default-device API instead of
/// enumerating all audio devices and taking the first.
- (AVCaptureDevice *)microphoneDevice
{
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
}


#pragma mark - SampleBuffer
/// Shared delegate callback for both the audio and the video data output.
/// Runs on the serial queue registered for the matching output; the cached
/// connection objects identify which stream a buffer belongs to.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (connection == self.autoConnection)
    {
        // TODO: audio samples are currently dropped; encode (e.g. AAC) and
        // forward them if the stream should carry sound.
    }else if(connection == self.videoConnection)
    {
        // Hand the raw frame to the shared hardware H.264 encoder; results
        // come back through the H264EncoderDelegate methods below.
        [[H264Encoder defaultEncoder] encoderSampleBuffer:sampleBuffer];
    }
}


/// H264EncoderDelegate: receives the SPS/PPS parameter sets once the encoder
/// session is configured. Pushes them to the network and, when the debug file
/// handle exists, writes them Annex-B framed to the local dump.
- (void)getSpsPps:(NSData *)sps pps:(NSData *)pps
{
    // Annex-B start code prepended before each parameter set.
    static const char kStartCode[] = "\x00\x00\x00\x01";
    NSData *startCode = [NSData dataWithBytes:kStartCode length:sizeof(kStartCode) - 1];

    [[PushStream defaultStream] sendSps:sps pps:pps];

    // Guard the file dump, consistent with -getEncodeData:isKeyFrame:frameCount:.
    if (fileHandle != nil)
    {
        [fileHandle writeData:startCode];
        [fileHandle writeData:sps];
        [fileHandle writeData:startCode];
        [fileHandle writeData:pps];
    }
}
/// H264EncoderDelegate: receives one encoded NAL unit per call. Writes it
/// Annex-B framed to the local dump file (when open) and always forwards it
/// to the push stream. `frameCount` is currently unused here.
- (void)getEncodeData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame frameCount:(long long)frameCount
{
    if (fileHandle != nil)
    {
        // Prefix each NAL unit with the 4-byte Annex-B start code.
        const char startCode[] = "\x00\x00\x00\x01";
        NSData *header = [NSData dataWithBytes:startCode length:sizeof(startCode) - 1];
        [fileHandle writeData:header];
        [fileHandle writeData:data];
    }

    [[PushStream defaultStream] sendEncodeData:data isKeyFrame:isKeyFrame];
}



@end
