//
//  VideoSession.m
//  OneToOne
//
//  Created by ByteDance on 2023/2/20.
//
#import "VideoH264Encoder.h"
#import "VideoSession.h"
#import "VideoTransform.h"
#import "Segmenter.h"
#import "UIUtilities.h"
#import "MetalView.h"
#import <CoreMedia/CoreMedia.h>
// Private class extension: capture pipeline wiring and preview surfaces.
// Conforms to the capture-output delegate (camera frames in) and the
// SegmenterDelegate (segmented frames out — see the "recevice…" callbacks).
@interface VideoSession() <AVCaptureVideoDataOutputSampleBufferDelegate,SegmenterDelegate>
// Camera capture graph: session + front camera device + input + BGRA data output.
@property (nonatomic, strong) AVCaptureSession *videoSession;
@property (nonatomic, strong) AVCaptureDevice *videoDevice;
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
// Serial queue created alongside the session; used for session work off the main thread.
@property (nonatomic) dispatch_queue_t sessionQueue;

// Segmenter processes each captured sample buffer (project type — see Segmenter.h).
@property (nonatomic, strong) Segmenter *segmenter;
// H.264 encoder for outbound frames (640x480 — see -encoder).
@property (nonatomic, strong) VideoH264Encoder *encoder;
// NOTE(review): metalView and layer are created in -init but never attached to a
// view hierarchy in this file — presumably used elsewhere or leftover; confirm.
@property (nonatomic, strong) MetalView *metalView;
// Local ("本端") preview surface, installed by -bindToWindow:.
@property (nonatomic, strong) UIImageView *imageView;
@property (nonatomic, strong) AVSampleBufferDisplayLayer *layer;

@end

@implementation VideoSession
- (instancetype)init {
    self = [super init];
    if (self) {
        // Each call below invokes a lazy getter purely for its side effects,
        // forcing construction in a specific ORDER. The order matters:
        //   -videoSession calls beginConfiguration (left open on purpose),
        //   -videoOutput adds the output and commits once,
        //   -videoInput  begins/commits its own configuration and then
        //                starts the session running.
        // Reordering these calls would unbalance the begin/commit pairing.
        [self layer];
        [self imageView];
        [self metalView];
        [self encoder];
        [self segmenter];
        [self videoSession];
        [self videoDevice];
        [self videoOutput];
        [self videoInput];
        
    }
    return self;
}
#pragma mark - delegate
// AVCaptureVideoDataOutputSampleBufferDelegate: called on the serial
// "videoQueue" (set in -videoOutput) for every captured frame. Frames are
// forwarded to the segmenter, whose results come back via the
// SegmenterDelegate callbacks below.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    [self.segmenter processWithBuffer:sampleBuffer];
}
// SegmenterDelegate: receives the segmented frame for LOCAL preview.
// Fires on the segmenter's (non-main) queue, so the UIImageView update is
// hopped onto the main queue — UIKit is main-thread only.
// The image is rotated right because the portrait front camera delivers
// buffers in landscape sensor orientation (same as -receviceSendBuffer:).
- (void)receviceCVPixelBufferRef:(nonnull CVImageBufferRef)buffer {
    if (buffer == NULL) {
        return;
    }
    UIImage *image = [UIUtilities UIImageFromImageBuffer:buffer
                                             orientation:UIImageOrientationRight];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = image;
    });
}
// SegmenterDelegate: receives the segmented frame destined for the REMOTE
// peer. Converts the pixel buffer to a right-rotated UIImage, re-wraps it as
// a sample buffer, H.264-encodes it, and hands each NAL payload to the
// session delegate for transport.
- (void)receviceSendBuffer:(CVImageBufferRef)buffer {
    if (!buffer) {
        return;
    }
    UIImage *frame = [UIUtilities UIImageFromImageBuffer:buffer
                                             orientation:UIImageOrientationRight];
    [self.encoder encodeCMSampleBuffer:[VideoTransform sampleBufferFromUIImage:frame]
                         h264DataBlock:^(NSData *data) {
        [self.delegate sendVideo:data];
    }];
}
#pragma mark - public
// Installs the local preview image view (with a "本端" badge at its bottom-left
// corner) as a subview filling the given host view.
// @param view The container the preview should fill.
- (void)bindToWindow:(UIView *)view {
    // Use bounds, not frame: frame is expressed in the SUPERVIEW's coordinate
    // space, so a host view with a nonzero origin would offset the preview.
    self.imageView = [[UIImageView alloc] initWithFrame:view.bounds];
    self.imageView.layer.borderColor = UIColor.blackColor.CGColor;
    self.imageView.layer.borderWidth = 2.0;
    [view addSubview:self.imageView];
    UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(0, self.imageView.frame.size.height - 25, 50, 25)];
    label.text = @"本端";
    label.layer.cornerRadius = 4;
    label.layer.shadowRadius = 4;
    label.backgroundColor = [UIColor whiteColor];
    [self.imageView addSubview:label];
}
#pragma mark - getter
// Lazy getter: display layer for rendering sample buffers, filling its
// bounds while preserving aspect ratio.
- (AVSampleBufferDisplayLayer *)layer {
    if (_layer == nil) {
        AVSampleBufferDisplayLayer *displayLayer = [AVSampleBufferDisplayLayer layer];
        displayLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _layer = displayLayer;
    }
    return _layer;
}


// Lazy getter: Metal-backed render view (project type — see MetalView.h).
- (MetalView *)metalView {
    if (_metalView == nil) {
        _metalView = [[MetalView alloc] init];
    }
    return _metalView;
}

// Lazy getter: creates the capture session (and, as a side effect, the serial
// session queue) and OPENS a configuration transaction. Note the matching
// commitConfiguration happens later, in the -videoOutput getter — the init
// sequence relies on this pairing across getters.
- (AVCaptureSession *)videoSession {
    if (_videoSession == nil) {
        _sessionQueue = dispatch_queue_create("sessionQueue", nil);
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        [session beginConfiguration];
        session.sessionPreset = AVCaptureSessionPresetMedium;
        _videoSession = session;
    }
    return _videoSession;
}

// Lazy getter: the FRONT wide-angle camera, or nil when none exists
// (e.g. Simulator) — callers tolerate nil via ObjC nil-messaging.
- (AVCaptureDevice *)videoDevice {
    if (!_videoDevice) {
        // Ask the discovery session for front-position devices directly
        // instead of enumerating every camera and scanning for the front one.
        AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
            discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                                  mediaType:AVMediaTypeVideo
                                   position:AVCaptureDevicePositionFront];
        _videoDevice = discoverySession.devices.firstObject;
    }
    return _videoDevice;
}

// Lazy getter: wraps the front camera in a device input, attaches it to the
// session inside a configuration transaction, and starts the session.
// Returns nil (without caching) when the input cannot be created, e.g. when
// camera permission is denied or no device is available.
- (AVCaptureDeviceInput *)videoInput {
    if (!_videoInput) {
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice
                                                                            error:&error];
        if (!input) {
            // Don't silently pass nil to addInput: — surface the failure.
            NSLog(@"VideoSession: failed to create camera input: %@", error);
            return nil;
        }
        _videoInput = input;
        [self.videoSession beginConfiguration];
        if ([self.videoSession canAddInput:_videoInput]) {
            [self.videoSession addInput:_videoInput];
        }
        [self.videoSession commitConfiguration];
        // startRunning blocks until capture starts; keep it off the caller's
        // (typically main) thread. _sessionQueue exists — the self.videoSession
        // getter above created it.
        dispatch_async(_sessionQueue, ^{
            [self.videoSession startRunning];
        });
    }
    return _videoInput;
}

// Lazy getter: BGRA video data output delivering frames to self on a private
// serial queue. Commits the configuration transaction that the -videoSession
// getter opened.
- (AVCaptureVideoDataOutput *)videoOutput {
    if (!_videoOutput) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        _videoOutput.videoSettings = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
        };
        // Drop frames we can't process in time rather than queueing them.
        _videoOutput.alwaysDiscardsLateVideoFrames = YES;
        dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL);
        [_videoOutput setSampleBufferDelegate:self queue:videoQueue];
        if ([self.videoSession canAddOutput:_videoOutput]) {
            [self.videoSession addOutput:_videoOutput];
        }
        // The video connection only exists after the output joins the session.
        // Named constant instead of the magic number 1 (camera rotation).
        [_videoOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation =
            AVCaptureVideoOrientationPortrait;
        // Exactly ONE commit: pairs with beginConfiguration in -videoSession.
        // (The original committed twice; the second, unbalanced commit is an
        // AVCaptureSession API violation.)
        [self.videoSession commitConfiguration];
    }
    return _videoOutput;
}


// Lazy getter: H.264 encoder fixed at 640x480 for outbound frames.
// ("Vedio" spelling is part of VideoH264Encoder's public selector.)
- (VideoH264Encoder *)encoder {
    if (_encoder == nil) {
        static const int kEncodeWidth = 640;
        static const int kEncodeHeight = 480;
        _encoder = [[VideoH264Encoder alloc] initWithVedioWidth:kEncodeWidth
                                                    vedioHeight:kEncodeHeight];
    }
    return _encoder;
}
// Lazy getter: segmenter whose results come back to this object through the
// SegmenterDelegate "recevice…" callbacks above.
- (Segmenter *)segmenter {
    if (_segmenter == nil) {
        Segmenter *segmenter = [[Segmenter alloc] init];
        segmenter.delegate = self;
        _segmenter = segmenter;
    }
    return _segmenter;
}

@end
