//
//  ViewController.m
//  HelloWorld
//
//  Created by leeson zhong on 2021/3/30.
//

#import "ViewController.h"
#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#import "mediapipe/objc/MPPGraph.h"


static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
static NSString* const kGraphName = @"mobile_gpu";

static const char* kInputStream = "input_video";
static const char* kOutputStream = "output_video";

// Class extension: private MediaPipe state plus the camera-input and graph
// delegate conformances, kept out of the public header.
@interface ViewController ()<MPPInputSourceDelegate,MPPGraphDelegate>

// The MediaPipe graph currently in use. Initialized in viewDidLoad, started in viewWillAppear: and
// sent video frames on _videoQueue.
@property(nonatomic) MPPGraph* mediapipeGraph;

@end

@implementation ViewController{
    // Handles camera access via AVCaptureSession library.
    MPPCameraInputSource* _cameraSource;
    
    // Process camera frames on this queue.
    dispatch_queue_t _videoQueue;
    
    // Display the camera preview frames.
    __weak IBOutlet UIView *_liveView;

    // Render frames in a layer.
    MPPLayerRenderer* _renderer;
    // Debug text field; its text is updated by the repeating timer below.
    __weak IBOutlet UITextField *messageTextField;
    // NOTE(review): repeating timer scheduled on the main run loop in viewDidLoad;
    // the run loop retains it, so make sure it is invalidated before teardown.
    NSTimer *timer;
}

/**
 viewDidLoad initializes _cameraSource, choosing the capture session preset and which camera
 to use. Frames must flow from _cameraSource into this ViewController for display.
 MPPCameraInputSource is a subclass of MPPInputSource and reports frames to its delegate via
 the MPPInputSourceDelegate protocol; this ViewController implements that protocol and is
 registered as _cameraSource's delegate.
 Camera frames are processed on a dedicated serial queue, _videoQueue, created with
 QOS_CLASS_USER_INTERACTIVE priority.
 */

- (void)viewDidLoad {
    [super viewDidLoad];
    
    // Serial, user-interactive queue dedicated to camera frame processing.
    dispatch_queue_attr_t qosAttribute = dispatch_queue_attr_make_with_qos_class(
          DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INTERACTIVE, /*relative_priority=*/0);
    _videoQueue = dispatch_queue_create(kVideoQueueLabel, qosAttribute);
    
    _cameraSource = [[MPPCameraInputSource alloc] init];
    _cameraSource.sessionPreset = AVCaptureSessionPresetHigh;
    _cameraSource.cameraPosition = AVCaptureDevicePositionBack;
    // The frame's native format is rotated with respect to the portrait orientation.
    _cameraSource.orientation = AVCaptureVideoOrientationPortrait;
    [_cameraSource setDelegate:self queue:_videoQueue];
    
    _renderer = [[MPPLayerRenderer alloc] init];
    _renderer.layer.frame = _liveView.layer.bounds;
    [_liveView.layer addSublayer:_renderer.layer];
    _renderer.frameScaleMode = MPPFrameScaleModeFillAndCrop;
    
    _liveView.backgroundColor = [UIColor redColor];
    messageTextField.text = @"0000";
    // A repeating timer's block is retained by the timer, and the run loop retains
    // the timer; capturing self strongly here (even implicitly, through ivar access)
    // would keep this controller alive forever. Use the weak/strong dance.
    __weak typeof(self) weakSelf = self;
    timer = [NSTimer timerWithTimeInterval:1 repeats:YES block:^(NSTimer * _Nonnull firedTimer) {
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (!strongSelf) return;
        if (strongSelf->_cameraSource.running) {
            strongSelf->messageTextField.text = @"2222";
        }
    }];
    [[NSRunLoop mainRunLoop] addTimer:timer forMode:NSDefaultRunLoopMode];
    
    self.mediapipeGraph = [[self class] loadGraphFromResource:kGraphName];
    self.mediapipeGraph.delegate = self;
    // Set maxFramesInFlight to a small value to avoid memory contention for real-time processing.
    self.mediapipeGraph.maxFramesInFlight = 2;
}

// Stop the repeating timer so the main run loop releases it; without this the
// timer (and its block) would keep firing after the controller is gone.
- (void)dealloc {
    [timer invalidate];
}

/**
 MediaPipe provides a utility class named MPPLayerRenderer that displays images on screen.
 It renders CVPixelBufferRef objects — the image type MPPCameraInputSource delivers to its
 delegate.
 To show the camera output we add a UIView named _liveView to this ViewController: in
 Main.storyboard, drag a UIView from the object library onto the ViewController's view,
 connect it to the _liveView outlet, and resize it so it is centered and covers the whole
 application screen.
 Back in this file, viewDidLoad initializes the _renderer object and adds its layer to
 _liveView so frames can be displayed.
 As the MPPInputSourceDelegate registered on _cameraSource in viewDidLoad, this class
 implements processVideoFrame to receive each frame from _cameraSource; rendered output is
 ultimately displayed via _renderer's renderPixelBuffer method.
 */

// MPPInputSourceDelegate callback; must be invoked on _videoQueue.
// Forwards each camera frame into the MediaPipe graph's input stream; results
// come back asynchronously via the MPPGraphDelegate callback below.
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
                timestamp:(CMTime)timestamp
               fromSource:(MPPInputSource*)source {
  if (source == _cameraSource) {
    [self.mediapipeGraph sendPixelBuffer:imageBuffer
                              intoStream:kInputStream
                              packetType:MPPPacketTypePixelBuffer];
  } else {
    NSLog(@"Unknown source: %@", source);
  }
}

// MPPGraphDelegate callback: a processed frame is available on an output stream.
// May arrive on an arbitrary graph thread, so rendering hops to the main queue.
- (void)mediapipeGraph:(MPPGraph*)graph
   didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
             fromStream:(const std::string&)streamName {
  if (streamName != kOutputStream) {
    return;
  }
  // Retain the buffer across the async hop; the main-queue block releases it
  // once rendering is done.
  CVPixelBufferRetain(pixelBuffer);
  dispatch_async(dispatch_get_main_queue(), ^{
    [_renderer renderPixelBuffer:pixelBuffer];
    CVPixelBufferRelease(pixelBuffer);
  });
}

/**
 Starts the camera as soon as the view appears. Because opening the camera requires user
 permission, MPPCameraInputSource provides requestCameraAccessWithCompletionHandler: to
 report the authorization result, so the camera is only started after the user responds.
 */
-(void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [_cameraSource requestCameraAccessWithCompletionHandler:^void(BOOL granted) {
      if (!granted) {
        NSLog(@"Camera access was not granted.");
        return;
      }
      // Start running self.mediapipeGraph.
      NSError* error;
      if (![self.mediapipeGraph startWithError:&error]) {
        NSLog(@"Failed to start graph: %@", error);
        // Don't start the camera if the graph cannot consume frames.
        return;
      }
      if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
        NSLog(@"Failed to complete graph initial run: %@", error);
        return;
      }

      // Camera start must happen on _videoQueue, where its delegate is serviced.
      dispatch_async(_videoQueue, ^{
        [_cameraSource start];
      });
    }];
}

/// Loads a MediaPipe graph config ("<resource>.binarypb") from the app bundle and
/// builds an MPPGraph that publishes kOutputStream as pixel buffers.
/// @param resource The bundle resource name, without the ".binarypb" extension.
/// @return The configured graph, or nil if the resource name is empty or the
///         config file is missing, unreadable, or fails to parse.
+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
  // Messaging nil returns 0, so this also covers a nil resource.
  if (resource.length == 0) {
    return nil;
  }

  // Load the graph config resource.
  NSBundle* bundle = [NSBundle bundleForClass:[self class]];
  NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
  if (!graphURL) {
    NSLog(@"MediaPipe graph config %@.binarypb not found in bundle.", resource);
    return nil;
  }
  NSError* configLoadError = nil;
  NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
  if (!data) {
    NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
    return nil;
  }

  // Parse the bytes into a mediapipe::CalculatorGraphConfig proto. Check the
  // return value: the original ignored it, silently building a graph from a
  // partially-parsed config on malformed data.
  mediapipe::CalculatorGraphConfig config;
  if (!config.ParseFromArray(data.bytes, (int)data.length)) {
    NSLog(@"Failed to parse MediaPipe graph config %@.binarypb.", resource);
    return nil;
  }

  // Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
  MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
  [newGraph addFrameOutputStream:kOutputStream outputPacketType:MPPPacketTypePixelBuffer];
  return newGraph;
}

@end
