//
//  ScreenRecord.m
//  ScreenRecord_ios_oc
//
//  Created by 51talk on 2021/5/13.
//

#import <Foundation/Foundation.h>
#import "ScreenRecord.h"
#import "ACME_proxy.h"
#import "DumpFile.h"


@interface ScreenRecord()

@end

static ScreenRecord* Instance = nil;

@implementation ScreenRecord

// NOTE(review): these three have external linkage (visible to every other
// translation unit). Unless something deliberately declares them `extern`,
// they should become `static` or instance properties.
bool dumpAppScreen = true;          // master switch: dump captured frames to disk
bool appScreenFlag = true;          // one-shot latch: open the dump file on the first frame only
DumpFile *dumpAppScreenFile = nil;  // lazily created in -diliverYuvData:...

/// Returns the shared ScreenRecord instance, creating it on first use.
/// Thread-safe via dispatch_once.
+ (ScreenRecord *)getInstance {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // Bug fix: the previous code returned a bare [self alloc] with no
        // -init, i.e. an uninitialized object.
        Instance = [[self alloc] init];
    });
    return Instance;
}

/// ReplayKit capture callback: converts one biplanar (NV12-style) video
/// sample to planar I420 and forwards it to the media engine.
/// Audio samples (app/mic) are currently ignored.
/// @param bufferType   ReplayKit sample type (video / app audio / mic audio).
/// @param sampleBuffer The captured sample; only video buffers are consumed.
- (void)handleSampleBuffer:(RPSampleBufferType)bufferType sampleBufferRef:(CMSampleBufferRef)sampleBuffer {
    switch (bufferType) {
        case RPSampleBufferTypeVideo:
        {
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            if (pixelBuffer == NULL) {
                // Bug fix: previously dereferenced without a NULL check.
                NSLog(@"sample buffer has no image buffer");
                return;
            }
            if (CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
                NSLog(@"lock data failed!");
                return;
            }

            // CVPixelBufferGetPixelFormatType returns an OSType (FourCC), not int.
            OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
            // Bug fix: the conversion below reads two planes (Y + interleaved
            // UV), which is only valid for the biplanar 4:2:0 formats. The old
            // code merely logged the format and then fell through, so a packed
            // format such as 32RGBA would crash on a NULL plane-1 address.
            if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange &&
                pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
                NSLog(@"unsupported capture pixel format=0x%x", (unsigned int)pixelFormat);
                CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
                return;
            }

            size_t pixelWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
            size_t pixelHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);

            uint8_t *y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
            uint8_t *uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
            if (y_frame == NULL || uv_frame == NULL) {
                NSLog(@"missing plane base address");
                CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
                return;
            }

            size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
            size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
            size_t y_size = pixelWidth * pixelHeight;
            size_t uv_size = y_size / 2;          // U + V together, for 4:2:0
            size_t length = y_size + uv_size;     // total I420 frame bytes
            uint8_t *yuv_buffer = new uint8_t[length];

            // Copy the Y plane, collapsing any row padding (stride > width).
            if (pixelWidth != y_stride) {
                for (size_t i = 0; i < pixelHeight; i++) {
                    memcpy(&yuv_buffer[i * pixelWidth], &y_frame[i * y_stride], pixelWidth);
                }
            } else {
                memcpy(yuv_buffer, y_frame, y_size);
            }

            // De-interleave the NV12 UV plane into I420's separate U and V planes.
            uint8_t *u_buffer = yuv_buffer + y_size;
            uint8_t *v_buffer = u_buffer + uv_size / 2;
            if (pixelWidth != uv_stride) {
                for (size_t i = 0; i < pixelHeight / 2; i++) {
                    for (size_t j = 0; j < pixelWidth / 2; j++) {
                        u_buffer[i * pixelWidth / 2 + j] = uv_frame[i * uv_stride + j * 2];
                        v_buffer[i * pixelWidth / 2 + j] = uv_frame[i * uv_stride + j * 2 + 1];
                    }
                }
            } else {
                for (size_t i = 0; i < uv_size / 2; i++) {
                    u_buffer[i] = uv_frame[i * 2];
                    v_buffer[i] = uv_frame[i * 2 + 1];
                }
            }
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

            [self diliverYuvData:kI420
                           frame:yuv_buffer
                           width:(int)pixelWidth
                          height:(int)pixelHeight
                            size:(int)length];
            delete [] yuv_buffer;

            break;
        }

        case RPSampleBufferTypeAudioApp:
            break;
        case RPSampleBufferTypeAudioMic:
            break;

        default:
            break;
    }
}

/// Forwards one I420 frame to the media engine and, when dumping is enabled,
/// appends it to a debug dump file (opened lazily on the first frame, sized
/// to that frame's dimensions).
/// NOTE(review): "diliver" is a typo for "deliver"; the selector is kept so
/// existing call sites keep working — rename them all together.
- (void)diliverYuvData:(enum VideoType)videoType frame:(unsigned char*)frame width:(int)width height:(int)height size:(int)size {
    [[MediaEngine SDK] DeliverExternalFrame:videoType frame:frame width:width height:height size:size];

    if (dumpAppScreen && appScreenFlag) {
        dumpAppScreenFile = [[DumpFile alloc] init];
        [dumpAppScreenFile openFileStream:width height:height];
        appScreenFlag = false;
    }

    // When dumping is disabled dumpAppScreenFile stays nil and this message
    // is a no-op (nil messaging), matching the original behavior.
    // NOTE(review): the dump file is never closed; add a close path on
    // recording stop if DumpFile does not flush in -dealloc.
    [dumpAppScreenFile writeToFile:frame size:size];
}

@end
