//
//  VideoTransform.m
//  OneToOne
//
//  Created by ByteDance on 2023/3/24.
//

#import "VideoTransform.h"


@implementation VideoTransform

/// Converts a UIImage into a CMSampleBuffer backed by a 32ARGB pixel buffer.
/// The sample buffer carries no timing information (all fields kCMTimeInvalid).
/// @param image Source image; its CGImage backing is rendered at native pixel size.
/// @return A +1 retained CMSampleBufferRef the caller must CFRelease, or NULL on failure.
+ (CMSampleBufferRef)sampleBufferFromUIImage:(UIImage *)image {
    // A CIImage-backed UIImage has no CGImage; bail out instead of creating a 0x0 buffer.
    CGImageRef cgImage = image.CGImage;
    if (!cgImage) {
        return NULL;
    }
    
    // Image geometry in pixels (not points).
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    CGRect imageRect = CGRectMake(0, 0, width, height);
    NSDictionary *options = @{
        (NSString *)kCVPixelBufferCGImageCompatibilityKey : @(YES),
        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @(YES)
    };
    
    // Create the destination pixel buffer.
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pixelBuffer);
    if (result != kCVReturnSuccess) {
        return NULL;
    }
    
    // Render the image directly into the pixel buffer's backing store.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *pixelData = CVPixelBufferGetBaseAddress(pixelBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pixelData, width, height, 8, CVPixelBufferGetBytesPerRow(pixelBuffer), colorSpace, kCGImageAlphaNoneSkipFirst);
    if (!context) {
        // BUGFIX: bitmap context creation can fail; previously a NULL context was
        // drawn into and the pixel buffer leaked in a locked state.
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
        return NULL;
    }
    CGContextDrawImage(context, imageRect, cgImage);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
    // Wrap the pixel buffer in a CMSampleBuffer with an untimed sample.
    CMSampleBufferRef sampleBuffer = NULL;
    CMFormatDescriptionRef formatDescription = NULL;
    CMSampleTimingInfo timingInfo = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
    result = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDescription);
    if (result != noErr) {
        CVPixelBufferRelease(pixelBuffer);
        return NULL;
    }
    result = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, formatDescription, &timingInfo, &sampleBuffer);
    if (result != noErr) {
        // BUGFIX: formatDescription was leaked on this failure path.
        CFRelease(formatDescription);
        CVPixelBufferRelease(pixelBuffer);
        return NULL;
    }
    
    // The sample buffer retains both; drop our local references.
    CFRelease(formatDescription);
    CVPixelBufferRelease(pixelBuffer);
    
    return sampleBuffer;
}

/// Wraps an existing CVPixelBuffer in a CMSampleBuffer marked for immediate display.
/// No timing information is attached (all fields kCMTimeInvalid).
/// @param pixelBuffer The image buffer to wrap; retained for the duration of the call.
/// @return A +1 retained CMSampleBufferRef the caller must CFRelease, or NULL on failure.
+ (CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    if (!pixelBuffer){
        return NULL;
    }
    // Hold the buffer across the CoreMedia calls.
    CVPixelBufferRetain(pixelBuffer);
    
    // No concrete timing information.
    CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
    // Derive the video format description from the buffer itself.
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus result = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
    NSParameterAssert(result == 0 && videoInfo != NULL);
    // BUGFIX: NSParameterAssert compiles out in release builds; previously a failed
    // creation fell through with a NULL videoInfo and leaked the retained buffer.
    if (result != noErr || videoInfo == NULL) {
        CVPixelBufferRelease(pixelBuffer);
        return NULL;
    }
    
    CMSampleBufferRef sampleBuffer = NULL;
    result = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    NSParameterAssert(result == 0 && sampleBuffer != NULL);
    // The sample buffer (if created) retains both; drop our local references.
    CFRelease(videoInfo);
    CVPixelBufferRelease(pixelBuffer);
    // BUGFIX: guard the release-build path; CMSampleBufferGetSampleAttachmentsArray(NULL,…)
    // would crash below.
    if (result != noErr || sampleBuffer == NULL) {
        return NULL;
    }
    
    // Mark the sample so the display layer shows it immediately rather than waiting on PTS.
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
    if (attachments && CFArrayGetCount(attachments) > 0) {
        CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
        CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    }
    return sampleBuffer;
}

/// Copies the raw bytes of a sample buffer's block buffer into an NSData.
/// @param sampleBuffer A sample buffer whose payload lives in a CMBlockBuffer
///        (e.g. compressed video). Image-buffer-backed samples have no block
///        buffer and yield nil.
/// @return The copied bytes, or nil if there is no block buffer or the copy fails.
+ (NSData *)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (!sampleBuffer) {
        return nil;
    }
    // BUGFIX: CMSampleBufferGetDataBuffer returns NULL for pixel-buffer-backed
    // samples (exactly what the other methods in this class produce); previously
    // NULL was passed straight into the CMBlockBuffer calls.
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if (!blockBuffer) {
        return nil;
    }
    // Allocate a zero-filled buffer of the exact payload length…
    size_t length = CMBlockBufferGetDataLength(blockBuffer);
    NSMutableData *data = [NSMutableData dataWithLength:length];
    // …and copy the (possibly non-contiguous) block buffer contents into it.
    OSStatus status = CMBlockBufferCopyDataBytes(blockBuffer, 0, length, data.mutableBytes);
    if (status != kCMBlockBufferNoErr) {
        // BUGFIX: the copy status was ignored; do not hand back silently-zeroed data.
        return nil;
    }
    return data;
}

/// Renders a sample buffer's image buffer into a UIImage via Core Image.
/// @param sampleBuffer Must carry a CVImageBuffer (camera/decoder output).
/// @param orientation Orientation stamped onto the resulting UIImage.
/// @return The rendered image at scale 1.0, or nil if the sample has no
///         image buffer or rendering fails.
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer orientation:(UIImageOrientation)orientation {
    // BUGFIX: GetImageBuffer returns NULL for non-image samples; initWithCVPixelBuffer:
    // would raise. Guard instead.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return nil;
    }
    CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:imageBuffer];
    // NOTE(review): a CIContext per call is expensive; consider caching one if this
    // runs per-frame.
    CIContext *context = [[CIContext alloc] init];
    CGImageRef cgimage = [context createCGImage:ciImage fromRect:[ciImage extent]];
    if (!cgimage) {
        return nil;
    }
    UIImage *image = [UIImage imageWithCGImage:cgimage scale:1.f orientation:orientation];
    CGImageRelease(cgimage);
    return image;
}

/// Intended to return a horizontally mirrored copy of the sample's NV12 pixel buffer.
/// NOTE(review): the libyuv::NV12Mirror call is currently disabled (libyuv not linked),
/// so this returns an UNMIRRORED copy until it is restored — see the TODO below.
/// (Method name keeps its historical "Buffler" misspelling; callers depend on it.)
/// @param sampleBufRef Sample buffer backed by a bi-planar (NV12) CVPixelBuffer.
/// @return A +1 retained CVPixelBufferRef the caller must CVPixelBufferRelease, or NULL.
+ (CVPixelBufferRef)mirrorSampleBuffler:(CMSampleBufferRef)sampleBufRef {
    // Fetch the source image buffer from the sample.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufRef);
    if (!pixelBuffer) {
        return NULL;
    }
    // We only read the source; lock it read-only.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    // Pixel dimensions.
    size_t buffer_width = CVPixelBufferGetWidth(pixelBuffer);
    size_t buffer_height = CVPixelBufferGetHeight(pixelBuffer);
    // Plane 0 = Y, plane 1 = interleaved UV (NV12 layout).
    uint8_t *src_y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    uint8_t *src_uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    size_t src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    size_t y_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    size_t uv_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
    size_t y_size = src_y_stride * y_height;
    size_t uv_size = src_uv_stride * uv_height;

    // Scratch buffer holding both planes back-to-back.
    uint8_t *scratch_y = (uint8_t *)malloc(y_size + uv_size);
    if (!scratch_y) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
        return NULL;
    }
    uint8_t *scratch_uv = scratch_y + y_size;

    // BUGFIX: the original copied this scratch buffer to the destination while it
    // was still UNINITIALIZED (the mirror call below is commented out), producing
    // garbage frames. Pass the source planes through so output is valid image data.
    memcpy(scratch_y, src_y_frame, y_size);
    memcpy(scratch_uv, src_uv_frame, uv_size);

    // TODO: restore mirroring once libyuv is linked (note the UV stride is
    // src_uv_stride, not a bit-shift of the Y stride as in the old comment):
    // libyuv::NV12Mirror(src_y_frame, (int)src_y_stride,
    //                    src_uv_frame, (int)src_uv_stride,
    //                    scratch_y, (int)src_y_stride,
    //                    scratch_uv, (int)src_uv_stride,
    //                    (int)buffer_width, (int)buffer_height);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    // Create the destination NV12 (full-range) buffer; IOSurface-backed so it can
    // feed display/encoder pipelines.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef dstPixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          buffer_width, buffer_height, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes, &dstPixelBuffer);
    // BUGFIX: the result was previously checked only AFTER locking and writing into
    // the (possibly NULL) destination; check before use and free the scratch buffer.
    if (result != kCVReturnSuccess || !dstPixelBuffer) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(scratch_y);
        return NULL;
    }

    CVPixelBufferLockBaseAddress(dstPixelBuffer, 0);
    // BUGFIX: the destination's bytes-per-row can differ from the source's
    // (IOSurface alignment), so copy row by row instead of one plane-sized memcpy.
    uint8_t *yDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 0);
    size_t dst_y_stride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 0);
    size_t y_row_bytes = MIN(src_y_stride, dst_y_stride);
    for (size_t row = 0; row < y_height; row++) {
        memcpy(yDstPlane + row * dst_y_stride, scratch_y + row * src_y_stride, y_row_bytes);
    }
    uint8_t *uvDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 1);
    size_t dst_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 1);
    size_t uv_row_bytes = MIN(src_uv_stride, dst_uv_stride);
    for (size_t row = 0; row < uv_height; row++) {
        memcpy(uvDstPlane + row * dst_uv_stride, scratch_uv + row * src_uv_stride, uv_row_bytes);
    }
    CVPixelBufferUnlockBaseAddress(dstPixelBuffer, 0);
    free(scratch_y);

    return dstPixelBuffer;
}
@end
