//
//  SampleHandler.m
//  ScreenRecordUpload
//
//  Created by 51talk on 2021/5/17.
//

#import <UserNotifications/UserNotifications.h>


#import "SampleHandler.h"
#import "NTESSocket.h"
#import "DumpFile.h"
#import "config.h"
#import "libyuv.h"

// App Group identifier used to share NSUserDefaults with the host app.
// Must match the App Groups entitlement of BOTH the extension and the
// container app, or initWithSuiteName: will return an isolated defaults.
static NSString * _Nonnull kAppGroup = @"group.screen.record.system";

@interface SampleHandler()

// Shared defaults backed by the app group (AppGroups mode transport).
// NOTE(review): property name contains a typo ("userDefautls"); kept as-is
// because it is referenced throughout this file.
@property (nonatomic, strong) NSUserDefaults *userDefautls;

// Local socket used to stream frames to the host app (Socket mode transport).
// Attributes made explicit: the original declaration relied on the defaults,
// which made the accessors needlessly atomic.
@property (nonatomic, strong) NTESSocket *clientSocket;

@end

@implementation SampleHandler
// Loopback endpoints for Socket mode: the extension binds its client socket
// to clientIp:clientPort and connects to the host app's server socket at
// serverIp:serverPort (see connectToHostApp below).
// NOTE(review): these globals have external linkage; consider
// `static NSString * const` unless they are deliberately exported via extern
// elsewhere — confirm before changing.
NSString *clientPort = @"8899";
NSString *clientIp = @"127.0.0.1";
NSString *serverPort = @"8898";
NSString *serverIp = @"127.0.0.1";

// Selects how captured frames reach the host app: AppGroups (shared
// NSUserDefaults) or Socket (local TCP). ShareDataMode comes from config.h.
ShareDataMode eShareDataMode = AppGroups;

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // The user started the broadcast. Setup info from the UI extension may be
    // supplied, but it is optional and unused here.
    NSLog(@"broadcastStartedWithSetupInfo");
    switch (eShareDataMode) {
        case AppGroups:
            // Frames will be published through the app-group defaults.
            self.userDefautls = [[NSUserDefaults alloc] initWithSuiteName:kAppGroup];
            break;
        case Socket:
            // Frames will be streamed over a local socket instead.
            [self connectToHostApp];
            break;
        default:
            break;
    }

//    [self sendLocalNotificationToHostAppWithTitle:@"视频直播通知" msg:@"开始直播" userInfo:@{kReplayKit2UploadingKey: kReplayKit2Uploading}];
}

- (void)broadcastPaused {
    // The user paused the broadcast; ReplayKit stops delivering samples
    // until broadcastResumed fires. Nothing to tear down here.
    NSLog(@"broadcastPaused");
}

- (void)broadcastResumed {
    // The user resumed the broadcast; sample delivery restarts automatically.
    NSLog(@"broadcastResumed");
}

- (void)broadcastFinished {
    // The user ended the broadcast; release the transport if one was opened.
    NSLog(@"broadcastFinished");
    switch (eShareDataMode) {
        case Socket:
            // Close our socket so the loopback port is freed promptly.
            [self closeOwnSocket];
            break;
        default:
            break;
    }

//    if(eShareDataMode == AppGroups) {
//        [self.userDefautls removeObjectForKey:@"frame"];
//    }
}

// Entry point for every captured sample. Only video frames are forwarded to
// the host app; app and mic audio are currently ignored.
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {

    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Drain per-frame temporaries promptly; this runs many times per second.
            @autoreleasepool {
                if(eShareDataMode == AppGroups) {
                    NSLog(@"upload extension send data");
                    NSDictionary *frame = [self handleSampleBuffer:sampleBufferType sampleBufferRef:sampleBuffer];
                    // Fix: handleSampleBuffer returns nil when the pixel buffer
                    // cannot be locked; writing nil into NSUserDefaults would
                    // silently REMOVE the shared key. Skip the write instead.
                    if (frame != nil) {
                        [self.userDefautls setObject:frame forKey:@"frame"];
                        [self.userDefautls synchronize];
                    }
                } else if(eShareDataMode == Socket) {
                    [self handleSampleBufferAndSend:sampleBufferType sampleBufferRef:sampleBuffer];
                }
            }
            break;
        case RPSampleBufferTypeAudioApp:
            // App audio is not uploaded.
            break;
        case RPSampleBufferTypeAudioMic:
            // Mic audio is not uploaded.
            break;

        default:
            break;
    }
}

// Converts one captured video frame (ReplayKit normally delivers NV12 / 420f)
// into a tightly packed I420 buffer — full-size Y plane followed by
// quarter-size U and V planes — wrapped in a dictionary for the app-group
// channel: @{ @"width", @"height", @"data" }.
// Returns nil for audio buffer types, when the pixel buffer cannot be
// locked, or when the frame buffer cannot be allocated.
- (NSDictionary*)handleSampleBuffer:(RPSampleBufferType)bufferType sampleBufferRef:(CMSampleBufferRef)sampleBuffer {
    NSDictionary *frame = nil;

    switch (bufferType) {
        case RPSampleBufferTypeVideo:
        {
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            // Read-only lock is sufficient: we only copy the planes out.
            if(CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
                NSLog(@"lock data failed!");
                return nil;
            }

            // Log unexpected capture formats; the common 420f case stays quiet.
            int pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
            switch(pixelFormat) {
                case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                    NSLog(@"420v");
                    break;
                case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
//                    NSLog(@"420f");
                    break;
                case kCVPixelFormatType_32RGBA:
                    NSLog(@"32rgba");
                    break;
                default:
                    NSLog(@"capture pixel format=0x%x", pixelFormat);
                    break;
            }

            size_t pixelWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
            size_t pixelHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);

            uint8_t *y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
            size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
            size_t y_size = pixelWidth * pixelHeight;
            size_t uv_size = y_size / 2;
            int length = (int)(y_size + uv_size);
            uint8_t *yuv_buffer = (uint8_t*)malloc(length);
            if(yuv_buffer == NULL) {
                // Fix: the original passed an unchecked malloc result straight
                // into memcpy. Drop the frame cleanly on allocation failure.
                CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
                return nil;
            }
            // Copy the Y plane, stripping per-row padding when stride > width.
            if(pixelWidth != y_stride) {
                for(int i=0; i<pixelHeight; i++) {
                    memcpy(&yuv_buffer[i*pixelWidth], &y_frame[i*y_stride], pixelWidth);
                }
            } else {
                memcpy(yuv_buffer, y_frame, y_size);
            }

            // De-interleave the NV12 UV plane into separate U and V planes.
            uint8_t *uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
            size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
            uint8_t *u_buffer = yuv_buffer+y_size;
            uint8_t *v_buffer = u_buffer + uv_size/2;
            if(pixelWidth != uv_stride) {
                for(int i=0; i<pixelHeight/2; i++) {
                    for(int j=0; j<pixelWidth/2; j++) {
                        u_buffer[i*pixelWidth/2 + j] = uv_frame[i*uv_stride + j*2];
                        v_buffer[i*pixelWidth/2 + j] = uv_frame[i*uv_stride + j*2 + 1];
                    }
                }
            } else {
                for(int i=0; i<uv_size/2; i++) {
                    u_buffer[i] = uv_frame[i*2];
                    v_buffer[i] = uv_frame[i*2 + 1];
                }
            }
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

            // dataWithBytesNoCopy: takes ownership of yuv_buffer (freeWhenDone
            // defaults to YES) and frees it with free() when the NSData dies —
            // do NOT free yuv_buffer here.
            NSData *data = [NSData dataWithBytesNoCopy:yuv_buffer length:length];
            frame = @{
                @"width": @(pixelWidth),
                @"height": @(pixelHeight),
                @"data": data,
            };
            break;
        }
        case RPSampleBufferTypeAudioApp:
            break;
        case RPSampleBufferTypeAudioMic:
            break;

        default:
            break;
    }

    return frame;
}

// Socket-mode counterpart of handleSampleBuffer: converts a captured video
// frame (NV12) to a tightly packed I420 buffer and streams it to the host
// app over the client socket. Audio buffer types are ignored.
- (void)handleSampleBufferAndSend:(RPSampleBufferType)bufferType sampleBufferRef:(CMSampleBufferRef)sampleBuffer {
    switch (bufferType) {
        case RPSampleBufferTypeVideo:
        {
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

#if 0 // Disabled: scaling is not done in the app layer. Kept for reference.
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
            // Convert NV12 to I420 with libyuv.
            int psrc_w = (int)CVPixelBufferGetWidth(pixelBuffer);
            int psrc_h = (int)CVPixelBufferGetHeight(pixelBuffer);
            uint8_t *src_y = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
            uint8 *src_uv = (uint8 *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
            int y_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
            int uv_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
            uint8_t *i420_buf = (uint8_t *)malloc((psrc_w * psrc_h * 3) >> 1);

            NV12ToI420(&src_y[0],                              y_stride,
                               &src_uv[0],                             uv_stride,
                               &i420_buf[0],                           psrc_w,
                               &i420_buf[psrc_w * psrc_h],             psrc_w >> 1,
                               &i420_buf[(psrc_w * psrc_h * 5) >> 2],  psrc_w >> 1,
                               psrc_w, psrc_h);

            // Scale down to 720x1280.
            int pdst_w = 720;
            int pdst_h = 1280;
            int dataLength = pdst_w * pdst_h * 3 >> 1;

            FilterModeEnum filter = kFilterNone;
            uint8_t *pdst_buf = (uint8_t *)malloc((pdst_w * pdst_h * 3) >> 1);
            I420Scale(&i420_buf[0],                          psrc_w,
                              &i420_buf[psrc_w * psrc_h],            psrc_w >> 1,
                              &i420_buf[(psrc_w * psrc_h * 5) >> 2], psrc_w >> 1,
                              psrc_w, psrc_h,
                              &pdst_buf[0],                          pdst_w,
                              &pdst_buf[pdst_w * pdst_h],            pdst_w >> 1,
                              &pdst_buf[(pdst_w * pdst_h * 5) >> 2], pdst_w >> 1,
                              pdst_w, pdst_h,
                              filter);

            free(i420_buf);
            i420_buf = nil;
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

            [self sendVideoBufferToHostApp:pdst_buf len:dataLength];

            free(pdst_buf);
            pdst_buf = nil;
#endif

#if 1
            // Read-only lock is sufficient: we only copy the planes out.
            if(CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
                NSLog(@"lock data failed!");
                return;
            }

            // Log unexpected capture formats; the common 420f case stays quiet.
            int pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
            switch(pixelFormat) {
                case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                    NSLog(@"420v");
                    break;
                case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
//                    NSLog(@"420f");
                    break;
                case kCVPixelFormatType_32RGBA:
                    NSLog(@"32rgba");
                    break;
                default:
                    NSLog(@"capture pixel format=0x%x", pixelFormat);
                    break;
            }

            size_t pixelWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
            size_t pixelHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
            // Observed system screen-recording plane resolution: 886x1918.

            uint8_t *y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
            size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
            size_t y_size = pixelWidth * pixelHeight;
            size_t uv_size = y_size / 2;
            int length = (int)(y_size + uv_size);
            uint8_t *yuv_buffer = (uint8_t*)malloc(length);
            if(yuv_buffer == NULL) {
                // Fix: the original passed an unchecked malloc result straight
                // into memcpy. Drop the frame cleanly on allocation failure.
                CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
                return;
            }
            // Copy the Y plane, stripping per-row padding when stride > width.
            if(pixelWidth != y_stride) {
                for(int i=0; i<pixelHeight; i++) {
                    memcpy(&yuv_buffer[i*pixelWidth], &y_frame[i*y_stride], pixelWidth);
                }
            } else {
                memcpy(yuv_buffer, y_frame, y_size);
            }

            // De-interleave the NV12 UV plane into separate U and V planes.
            uint8_t *uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
            size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
            uint8_t *u_buffer = yuv_buffer+y_size;
            uint8_t *v_buffer = u_buffer + uv_size/2;
            if(pixelWidth != uv_stride) {
                for(int i=0; i<pixelHeight/2; i++) {
                    for(int j=0; j<pixelWidth/2; j++) {
                        u_buffer[i*pixelWidth/2 + j] = uv_frame[i*uv_stride + j*2];
                        v_buffer[i*pixelWidth/2 + j] = uv_frame[i*uv_stride + j*2 + 1];
                    }
                }
            } else {
                for(int i=0; i<uv_size/2; i++) {
                    u_buffer[i] = uv_frame[i*2];
                    v_buffer[i] = uv_frame[i*2 + 1];
                }
            }
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

            // Unlike the AppGroups path, the buffer is NOT handed to NSData,
            // so we own it and must free it after sending.
            [self sendVideoBufferToHostApp:yuv_buffer len:length];
            free(yuv_buffer);
            yuv_buffer = nil;
#endif
            break;
        }
        case RPSampleBufferTypeAudioApp:
            break;
        case RPSampleBufferTypeAudioMic:
            break;
        default:
            break;
    }
}

// Creates the local client socket and connects to the host app's server
// socket over loopback. Called once when a Socket-mode broadcast starts.
- (void)connectToHostApp {
    self.clientSocket = [[NTESSocket alloc] initWithPort:clientPort ip:clientIp];

    // Initiate the connection to the host app's server socket.
    // (BOOL, not C bool, per Objective-C convention.)
    BOOL success = [self.clientSocket connectToServerWithPort:serverPort ip:serverIp];
    if (success) {
        // Connected; control-command data can now be received.
        NSLog(@"连接上了服务器");
    } else {
        // Fix: the original swallowed connection failures silently, leaving
        // every subsequent send to no-op with no diagnostic.
        NSLog(@"connect to host app server failed (%@:%@)", serverIp, serverPort);
    }
}

// Sends one raw I420 frame (`length` bytes at `buffer`) to the host app over
// the client socket. Safe to call before connecting: messaging a nil
// clientSocket is a no-op.
- (void)sendVideoBufferToHostApp:(uint8_t*)buffer len:(int)length {
    [self.clientSocket sendData:buffer length:length];
}

// Closes the client socket when the broadcast finishes (Socket mode only).
// No-op if the socket was never created.
- (void)closeOwnSocket {
    [self.clientSocket closeSocket];
}


// Posts a local notification carrying `userInfo` so broadcast state changes
// can be surfaced to the user / observed by the host app. `title` and `msg`
// are passed through localizedUserNotificationStringForKey:.
// Currently only invoked from commented-out code in broadcastStartedWithSetupInfo:.
- (void)sendLocalNotificationToHostAppWithTitle:(NSString*)title msg:(NSString*)msg userInfo:(NSDictionary*)userInfo
{
    UNUserNotificationCenter* center = [UNUserNotificationCenter currentNotificationCenter];

    UNMutableNotificationContent* content = [[UNMutableNotificationContent alloc] init];
    content.title = [NSString localizedUserNotificationStringForKey:title arguments:nil];
    content.body = [NSString localizedUserNotificationStringForKey:msg  arguments:nil];
    content.sound = [UNNotificationSound defaultSound];
    content.userInfo = userInfo;

    // Deliver almost immediately (0.1s trigger, effectively "now").
    UNTimeIntervalNotificationTrigger* trigger = [UNTimeIntervalNotificationTrigger
                                                  triggerWithTimeInterval:0.1f repeats:NO];

    UNNotificationRequest* request = [UNNotificationRequest requestWithIdentifier:@"-1talk.ScreenRecord-ios-oc"
                                                                          content:content trigger:trigger];

    // Fix: the original completion handler silently discarded scheduling
    // errors (e.g. notification permission denied). Log them for diagnosis.
    [center addNotificationRequest:request withCompletionHandler:^(NSError * _Nullable error) {
        if (error != nil) {
            NSLog(@"addNotificationRequest failed: %@", error);
        }
    }];
}


@end
