//
//  OcObj.m
//  TestSwiftCallOCOCCallC++
//
//  Created by CityDo on 2019/8/27.
//  Copyright © 2019 CityDo. All rights reserved.
//

//#include <opencv2/core/core.hpp>


#import "OcObj.h"
#import "CPPHello.hpp"
#import "NormalizeImage.hpp"
#import "UIImage+OpenCV.h"
#import "QYRectView.h"
#import <opencv2/imgcodecs/ios.h>




#include <iostream>
#include <fstream>

// --- Shared global state for the face-normalization pipeline ---
// NOTE(review): these are plain file-scope globals, so a single OcObj user is
// implicitly assumed; calls from multiple threads or instances would race on
// them — confirm single-threaded usage.
normalize::Image *tempImageOne;   // C++ normalizer engine; set up via -initNormImg:SettingsFile: (see NOTE there)
cv::Mat src;                      // last grayscale input frame (BGRA -> GRAY)
cv::Mat dstImag;                  // normalized output image produced by normImg()
cv::Mat dstInitshape;             // initial shape estimate returned by normImg()
cv::Mat dstRot;                   // rotation part of the alignment transform (consumed by affineLandmarks)
cv::Mat dstTrans;                 // translation part of the alignment transform (consumed by affineLandmarks)
std::vector<int> inBox(4);        // input face box [4 ints] fed into normImg()
std::vector<int> outBox(4);       // detected face box reported back by normImg()
Float32 findFaceTime;             // face-detection duration reported by normImg() — units defined by normImg, TODO confirm
int frameIdx;                     // frame counter; 0 forces a fresh detection pass

@implementation OcObj{
    NSInteger count; // frames processed so far; debug images are saved only when count == 1
}

/// Smoke test for the ObjC -> C++ bridge: constructs a C++ CPPHello and
/// invokes hello().
/// Fixed: the original `new CPPHello()` was never deleted (heap leak).
-(void)testLog
{
    CPPHello *cphello = new CPPHello();
    cphello->hello();
    delete cphello; // was leaked before
}

#pragma mark -- Image processing
/// Normalizes one camera frame: wraps the (assumed 32BGRA — TODO confirm)
/// pixel buffer, converts it to grayscale, runs the face-normalization
/// pipeline, and returns the normalized image as a UIImage.
/// On the very first frame the source and result are also saved to the
/// Documents directory for offline debugging.
/// Fixed: removed the unused locals `tempMat` and `format`.
-(UIImage *)imageNormImg:(CVPixelBufferRef)pixelBuffer;
{
    cv::Mat mat;
    count = count + 1;

    // Dump the raw first frame for offline inspection.
    if (count == 1) {
        UIImage *image = [self imageFromCVPixelBufferRef1:pixelBuffer];
        [self saveImageToDomainsWithDirectorystringByAppendingPathComponent:@"imageNormImg-src.jpg" WithImage:image];
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    void *address = CVPixelBufferGetBaseAddress(pixelBuffer);
    int width = (int) CVPixelBufferGetWidth(pixelBuffer);
    int height = (int) CVPixelBufferGetHeight(pixelBuffer);
    int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);

    // Wrap the locked buffer without copying; cvtColor writes a fresh copy
    // into the global `src`, so `src` stays valid after the unlock below.
    mat = cv::Mat(height, width, CV_8UC4, address, bytesPerRow);
    cv::cvtColor(mat, src, CV_BGRA2GRAY);

    tempImageOne->normImg(src, dstImag, dstInitshape, dstRot, dstTrans, inBox, frameIdx, outBox, findFaceTime);

    UIImage *image = [UIImage imageWithCVMat:dstImag];

    // Dump the first normalized result via both the UIKit and OpenCV writers.
    if (count == 1) {
        [self saveImageToDomainsWithDirectorystringByAppendingPathComponent:@"imageNormImg-dstImag-ios.jpg" WithImage:image];
        [self saveImageToDocumentsPath:@"imageNormImg-dstImag-cv.jpg" With:dstImag];
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return image;
}

// Converts a 4-bytes-per-pixel image into tightly packed 3-byte pixels by
// copying channels 2, 1, 0 of each source pixel (e.g. BGRA -> RGB).
// `dest` must hold at least width*height*3 bytes.
// Fixed: the original wrote `*src+2`, which parses as `(*src) + 2` — it added
// 2 to the first channel's VALUE instead of reading the channel at offset 2.
// The intended reads are src[2], src[1], src[0].
void abgr2rgb(unsigned char * src,unsigned char * dest,int width,int height)
{
    for (int i = 0; i < width * height; i++, src += 4)
    {
        *dest++ = src[2];
        *dest++ = src[1];
        *dest++ = src[0];
    }
}

/// Like -imageNormImg: but returns the normalized result as a new
/// CVPixelBufferRef built by -getImageBufferFromMat:. The caller owns (+1)
/// the returned buffer.
/// Fixed: CVPixelBufferUnlockBaseAddress was called with `bytesPerRow` as the
/// lock-flags argument; the flags must match the lock call (0 here). Also
/// removed the unused local `_mat`, dead commented-out code, and a duplicate
/// NSLog of the same (unchanged) dstImag stats.
-(CVPixelBufferRef)imageNormImgToPixelBuffer:(CVPixelBufferRef)pixelBuffer;
{
    cv::Mat mat;

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    void *address = CVPixelBufferGetBaseAddress(pixelBuffer);
    int width = (int) CVPixelBufferGetWidth(pixelBuffer);
    int height = (int) CVPixelBufferGetHeight(pixelBuffer);
    int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);

    // cvtColor copies into the global `src`, so unlocking afterwards is safe.
    mat = cv::Mat(height, width, CV_8UC4, address, bytesPerRow);
    cv::cvtColor(mat, src, CV_BGRA2GRAY);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    tempImageOne->normImg(src, dstImag, dstInitshape, dstRot, dstTrans, inBox, frameIdx, outBox, findFaceTime);

    NSLog(@"After normImg   cols=%d rows=%d size dims=%d channels=%d  elemSize=%zu", dstImag.cols,
          dstImag.rows,
          dstImag.dims, dstImag.channels(), dstImag.elemSize());

    CVPixelBufferRef imageBuffer = [self getImageBufferFromMat:dstImag];

    return imageBuffer;
}

/// Runs the normalization pipeline on a BGRA pixel buffer and returns the raw
/// bytes of the normalized image.
/// NOTE(review): the 112*112*4 length assumes dstImag is 112x112 with a
/// 4-byte element (e.g. CV_32FC1 or CV_8UC4) — confirm against normImg(),
/// otherwise this reads out of bounds.
/// Fixed: CVPixelBufferUnlockBaseAddress was called with `bytesPerRow` as the
/// lock-flags argument; the flags must match the lock call (0 here). Also
/// removed the unused local `_mat`.
-(NSData  *)commonNormImgToBuffer:(CVPixelBufferRef)pixelBuffer;
{
    cv::Mat mat;

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    void *address = CVPixelBufferGetBaseAddress(pixelBuffer);
    int width = (int) CVPixelBufferGetWidth(pixelBuffer);
    int height = (int) CVPixelBufferGetHeight(pixelBuffer);
    int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);

    // cvtColor copies into the global `src`; unlocking afterwards is safe.
    mat = cv::Mat(height, width, CV_8UC4, address, bytesPerRow);
    cv::cvtColor(mat, src, CV_BGRA2GRAY);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    tempImageOne->normImg(src, dstImag, dstInitshape, dstRot, dstTrans, inBox, frameIdx, outBox, findFaceTime);

    NSLog(@"After normImg   cols=%d rows=%d size dims=%d channels=%d  elemSize=%zu", dstImag.cols,
          dstImag.rows,
          dstImag.dims, dstImag.channels(), dstImag.elemSize());

    return [NSData dataWithBytes:dstImag.data length:112*112*4];
}

/// Runs the normalization pipeline, optionally seeded with a previous face box.
/// @param preBox empty -> frameIdx is reset to 0 so normImg performs a fresh
///        detection; otherwise the first 4 numbers are copied into the
///        tracker's input box (callers must pass at least 4 elements).
/// @return dictionary with:
///   "findFaceTime" (NSNumber)            — detection duration from normImg;
///   "findFaceData" (NSArray<NSNumber *>) — output face box;
///   "dstImagData"  (NSData)              — raw normalized-image bytes.
/// NOTE(review): the 112*112*4 length assumes a 112x112, 4-bytes-per-element
/// result — confirm against normImg().
/// Fixed: (1) CVPixelBufferUnlockBaseAddress was given `bytesPerRow` instead
/// of the lock flags (0); (2) removed the no-op self-assignments
/// `dstRot = dstRot;` / `dstTrans = dstTrans;`; (3) removed the unused `_mat`.
-(NSDictionary *)normImgToBuffer:(CVPixelBufferRef)pixelBuffer With:(NSArray *)preBox;
{
    cv::Mat mat;

    NSMutableDictionary *tempDictionary = [[NSMutableDictionary alloc] init];

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    void *address = CVPixelBufferGetBaseAddress(pixelBuffer);
    int width = (int) CVPixelBufferGetWidth(pixelBuffer);
    int height = (int) CVPixelBufferGetHeight(pixelBuffer);
    int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);

    mat = cv::Mat(height, width, CV_8UC4, address, bytesPerRow);
    cv::cvtColor(mat, src, CV_BGRA2GRAY);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    if (preBox.count == 0) {
        // No prior box: force a fresh face detection.
        frameIdx = 0;
    } else {
        for (NSUInteger i = 0; i < 4; ++i) {
            inBox[i] = [preBox[i] intValue];
        }
    }

    tempImageOne->normImg(src, dstImag, dstInitshape, dstRot, dstTrans, inBox, frameIdx, outBox, findFaceTime);
    frameIdx++;

    NSLog(@"After normImg   cols=%d rows=%d size dims=%d channels=%d  elemSize=%zu", dstImag.cols,
          dstImag.rows,
          dstImag.dims, dstImag.channels(), dstImag.elemSize());

    [tempDictionary setObject:[NSNumber numberWithFloat:findFaceTime] forKey:@"findFaceTime"];

    NSMutableArray *array = [NSMutableArray new];
    for (size_t i = 0; i < outBox.size(); i++)
    {
        [array addObject:[NSNumber numberWithInt:outBox[i]]];
    }
    [tempDictionary setObject:array forKey:@"findFaceData"];

    [tempDictionary setObject:[NSData dataWithBytes:dstImag.data length:112*112*4] forKey:@"dstImagData"];

    return tempDictionary;
}

/// Runs the normalization pipeline with a forced fresh detection
/// (frameIdx = 0). Returns the same dictionary shape as -normImgToBuffer:With:
/// ("findFaceTime", "findFaceData", "dstImagData").
/// NOTE(review): the 112*112*4 length assumes a 112x112, 4-bytes-per-element
/// result — confirm against normImg().
/// Fixed: CVPixelBufferUnlockBaseAddress was given `bytesPerRow` instead of
/// the lock flags (0); removed the unused local `_mat` and dead comments.
-(NSDictionary *)normImgToBuffer:(CVPixelBufferRef)pixelBuffer;
{
    cv::Mat mat;

    NSMutableDictionary *tempDictionary = [[NSMutableDictionary alloc] init];

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    void *address = CVPixelBufferGetBaseAddress(pixelBuffer);
    int width = (int) CVPixelBufferGetWidth(pixelBuffer);
    int height = (int) CVPixelBufferGetHeight(pixelBuffer);
    int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);

    mat = cv::Mat(height, width, CV_8UC4, address, bytesPerRow);
    cv::cvtColor(mat, src, CV_BGRA2GRAY);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    // Always run a fresh detection on this entry point.
    frameIdx = 0;
    tempImageOne->normImg(src, dstImag, dstInitshape, dstRot, dstTrans, inBox, frameIdx, outBox, findFaceTime);

    NSLog(@"After normImg   cols=%d rows=%d size dims=%d channels=%d  elemSize=%zu", dstImag.cols,
          dstImag.rows,
          dstImag.dims, dstImag.channels(), dstImag.elemSize());

    [tempDictionary setObject:[NSNumber numberWithFloat:findFaceTime] forKey:@"findFaceTime"];

    NSMutableArray *array = [NSMutableArray new];
    for (size_t i = 0; i < outBox.size(); i++)
    {
        [array addObject:[NSNumber numberWithInt:outBox[i]]];
    }
    [tempDictionary setObject:array forKey:@"findFaceData"];

    [tempDictionary setObject:[NSData dataWithBytes:dstImag.data length:112*112*4] forKey:@"dstImagData"];

    return tempDictionary;
}

/// Creates an empty 32BGRA CVPixelBuffer of the given size, OpenGL ES
/// compatible and IOSurface backed. The caller owns (+1) the result.
/// Fixed: (1) CFDictionaryCreate was called with NULL key/value callbacks, so
/// the ObjC keys/values were not retained by the dictionary; it now uses the
/// kCFType callbacks. (2) The options dictionary was never released (the
/// CFRelease was commented out) — leak.
-(CVPixelBufferRef)createPixelBufferWithSize:(CGSize)size {
    const void *keys[] = {
        kCVPixelBufferOpenGLESCompatibilityKey,
        kCVPixelBufferIOSurfacePropertiesKey,
    };
    const void *values[] = {
        (__bridge const void *)([NSNumber numberWithBool:YES]),
        (__bridge const void *)([NSDictionary dictionary])
    };

    OSType bufferPixelFormat = kCVPixelFormatType_32BGRA;

    CFDictionaryRef optionsDictionary = CFDictionaryCreate(NULL, keys, values, 2,
                                                           &kCFTypeDictionaryKeyCallBacks,
                                                           &kCFTypeDictionaryValueCallBacks);

    CVPixelBufferRef pixelBuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault,
                        size.width,
                        size.height,
                        bufferPixelFormat,
                        optionsDictionary,
                        &pixelBuffer);

    CFRelease(optionsDictionary);

    return pixelBuffer;
}


/// Copies the given pixel buffer into a grayscale cv::Mat.
/// The BGRA buffer is wrapped without copying; cvtColor then allocates the
/// returned matrix, so the result stays valid after the buffer is unlocked.
-(cv::Mat) matFromImageBuffer: (CVImageBufferRef) pixelBuffer {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    void *base = CVPixelBufferGetBaseAddress(pixelBuffer);
    int cols = (int)CVPixelBufferGetWidth(pixelBuffer);
    int rows = (int)CVPixelBufferGetHeight(pixelBuffer);
    int stride = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);

    cv::Mat wrapped(rows, cols, CV_8UC4, base, stride);
    cv::Mat gray;
    cv::cvtColor(wrapped, gray, CV_BGRA2GRAY);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    return gray;
}

/// Wraps a cv::Mat into a newly created CVPixelBuffer; the caller owns (+1)
/// the result.
/// NOTE(review): kCVPixelFormatType_32AlphaGray is assumed to match the mat's
/// element layout — confirm against how dstImag is produced by normImg().
/// Fixed: (1) the options dictionary was passed via CFBridgingRetain and never
/// released (leak) — a __bridge cast is sufficient for a call argument;
/// (2) the single memcpy used mat.total() (element COUNT, not bytes) and
/// ignored the destination's row padding — rows are now copied one at a time
/// honoring both the mat's element size and the buffer's bytes-per-row.
- (CVPixelBufferRef) getImageBufferFromMat: (cv::Mat) mat {
    int width = mat.cols;
    int height = mat.rows;

    NSDictionary *options = @{
        (id)kCVPixelBufferWidthKey : @(width),
        (id)kCVPixelBufferHeightKey : @(height),
    };

    CVPixelBufferRef imageBuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorMalloc, width, height,
                                          kCVPixelFormatType_32AlphaGray,
                                          (__bridge CFDictionaryRef)options,
                                          &imageBuffer);

    NSParameterAssert(status == kCVReturnSuccess && imageBuffer != NULL);

    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t destStride = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t srcRowBytes = (size_t)mat.cols * mat.elemSize();
    size_t copyBytes = MIN(srcRowBytes, destStride);
    for (int row = 0; row < height; row++) {
        memcpy(base + (size_t)row * destStride, mat.ptr(row), copyBytes);
    }
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return imageBuffer;
}

/// Initializes the shared C++ normalizer with the detector/settings file
/// paths and resets the debug frame counter.
/// NOTE(review): `tempImageOne` is a zero-initialized global pointer and is
/// never assigned anywhere in this file — unless another translation unit
/// allocates it before this runs, this call dereferences NULL. Confirm the
/// allocation site (or allocate here) before shipping.
-(void)initNormImg:(NSString *)strDetectorFile SettingsFile:(NSString *) strSettingsFile;
{
    count = 0;
    tempImageOne->init([strDetectorFile UTF8String],[strSettingsFile UTF8String]);
    
}

// A CVPixelBufferRef is the decoded pixel data of a CMSampleBufferRef.

/** UIImage convert to CMSampleBufferRef */

/** CVPixelBufferRef convert to UIImage */
/// Converts a CVPixelBuffer to a UIImage via Core Image using a software
/// renderer context.
/// Fixed: the original called CVPixelBufferRelease on the caller's buffer —
/// this method takes no ownership (no Create/Copy/Retain here), so that was
/// an over-release that could crash the caller later.
- (UIImage *)imageFromCVPixelBufferRef0:(CVPixelBufferRef)pixelBuffer{
    // The pixel buffer must be locked while CIImage reads it.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    CIContext *temporaryContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];

    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer))];

    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return uiImage;
}


/// Converts a CVPixelBuffer (32ARGB or 32BGRA) to a UIImage using the
/// CoreGraphics-based helper below; returns nil if the conversion fails.
-(UIImage *)imageFromCVPixelBufferRef1:(CVPixelBufferRef)pixelBuffer{
    UIImage *result = nil;
    @autoreleasepool {
        CGImageRef cgImage = NULL;
        CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
        if (CreateCGImageFromCVPixelBuffer(pixelBuffer, &cgImage) == noErr) {
            result = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
        CGImageRelease(cgImage); // safe on NULL
    }
    return result;
}

// Creates a CGImage that directly wraps the pixel buffer's bytes (zero copy).
// Supports only 32ARGB and 32BGRA buffers; returns -95014 for anything else.
//
// Ownership: the buffer is locked and retained here; both are balanced by
// ReleaseCVPixelBuffer, which CoreGraphics invokes through the data provider
// once the CGImage no longer needs the bytes. The returned image is +1
// (caller releases).
//
// NOTE(review): `err` is never set after its initialization, so the
// `if (err && image)` cleanup branch is dead, and if CGImageCreate fails a
// NULL *imageOut is returned together with noErr — callers should nil-check
// the image (as -imageFromCVPixelBufferRef1: effectively does).
static OSStatus CreateCGImageFromCVPixelBuffer(CVPixelBufferRef pixelBuffer, CGImageRef *imageOut)
{
    OSStatus err = noErr;
    OSType sourcePixelFormat;
    size_t width, height, sourceRowBytes;
    void *sourceBaseAddr = NULL;
    CGBitmapInfo bitmapInfo;
    CGColorSpaceRef colorspace = NULL;
    CGDataProviderRef provider = NULL;
    CGImageRef image = NULL;
    sourcePixelFormat = CVPixelBufferGetPixelFormatType( pixelBuffer );
    if ( kCVPixelFormatType_32ARGB == sourcePixelFormat )
        bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipFirst;
    else if ( kCVPixelFormatType_32BGRA == sourcePixelFormat )
        bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst;
    else
        return -95014; // only uncompressed pixel formats
    sourceRowBytes = CVPixelBufferGetBytesPerRow( pixelBuffer );
    width = CVPixelBufferGetWidth( pixelBuffer );
    height = CVPixelBufferGetHeight( pixelBuffer );
    // Lock + retain; both released by ReleaseCVPixelBuffer via the provider.
    CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
    sourceBaseAddr = CVPixelBufferGetBaseAddress( pixelBuffer );
    colorspace = CGColorSpaceCreateDeviceRGB();
    CVPixelBufferRetain( pixelBuffer );
    provider = CGDataProviderCreateWithData( (void *)pixelBuffer, sourceBaseAddr, sourceRowBytes * height, ReleaseCVPixelBuffer);
    image = CGImageCreate(width, height, 8, 32, sourceRowBytes, colorspace, bitmapInfo, provider, NULL, true, kCGRenderingIntentDefault);
    if ( err && image ) {
        CGImageRelease( image );
        image = NULL;
    }
    if ( provider ) CGDataProviderRelease( provider );
    if ( colorspace ) CGColorSpaceRelease( colorspace );
    *imageOut = image;
    return err;
}

// Converts a pixel buffer to a UIImage by copying bytes pixel-by-pixel into a
// UIGraphics bitmap context.
// NOTE(review): several fragile assumptions here — confirm before relying on
// this path:
//  * bytesPerPixel = bytesPerRow / width ignores row padding, so the computed
//    offsets drift whenever the buffer's rows are padded;
//  * the context from UIGraphicsBeginImageContext is assumed to have the same
//    4-byte pixel layout and the same row stride as the source buffer;
//  * the R/G/B/A channel-order comments assume a matching source format.
-(UIImage *)imageFromCVPixelBufferRef2:(CVPixelBufferRef)pixelBuffer{
    CVPixelBufferRef pb = (CVPixelBufferRef)pixelBuffer;
    CVPixelBufferLockBaseAddress(pb, kCVPixelBufferLock_ReadOnly);
    int w = CVPixelBufferGetWidth(pb);
    int h = CVPixelBufferGetHeight(pb);
    int r = CVPixelBufferGetBytesPerRow(pb);
    int bytesPerPixel = r/w; // assumes rows are tightly packed — see NOTE above
    unsigned char *buffer =(unsigned char *) CVPixelBufferGetBaseAddress(pb);
    UIGraphicsBeginImageContext(CGSizeMake(w, h));
    CGContextRef c = UIGraphicsGetCurrentContext();
    unsigned char *data =(unsigned char *)CGBitmapContextGetData(c);
    if (data != NULL) {
        int maxY = h;
        for(int y = 0; y<maxY; y++) {
            for(int x = 0; x<w; x++) {
                int offset = bytesPerPixel*((w*y)+x);
                data[offset] = buffer[offset];     // R
                data[offset+1] = buffer[offset+1]; // G
                data[offset+2] = buffer[offset+2]; // B
                data[offset+3] = buffer[offset+3]; // A
            }
        }
    }
    UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    CVPixelBufferUnlockBaseAddress(pb, kCVPixelBufferLock_ReadOnly);
    return img;
}


// CGDataProvider release callback paired with CreateCGImageFromCVPixelBuffer:
// balances the lock and the retain taken there once CoreGraphics is done with
// the pixel data.
static void ReleaseCVPixelBuffer(void *pixel, const void *data, size_t size)
{
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)pixel;
    CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
    CVPixelBufferRelease( pixelBuffer );
}

/// Saves a UIImage as a JPEG (quality 1.0) into the app's Documents directory
/// under the given file name.
-(void)saveImageToDomainsWithDirectorystringByAppendingPathComponent:(NSString *)suffix WithImage:(UIImage *)image
{
    // Encode as JPEG at maximum quality.
    NSData *jpegData = UIImageJPEGRepresentation(image, 1.0);
    // Resolve the sandbox Documents directory and append the file name.
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [searchPaths firstObject];
    NSString *targetPath = [documentsDirectory stringByAppendingPathComponent:suffix];
    // Write the encoded bytes to disk.
    [jpegData writeToFile:targetPath atomically:YES];
}

/// Writes a cv::Mat into the app's Documents directory via OpenCV's imwrite;
/// the image format is inferred from the file-name extension.
-(void)saveImageToDocumentsPath:(NSString *) suffix With: (cv::Mat) mat {
    // Resolve the sandbox Documents directory and append the file name.
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *outputPath = [[searchPaths firstObject] stringByAppendingPathComponent:suffix];
    cv::imwrite([outputPath UTF8String], mat);
}

/// Renders the image of a CMSampleBuffer into a UIImage by creating a
/// CGBitmapContext directly on top of the buffer's base address (BGRA
/// little-endian, premultiplied alpha) and snapshotting it.
/// NOTE(review): CGBitmapContextCreate can return NULL (e.g. for an
/// unsupported format), which would make CGBitmapContextCreateImage fail —
/// confirm the capture format before relying on this path.
- (UIImage*)imageFromPixelBuffer:(CMSampleBufferRef)p {
    CVImageBufferRef buffer;
    buffer = CMSampleBufferGetImageBuffer(p);
    
    // Keep the buffer locked while the CG context reads its bytes.
    CVPixelBufferLockBaseAddress(buffer, 0);
    uint8_t *base;
    size_t width, height, bytesPerRow;
    base = (uint8_t *)CVPixelBufferGetBaseAddress(buffer);
    width = CVPixelBufferGetWidth(buffer);
    height = CVPixelBufferGetHeight(buffer);
    bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);
    
    CGColorSpaceRef colorSpace;
    CGContextRef cgContext;
    colorSpace = CGColorSpaceCreateDeviceRGB();
    cgContext = CGBitmapContextCreate(base, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);
    
    // Snapshot the context; the UIImage owns a copy independent of `buffer`.
    CGImageRef cgImage;
    UIImage *image;
    cgImage = CGBitmapContextCreateImage(cgContext);
    image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(cgContext);
    
    CVPixelBufferUnlockBaseAddress(buffer, 0);
    
    
    return image;
}

/// Maps the detector's landmark points (expected: 136 floats — TODO confirm
/// 68 (x,y) pairs) back through the rotation/translation computed by the last
/// normImg() call, and returns the transformed points as NSNumbers.
/// Fixed: (1) the NSLog passed an NSNumber OBJECT to a %.5f format specifier
/// (undefined behavior) — the value is now unboxed first; (2) the input copy
/// loop is clamped to the 136-element capacity of the stack buffers, so a
/// longer input array can no longer overflow them.
-(NSArray *) getAffineLandmarks:(NSArray *) points;
{
    float pointNum[136];
    int facePointNum[136];

    NSMutableArray *array = [[NSMutableArray alloc] init];

    NSUInteger n = MIN(points.count, (NSUInteger)136);
    for (NSUInteger i = 0; i < n; i++) {
        float value = [points[i] floatValue];
        NSLog(@"OcObj.h中执行affineLandmarks之前的点位值是:%.5f", value);
        pointNum[i] = value;
    }

    tempImageOne->affineLandmarks(pointNum, facePointNum, dstRot, dstTrans);

    for (int i = 0; i < 136; i++) {
        std::cout <<"OcObj.h中执行affineLandmarks之后的点位值是:" <<facePointNum[i]<< std::endl;
        [array addObject:[NSNumber numberWithInt:facePointNum[i]]];
    }

    return array;
}

@end
