//
//  OpenCV.mm
//  OpenCVSample_iOS
//
//  Created by Hiroki Ishiura on 2020/01/04.
//  Copyright © 2020 Hiroki Ishiura. All rights reserved.
//

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
#import <opencv2/opencv.hpp>
#import <opencv2/imgproc.hpp>
#pragma clang diagnostic pop

#import <UIKit/UIKit.h>
#import "OpenCV.h"
//#import "ViewController-swift.h"

/// Converts a UIImage to a BGR cv::Mat (CV_8UC3).
/// Orientation metadata of the UIImage is discarded; callers should restore
/// it afterwards (see RestoreUIImageOrientation).
/// NOTE(review): image.size is in points, not pixels — a @2x image is
/// rendered at half its pixel resolution here. Confirm this is intended.
static void UIImageToMat(UIImage *image, cv::Mat &mat) {
	assert(image.size.width > 0 && image.size.height > 0);
	assert(image.CGImage != nil || image.CIImage != nil);

	// Create an RGBA pixel buffer (rows = height, cols = width).
	NSInteger width = image.size.width;
	NSInteger height = image.size.height;
	cv::Mat mat8uc4 = cv::Mat((int)height, (int)width, CV_8UC4);

	// Draw all pixels into the buffer.
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	if (image.CGImage) {
		// Render using Core Graphics.
		CGContextRef contextRef = CGBitmapContextCreate(mat8uc4.data, mat8uc4.cols, mat8uc4.rows, 8, mat8uc4.step, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrderDefault);
		// Guard against a NULL context (unsupported parameter combination)
		// instead of drawing into / releasing NULL.
		if (contextRef) {
			CGContextDrawImage(contextRef, CGRectMake(0, 0, width, height), image.CGImage);
			CGContextRelease(contextRef);
		}
	} else {
		// Render using Core Image.
		static CIContext* context = nil; // Cached: CIContext creation is expensive.
		if (!context) {
			context = [CIContext contextWithOptions:@{ kCIContextUseSoftwareRenderer: @NO }];
		}
		CGRect bounds = CGRectMake(0, 0, width, height);
		[context render:image.CIImage toBitmap:mat8uc4.data rowBytes:mat8uc4.step bounds:bounds format:kCIFormatRGBA8 colorSpace:colorSpace];
	}
	CGColorSpaceRelease(colorSpace);

	// Drop alpha and reorder bytes: RGBA -> BGR.
	// Fix: the original passed (width, height) here, swapping rows and cols;
	// the bug was masked only because cv::cvtColor reallocates the output.
	cv::Mat mat8uc3 = cv::Mat((int)height, (int)width, CV_8UC3);
	cv::cvtColor(mat8uc4, mat8uc3, cv::COLOR_RGBA2BGR);

	mat = mat8uc3;
}

/// Converts a 1-channel (grayscale, CV_8UC1) or 3-channel (BGR, CV_8UC3)
/// cv::Mat to a UIImage. The returned image carries no orientation metadata.
static UIImage *MatToUIImage(cv::Mat &mat) {

	// Normalize the input to a 3-channel RGB buffer.
	assert(mat.elemSize() == 1 || mat.elemSize() == 3);
	cv::Mat matrgb;
	if (mat.elemSize() == 1) {
		cv::cvtColor(mat, matrgb, cv::COLOR_GRAY2RGB);
	} else {
		cv::cvtColor(mat, matrgb, cv::COLOR_BGR2RGB);
	}

	// Wrap the pixel data in a CGImage, then a UIImage. NSData copies the
	// buffer, so the CGImage stays valid after matrgb goes out of scope.
	// Fix: the original branched on matrgb.elemSize() == 1 to pick a gray
	// color space, but matrgb is always 3-channel after the conversions
	// above, so that branch was dead code.
	NSData *data = [NSData dataWithBytes:matrgb.data length:(matrgb.elemSize() * matrgb.total())];
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
	CGImageRef imageRef = CGImageCreate(matrgb.cols, matrgb.rows, 8, 8 * matrgb.elemSize(), matrgb.step.p[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault);
	UIImage *image = [UIImage imageWithCGImage:imageRef];
	CGImageRelease(imageRef);
	CGDataProviderRelease(provider);
	CGColorSpaceRelease(colorSpace);

	return image;
}

/// Re-applies the original image's orientation to a processed image
/// (round-tripping through cv::Mat drops orientation metadata).
static UIImage *RestoreUIImageOrientation(UIImage *processed, UIImage *original) {
	UIImageOrientation wanted = original.imageOrientation;
	if (wanted != processed.imageOrientation) {
		processed = [UIImage imageWithCGImage:processed.CGImage scale:1.0 orientation:wanted];
	}
	return processed;
}

@implementation OpenCV

/// Despite its name, this does not convert BGR to grayscale: it builds a
/// binary mask of the red and green regions of the image via HSV
/// thresholding and returns that mask as a UIImage, with the input image's
/// orientation restored. (Renaming would break callers, so the name stays;
/// consider renaming at the call sites in a follow-up.)
/// NOTE(review): the red hue range only covers 0-10; in OpenCV's 0-180 hue
/// space red also wraps around at roughly 156-180 — confirm whether upper
/// reds should match as well.
+ (UIImage *)cvtColorBGR2GRAY:(UIImage *)image {
    cv::Mat bgrMat;
    UIImageToMat(image, bgrMat);

    // Threshold in HSV, where hue isolates color largely independent of
    // brightness.
    cv::Mat hsvMat;
    cv::cvtColor(bgrMat, hsvMat, cv::COLOR_BGR2HSV);

    // Red: hue 0-10. Green: hue 37-77. Saturation >= 43 and value >= 46
    // exclude near-gray and near-black pixels.
    cv::Mat maskR;
    cv::Mat maskG;
    cv::inRange(hsvMat, cv::Scalar(0.0, 43.0, 46.0), cv::Scalar(10.0, 255.0, 255.0), maskR);
    cv::inRange(hsvMat, cv::Scalar(37.0, 43.0, 46.0), cv::Scalar(77.0, 255.0, 255.0), maskG);

    // Combine both color masks into a single binary image.
    cv::Mat maskOutput;
    cv::bitwise_or(maskG, maskR, maskOutput);

    UIImage *maskImage = MatToUIImage(maskOutput);
    return RestoreUIImageOrientation(maskImage, image);
}

@end
