/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/convert.h"
#include "convert_jpeg.h"
#include "libyuv/convert_argb.h"
#include "libyuv/video_common.h"

//#ifdef HAVE_JPEG
#include "libyuv/mjpeg_decoder.h"
#include "ImageProc.h"
#include "mylog.h"
//#endif

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

//#ifdef HAVE_JPEG
// Running destination state shared with the MJpeg row callbacks below.
// The plane pointers are advanced and h decremented as each band of rows
// is decoded and written.
struct I420Buffers {
  uint8_t* y;    // Next write position in the destination Y plane.
  int y_stride;
  uint8_t* u;    // Next write position in the destination U plane.
  int u_stride;
  uint8_t* v;    // Next write position in the destination V plane.
  int v_stride;
  int w;         // Destination width in pixels.
  int h;         // Rows remaining to be written.
};

static void JpegCopyI420(void* opaque,
                         const uint8_t* const* data,
                         const int* strides,
                         int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I420Copy(data[0], strides[0], data[1], strides[1], data[2], strides[2],
           dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
           dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// MJpegDecoder row callback for 4:2:2 sources: converts the decoded rows to
// I420 (halving chroma vertically) and advances the destination pointers.
// (Removed leftover commented-out clock() timing code.)
static void JpegI422ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I422ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI444ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I444ToI420(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->y, dest->y_stride, dest->u, dest->u_stride, dest->v,
             dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

static void JpegI400ToI420(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  I420Buffers* dest = (I420Buffers*)(opaque);
  I400ToI420(data[0], strides[0], dest->y, dest->y_stride, dest->u,
             dest->u_stride, dest->v, dest->v_stride, dest->w, rows);
  dest->y += rows * dest->y_stride;
  dest->u += ((rows + 1) >> 1) * dest->u_stride;
  dest->v += ((rows + 1) >> 1) * dest->v_stride;
  dest->h -= rows;
}

// Query size of MJPG in pixels.
LIBYUV_API
int MJPGSize(const uint8_t* sample,
             size_t sample_size,
             int* width,
             int* height) {
  // Parse just the JPEG headers to learn the frame dimensions.
  MJpegDecoder decoder;
  const LIBYUV_BOOL loaded = decoder.LoadFrame(sample, sample_size);
  if (loaded) {
    *width = decoder.GetWidth();
    *height = decoder.GetHeight();
  }
  decoder.UnloadFrame();
  return loaded ? 0 : -1;  // -1 for runtime failure.
}

// MJPG (Motion JPeg) to I420
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToI420(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_y,
               int dst_stride_y,
               uint8_t* dst_u,
               int dst_stride_u,
               uint8_t* dst_v,
               int dst_stride_v,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.

  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }


  if (ret) {
    I420Buffers bufs = {dst_y, dst_stride_y, dst_u,     dst_stride_u,
                        dst_v, dst_stride_v, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegCopyI420, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {

	   ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToI420, &bufs, dst_width,
			dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToI420, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToI420, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}

//#ifdef HAVE_JPEG

// Running destination state shared with the MJpeg-to-ARGB row callbacks
// below; argb is advanced and h decremented as rows are written.
struct ARGBBuffers {
  uint8_t* argb;    // Next write position in the destination ARGB buffer.
  int argb_stride;
  int w;            // Destination width in pixels.
  int h;            // Rows remaining to be written.
};

static void JpegI420ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I420ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI422ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I422ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI444ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I444ToARGB(data[0], strides[0], data[1], strides[1], data[2], strides[2],
             dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

static void JpegI400ToARGB(void* opaque,
                           const uint8_t* const* data,
                           const int* strides,
                           int rows) {
  ARGBBuffers* dest = (ARGBBuffers*)(opaque);
  I400ToARGB(data[0], strides[0], dest->argb, dest->argb_stride, dest->w, rows);
  dest->argb += rows * dest->argb_stride;
  dest->h -= rows;
}

// MJPG (Motion JPeg) to ARGB
// TODO(fbarchard): review src_width and src_height requirement. dst_width and
// dst_height may be enough.
LIBYUV_API
int MJPGToARGB(const uint8_t* sample,
               size_t sample_size,
               uint8_t* dst_argb,
               int dst_stride_argb,
               int src_width,
               int src_height,
               int dst_width,
               int dst_height) {
  if (sample_size == kUnknownDataSize) {
    // ERROR: MJPEG frame size unknown
    return -1;
  }

  // TODO(fbarchard): Port MJpeg to C.
  MJpegDecoder mjpeg_decoder;
  LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  if (ret && (mjpeg_decoder.GetWidth() != src_width ||
              mjpeg_decoder.GetHeight() != src_height)) {
    // ERROR: MJPEG frame has unexpected dimensions
    mjpeg_decoder.UnloadFrame();
    return 1;  // runtime failure
  }
  if (ret) {
    ARGBBuffers bufs = {dst_argb, dst_stride_argb, dst_width, dst_height};
    // YUV420
    if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
        mjpeg_decoder.GetNumComponents() == 3 &&
        mjpeg_decoder.GetVertSampFactor(0) == 2 &&
        mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
        mjpeg_decoder.GetVertSampFactor(1) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
        mjpeg_decoder.GetVertSampFactor(2) == 1 &&
        mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI420ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV422
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI422ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV444
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceYCbCr &&
               mjpeg_decoder.GetNumComponents() == 3 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
               mjpeg_decoder.GetVertSampFactor(1) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
               mjpeg_decoder.GetVertSampFactor(2) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(2) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI444ToARGB, &bufs, dst_width,
                                           dst_height);
      // YUV400
    } else if (mjpeg_decoder.GetColorSpace() ==
                   MJpegDecoder::kColorSpaceGrayscale &&
               mjpeg_decoder.GetNumComponents() == 1 &&
               mjpeg_decoder.GetVertSampFactor(0) == 1 &&
               mjpeg_decoder.GetHorizSampFactor(0) == 1) {
      ret = mjpeg_decoder.DecodeToCallback(&JpegI400ToARGB, &bufs, dst_width,
                                           dst_height);
    } else {
      // TODO(fbarchard): Implement conversion for any other colorspace/sample
      // factors that occur in practice.
      // ERROR: Unable to convert MJPEG frame because format is not supported
      mjpeg_decoder.UnloadFrame();
      return 1;
    }
  }
  return ret ? 0 : 1;
}



// Convert camera sample to I420 with cropping, rotation and vertical flip.
// src_width is used for source stride computation
// src_height is used to compute location of planes, and indicate inversion
// sample_size is measured in bytes and is the size of the frame.
//   With MJPEG it is the compressed size of the frame.
// Converts a camera sample in any supported fourcc format to I420, with
// optional cropping, rotation and vertical flip (negative src_height).
// Returns 0 on success, -1 for invalid parameters or unknown fourcc, and a
// positive value for runtime failures (e.g. out of memory).
// NOTE(review): removed leftover debug LOGE calls; one of them passed
// pointers to %d format specifiers, which is undefined behavior.
LIBYUV_API
int ConvertToI420(const uint8_t* sample,
                  size_t sample_size,
                  uint8_t* dst_y,
                  int dst_stride_y,
                  uint8_t* dst_u,
                  int dst_stride_u,
                  uint8_t* dst_v,
                  int dst_stride_v,
                  int crop_x,
                  int crop_y,
                  int src_width,
                  int src_height,
                  int crop_width,
                  int crop_height,
                  enum RotationMode rotation,
                  uint32_t fourcc) {
  uint32_t format = CanonicalFourCC(fourcc);
  int aligned_src_width = (src_width + 1) & ~1;
  const uint8_t* src;
  const uint8_t* src_uv;
  const int abs_src_height = (src_height < 0) ? -src_height : src_height;
  // TODO(nisse): Why allow crop_height < 0?
  const int abs_crop_height = (crop_height < 0) ? -crop_height : crop_height;
  int r = 0;
  LIBYUV_BOOL need_buf =
      (rotation && format != FOURCC_I420 && format != FOURCC_NV12 &&
       format != FOURCC_NV21 && format != FOURCC_YV12) ||
      dst_y == sample;
  uint8_t* tmp_y = dst_y;
  uint8_t* tmp_u = dst_u;
  uint8_t* tmp_v = dst_v;
  int tmp_y_stride = dst_stride_y;
  int tmp_u_stride = dst_stride_u;
  int tmp_v_stride = dst_stride_v;
  uint8_t* rotate_buffer = NULL;
  const int inv_crop_height =
      (src_height < 0) ? -abs_crop_height : abs_crop_height;

  if (!dst_y || !dst_u || !dst_v || !sample || src_width <= 0 ||
      crop_width <= 0 || src_height == 0 || crop_height == 0) {
    return -1;
  }

  // One pass rotation is available for some formats. For the rest, convert
  // to I420 (with optional vertical flipping) into a temporary I420 buffer,
  // and then rotate the I420 to the final destination buffer.
  // For in-place conversion, if destination dst_y is same as source sample,
  // also enable temporary buffer.
  if (need_buf) {
    int y_size = crop_width * abs_crop_height;
    int uv_size = ((crop_width + 1) / 2) * ((abs_crop_height + 1) / 2);
    rotate_buffer = (uint8_t*)malloc(y_size + uv_size * 2); /* NOLINT */
    if (!rotate_buffer) {
      return 1;  // Out of memory runtime error.
    }
    dst_y = rotate_buffer;
    dst_u = dst_y + y_size;
    dst_v = dst_u + uv_size;
    dst_stride_y = crop_width;
    dst_stride_u = dst_stride_v = ((crop_width + 1) / 2);
  }

  switch (format) {
    // Single plane formats
    case FOURCC_YUY2:
      src = sample + (aligned_src_width * crop_y + crop_x) * 2;
      r = YUY2ToI420(src, aligned_src_width * 2, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, crop_width,
                     inv_crop_height);
      break;
    case FOURCC_UYVY:
      src = sample + (aligned_src_width * crop_y + crop_x) * 2;
      r = UYVYToI420(src, aligned_src_width * 2, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, crop_width,
                     inv_crop_height);
      break;
    case FOURCC_RGBP:
      src = sample + (src_width * crop_y + crop_x) * 2;
      r = RGB565ToI420(src, src_width * 2, dst_y, dst_stride_y, dst_u,
                       dst_stride_u, dst_v, dst_stride_v, crop_width,
                       inv_crop_height);
      break;
    case FOURCC_RGBO:
      src = sample + (src_width * crop_y + crop_x) * 2;
      r = ARGB1555ToI420(src, src_width * 2, dst_y, dst_stride_y, dst_u,
                         dst_stride_u, dst_v, dst_stride_v, crop_width,
                         inv_crop_height);
      break;
    case FOURCC_R444:
      src = sample + (src_width * crop_y + crop_x) * 2;
      r = ARGB4444ToI420(src, src_width * 2, dst_y, dst_stride_y, dst_u,
                         dst_stride_u, dst_v, dst_stride_v, crop_width,
                         inv_crop_height);
      break;
    case FOURCC_24BG:
      src = sample + (src_width * crop_y + crop_x) * 3;
      r = RGB24ToI420(src, src_width * 3, dst_y, dst_stride_y, dst_u,
                      dst_stride_u, dst_v, dst_stride_v, crop_width,
                      inv_crop_height);
      break;
    case FOURCC_RAW:
      src = sample + (src_width * crop_y + crop_x) * 3;
      r = RAWToI420(src, src_width * 3, dst_y, dst_stride_y, dst_u,
                    dst_stride_u, dst_v, dst_stride_v, crop_width,
                    inv_crop_height);
      break;
    case FOURCC_ARGB:
      src = sample + (src_width * crop_y + crop_x) * 4;
      r = ARGBToI420(src, src_width * 4, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, crop_width,
                     inv_crop_height);
      break;
    case FOURCC_BGRA:
      src = sample + (src_width * crop_y + crop_x) * 4;
      r = BGRAToI420(src, src_width * 4, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, crop_width,
                     inv_crop_height);
      break;
    case FOURCC_ABGR:
      src = sample + (src_width * crop_y + crop_x) * 4;
      r = ABGRToI420(src, src_width * 4, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, crop_width,
                     inv_crop_height);
      break;
    case FOURCC_RGBA:
      src = sample + (src_width * crop_y + crop_x) * 4;
      r = RGBAToI420(src, src_width * 4, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, crop_width,
                     inv_crop_height);
      break;
    // TODO(fbarchard): Add AR30 and AB30
    case FOURCC_I400:
      src = sample + src_width * crop_y + crop_x;
      r = I400ToI420(src, src_width, dst_y, dst_stride_y, dst_u, dst_stride_u,
                     dst_v, dst_stride_v, crop_width, inv_crop_height);
      break;
    // Biplanar formats
    case FOURCC_NV12:
      src = sample + (src_width * crop_y + crop_x);
      src_uv = sample + (src_width * abs_src_height) +
               ((crop_y / 2) * aligned_src_width) + ((crop_x / 2) * 2);
      r = NV12ToI420Rotate(src, src_width, src_uv, aligned_src_width, dst_y,
                           dst_stride_y, dst_u, dst_stride_u, dst_v,
                           dst_stride_v, crop_width, inv_crop_height, rotation);
      break;
    case FOURCC_NV21:
      src = sample + (src_width * crop_y + crop_x);
      src_uv = sample + (src_width * abs_src_height) +
               ((crop_y / 2) * aligned_src_width) + ((crop_x / 2) * 2);
      // Call NV12 but with dst_u and dst_v parameters swapped.
      r = NV12ToI420Rotate(src, src_width, src_uv, aligned_src_width, dst_y,
                           dst_stride_y, dst_v, dst_stride_v, dst_u,
                           dst_stride_u, crop_width, inv_crop_height, rotation);
      break;
    case FOURCC_M420:
      src = sample + (src_width * crop_y) * 12 / 8 + crop_x;
      r = M420ToI420(src, src_width, dst_y, dst_stride_y, dst_u, dst_stride_u,
                     dst_v, dst_stride_v, crop_width, inv_crop_height);
      break;
    // Triplanar formats
    case FOURCC_I420:
    case FOURCC_YV12: {
      const uint8_t* src_y = sample + (src_width * crop_y + crop_x);
      const uint8_t* src_u;
      const uint8_t* src_v;
      int halfwidth = (src_width + 1) / 2;
      int halfheight = (abs_src_height + 1) / 2;
      if (format == FOURCC_YV12) {
        src_v = sample + src_width * abs_src_height +
                (halfwidth * crop_y + crop_x) / 2;
        src_u = sample + src_width * abs_src_height +
                halfwidth * (halfheight + crop_y / 2) + crop_x / 2;
      } else {
        src_u = sample + src_width * abs_src_height +
                (halfwidth * crop_y + crop_x) / 2;
        src_v = sample + src_width * abs_src_height +
                halfwidth * (halfheight + crop_y / 2) + crop_x / 2;
      }
      r = I420Rotate(src_y, src_width, src_u, halfwidth, src_v, halfwidth,
                     dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
                     dst_stride_v, crop_width, inv_crop_height, rotation);
      break;
    }
    case FOURCC_I422:
    case FOURCC_YV16: {
      const uint8_t* src_y = sample + src_width * crop_y + crop_x;
      const uint8_t* src_u;
      const uint8_t* src_v;
      int halfwidth = (src_width + 1) / 2;
      if (format == FOURCC_YV16) {
        src_v = sample + src_width * abs_src_height + halfwidth * crop_y +
                crop_x / 2;
        src_u = sample + src_width * abs_src_height +
                halfwidth * (abs_src_height + crop_y) + crop_x / 2;
      } else {
        src_u = sample + src_width * abs_src_height + halfwidth * crop_y +
                crop_x / 2;
        src_v = sample + src_width * abs_src_height +
                halfwidth * (abs_src_height + crop_y) + crop_x / 2;
      }
      r = I422ToI420(src_y, src_width, src_u, halfwidth, src_v, halfwidth,
                     dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
                     dst_stride_v, crop_width, inv_crop_height);
      break;
    }
    case FOURCC_I444:
    case FOURCC_YV24: {
      const uint8_t* src_y = sample + src_width * crop_y + crop_x;
      const uint8_t* src_u;
      const uint8_t* src_v;
      if (format == FOURCC_YV24) {
        src_v = sample + src_width * (abs_src_height + crop_y) + crop_x;
        src_u = sample + src_width * (abs_src_height * 2 + crop_y) + crop_x;
      } else {
        src_u = sample + src_width * (abs_src_height + crop_y) + crop_x;
        src_v = sample + src_width * (abs_src_height * 2 + crop_y) + crop_x;
      }
      r = I444ToI420(src_y, src_width, src_u, src_width, src_v, src_width,
                     dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
                     dst_stride_v, crop_width, inv_crop_height);
      break;
    }
#ifdef HAVE_JPEG
    case FOURCC_MJPG:
      r = MJPGToI420(sample, sample_size, dst_y, dst_stride_y, dst_u,
                     dst_stride_u, dst_v, dst_stride_v, src_width,
                     abs_src_height, crop_width, inv_crop_height);
      break;
#endif
    default:
      r = -1;  // unknown fourcc - return failure code.
  }

  if (need_buf) {
    if (!r) {
      // Rotate from the temporary I420 buffer into the caller's planes.
      r = I420Rotate(dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
                     dst_stride_v, tmp_y, tmp_y_stride, tmp_u, tmp_u_stride,
                     tmp_v, tmp_v_stride, crop_width, abs_crop_height,
                     rotation);
    }
    free(rotate_buffer);
  }

  return r;
}

// 422 chroma is 1/2 width, 1x height
// 420 chroma is 1/2 width, 1/2 height
// Thin exported wrapper around I422ToI420: converts 4:2:2 (half-width,
// full-height chroma) to 4:2:0 (half-width, half-height chroma).
// Returns whatever I422ToI420 returns (0 on success).
// (Removed leftover commented-out debug logging.)
LIBYUV_API
int ConvertI422ToI420(const uint8_t* src_y,
                      int src_stride_y,
                      const uint8_t* src_u,
                      int src_stride_u,
                      const uint8_t* src_v,
                      int src_stride_v,
                      uint8_t* dst_y,
                      int dst_stride_y,
                      uint8_t* dst_u,
                      int dst_stride_u,
                      uint8_t* dst_v,
                      int dst_stride_v,
                      int width,
                      int height) {
  return I422ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v,
                    src_stride_v, dst_y, dst_stride_y, dst_u, dst_stride_u,
                    dst_v, dst_stride_v, width, height);
}

//#endif

//#endif

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif
