#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>

#ifndef SOFTWARE_NAME
#define SOFTWARE_NAME "fppartialdischarge"
#endif
#ifndef VERSION
#define VERSION "NO VERSION"
#endif

#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>

#include <libxml/encoding.h>
#include <libxml/xmlwriter.h>
#include <libxml/parser.h>
#include <png.h>

/*
 * Write an 8-bit grayscale image to `filename` as a PNG.
 *
 * `data` must hold width*height bytes, tightly packed (row stride == width).
 * Returns 0 on success, -1 on any failure (file open, libpng setup, or a
 * libpng error during encoding).  On failure a partially written file may
 * be left on disk.
 */
int save_png(const char *filename, uint8_t *data, int width, int height)
{
  FILE *fp = fopen(filename, "wb");
  if (!fp) {
    fprintf(stderr, "Could not open file for writing: %s\n", filename);
    return -1;
  }

  png_structp png = png_create_write_struct(
      PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
  if (!png) {
    fclose(fp);
    fprintf(stderr, "Could not create PNG write structure\n");
    return -1;
  }

  png_infop info = png_create_info_struct(png);
  if (!info) {
    png_destroy_write_struct(&png, NULL);
    fclose(fp);
    fprintf(stderr, "Could not create PNG info structure\n");
    return -1;
  }
  /* libpng reports fatal errors by longjmp-ing back to this point;
   * everything below must be safe to abandon mid-way. */
  if (setjmp(png_jmpbuf(png))) {
    png_destroy_write_struct(&png, &info);
    fclose(fp);
    return -1;
  }

  png_init_io(png, fp);
  /* 8-bit, single-channel (grayscale), no interlacing. */
  png_set_IHDR(png, info, width, height, 8, PNG_COLOR_TYPE_GRAY,
      PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_BASE,
      PNG_FILTER_TYPE_BASE);
  png_write_info(png, info);

  /* One byte per pixel, rows assumed contiguous at stride == width. */
  for (int y = 0; y < height; y++) {
    png_write_row(png, data + y * width);
  }

  png_write_end(png, NULL);

  png_destroy_write_struct(&png, &info);
  fclose(fp);

  return 0;
}

/*
 * Convert `frame` (whose data is in ctx->pix_fmt) into a newly allocated
 * AVFrame in `pix_fmt`, keeping the source dimensions.
 *
 * Ownership of the returned frame passes to the caller (free it with
 * av_frame_free).  Exits the process on allocation or scaler-setup
 * failure, matching the error policy of the rest of this file.
 */
AVFrame *sws_frame(AVFrame *frame, int pix_fmt, AVCodecContext *ctx)
{
  AVFrame *frame_dst = NULL;
  struct SwsContext *sws_context = NULL;
  int width = frame->width, height = frame->height;
  int ret;

  frame_dst = av_frame_alloc();
  if (!frame_dst) {
    fprintf(stderr, "Could not allocate video frame\n");
    exit(1);
  }

  frame_dst->format = pix_fmt;
  frame_dst->width  = width;
  frame_dst->height = height;

  /* 32-byte alignment: rows may be padded (linesize >= width * bpp). */
  ret = av_frame_get_buffer(frame_dst, 32);
  if (ret < 0) {
    fprintf(stderr, "Could not allocate frame data.\n");
    exit(1);
  }

  sws_context = sws_getContext(
      ctx->width, ctx->height, ctx->pix_fmt,
      width, height, pix_fmt,
      SWS_BILINEAR, NULL, NULL, NULL);
  if (!sws_context) {
    /* BUG FIX: sws_getContext can return NULL (e.g. unsupported format
     * pair); calling sws_scale with a NULL context would crash. */
    fprintf(stderr, "Could not create scaling context\n");
    exit(1);
  }
  sws_scale(sws_context,
      (const uint8_t *const *)frame->data, frame->linesize,
      0, frame->height, frame_dst->data, frame_dst->linesize);
  sws_freeContext(sws_context);
  return frame_dst;
}

/*
 * Draw a straight line from (start_x, start_y) to (end_x, end_y) on a
 * packed RGB24 buffer (3 bytes per pixel, stride == width * 3) using
 * Bresenham's algorithm.  Pixels that fall outside the image are
 * silently skipped.
 */
void draw_line(uint8_t *data, int width, int height,
    int start_x, int start_y, int end_x, int end_y,
    uint8_t r, uint8_t g, uint8_t b)
{
  int dx = abs(end_x - start_x), sx = start_x < end_x ? 1 : -1;
  int dy = abs(end_y - start_y), sy = start_y < end_y ? 1 : -1;
  int err = dx - dy;

  while (1) {
    /* BUG FIX: the old check (index < width*height*3) let negative
     * coordinates write before the buffer (UB) and let x >= width wrap
     * onto the next row; clip against the actual image rectangle. */
    if (start_x >= 0 && start_x < width &&
        start_y >= 0 && start_y < height) {
      int index = (start_y * width + start_x) * 3;
      data[index] = r;     // R
      data[index + 1] = g; // G
      data[index + 2] = b; // B
    }
    if (start_x == end_x && start_y == end_y)
      break;
    int e2 = err * 2;
    if (e2 > -dy) {
      err -= dy;
      start_x += sx;
    }
    if (e2 < dx) {
      err += dx;
      start_y += sy;
    }
  }
}
/*
 * Naive vertical-gradient edge detector on a packed RGB24 buffer.
 * Scans each column top-to-bottom and compares every pixel against the
 * (original) value of the pixel above it: if any channel changes by more
 * than `delim`, the pixel is painted red (255,0,0), otherwise black.
 * The first row of each column is left untouched.
 */
void edge_detection(uint8_t *data, int width, int height)
{
  int delim = 20;

  for (int x = 0; x < width; x++) {
    int r0 = 0, g0 = 0, b0 = 0;
    for (int y = 0; y < height; y++) {
      int index = (x + width * y) * 3;
      int r1 = data[index];     // R
      int g1 = data[index + 1]; // G
      int b1 = data[index + 2]; // B
      /* BUG FIX: the guard was `x != 0`, which on the first pixel of
       * every column (y == 0, x > 0) compared against the *bottom* of
       * the previous column.  Compare within the column only. */
      if (y != 0) {
        /* BUG FIX: use the absolute difference (signed diff missed
         * light-to-dark edges) and check all three channels, as the
         * previously commented-out code intended. */
        if (abs(r1 - r0) > delim ||
            abs(g1 - g0) > delim ||
            abs(b1 - b0) > delim) {
          data[index] = 255;
          data[index + 1] = 0;
          data[index + 2] = 0;
        } else {
          data[index] = 0;
          data[index + 1] = 0;
          data[index + 2] = 0;
        }
      }
      /* Remember the pixel's original value for the next comparison
       * (the per-pixel debug printf was removed). */
      r0 = r1; g0 = g1; b0 = b1;
    }
  }
}
#define SOBEL_X_FILTER_SIZE 3
#define SOBEL_Y_FILTER_SIZE 3

// Sobel edge-detection kernels: horizontal (x) and vertical (y) gradients.
int sobel_filter_x[SOBEL_X_FILTER_SIZE][SOBEL_X_FILTER_SIZE] = {
    {-1, 0, 1},
    {-2, 0, 2},
    {-1, 0, 1}
};

int sobel_filter_y[SOBEL_Y_FILTER_SIZE][SOBEL_Y_FILTER_SIZE] = {
    {-1, -2, -1},
    { 0,  0,  0},
    { 1,  2,  1}
};

/*
 * Sobel edge detection on an 8-bit grayscale image (stride == width).
 * Computes the gradient magnitude per pixel, clamps it to 255, and
 * writes the result to obj/test1.png.  The 1-pixel border is left at 0.
 */
void sobel_edge_detection(uint8_t *frame_data, int width, int height)
{
  /* BUG FIX: calloc instead of unchecked malloc — the border pixels are
   * never written by the loops below, so a plain malloc handed
   * uninitialized bytes to save_png. */
  uint8_t *output_image = calloc((size_t)width * height, 1);
  if (!output_image) {
    fprintf(stderr, "Out of memory in sobel_edge_detection\n");
    return;
  }

  for (int y = 1; y < height - 1; y++) {
    for (int x = 1; x < width - 1; x++) {
      int gx = 0;
      int gy = 0;
      /* 3x3 convolution with both Sobel kernels. */
      for (int ky = -1; ky <= 1; ky++) {
        for (int kx = -1; kx <= 1; kx++) {
          int pixel_value = frame_data[(y + ky) * width + (x + kx)];
          gx += pixel_value * sobel_filter_x[ky + 1][kx + 1];
          gy += pixel_value * sobel_filter_y[ky + 1][kx + 1];
        }
      }
      int g = (int)sqrt(gx * gx + gy * gy); /* gradient magnitude */
      if (g > 255) g = 255;                 /* clamp to 8-bit range */
      output_image[y * width + x] = (uint8_t)g;
    }
  }

  save_png("obj/test1.png", output_image, width, height);
  free(output_image);
}
/*
 * Dump an RGB24 frame to `filename` as a binary PPM (P6 header followed
 * by packed RGB data).  Returns 1 on success, 0 on failure.
 *
 * `pix_fmt` is currently ignored: the P6 header written below is only
 * valid for RGB24, so the format is pinned to AV_PIX_FMT_RGB24.
 */
int save_file(AVFrame *frame, const char *filename, int pix_fmt)
{
  FILE *file = NULL;
  uint8_t *buf = NULL;
  int size;
  int ok = 0;

  (void)pix_fmt; /* see note above */

  /* BUG FIX: fopen was unchecked; fprintf(NULL, ...) is UB. */
  file = fopen(filename, "wb");
  if (!file) {
    fprintf(stderr, "Could not open file for writing: %s\n", filename);
    return 0;
  }

  size = av_image_get_buffer_size(AV_PIX_FMT_RGB24,
      frame->width, frame->height, 1);
  if (size < 0)
    goto out;

  /* BUG FIX: frame->data[0] may have padded rows (the frame was
   * allocated with 32-byte alignment), so writing `size` contiguous
   * bytes from it produced a sheared image.  Pack the rows first. */
  buf = malloc((size_t)size);
  if (!buf)
    goto out;
  if (av_image_copy_to_buffer(buf, size,
        (const uint8_t *const *)frame->data, frame->linesize,
        AV_PIX_FMT_RGB24, frame->width, frame->height, 1) < 0)
    goto out;

  fprintf(file, "P6\n%d %d\n255\n",
      frame->width, frame->height);
  if (fwrite(buf, 1, (size_t)size, file) == (size_t)size)
    ok = 1;

out:
  free(buf);
  fclose(file);
  if (ok)
    fprintf(stderr,
        "Scaling succeeded.Play output file with the command:\n"
           "xdg-open %s\n", filename);
  /* BUG FIX: was `return true` without <stdbool.h> in scope. */
  return ok;
}
/*
 * Capture thread entry point.  `ptr` is the device/input name (char *),
 * e.g. "/dev/video0".  Opens the input, finds the first video stream,
 * decodes frames, converts each to RGB24 and saves it to obj/test.rgb.
 *
 * NOTE(review): the loop currently calls exit(1) after saving the first
 * decoded frame — debug behavior, deliberately preserved.
 *
 * Always returns NULL (pthread-style signature).
 */
void *camera_run(void *ptr)
{
  char *devname = ptr;
  AVFormatContext *format_ctx = NULL;
  AVCodecContext *codec_ctx = NULL;
  const AVCodec *codec = NULL;
  AVPacket packet;
  AVFrame *frame = NULL;
  int video_stream_index = -1;
  char dst_filename[1024];

  if (avformat_open_input(&format_ctx, devname, NULL, NULL) != 0) {
    fprintf(stderr, "Failed to open video input\n");
    return NULL;
  }

  /* BUG FIX: every early return below used to leak format_ctx (and,
   * later, codec_ctx / frame).  All error paths now share the cleanup
   * label; the av*_free/close helpers are NULL-safe. */
  if (avformat_find_stream_info(format_ctx, NULL) < 0) {
    fprintf(stderr, "Failed to find stream information\n");
    goto cleanup;
  }

  for (unsigned int i = 0; i < format_ctx->nb_streams; i++) {
    if (format_ctx->streams[i]->codecpar->codec_type ==
        AVMEDIA_TYPE_VIDEO) {
      video_stream_index = (int)i;
      break;
    }
  }

  if (video_stream_index == -1) {
    fprintf(stderr, "No video stream found\n");
    goto cleanup;
  }

  codec = avcodec_find_decoder(
      format_ctx->streams[video_stream_index]->codecpar->codec_id);
  if (!codec) {
    fprintf(stderr, "Codec not found\n");
    goto cleanup;
  }

  codec_ctx = avcodec_alloc_context3(codec);
  if (!codec_ctx || avcodec_parameters_to_context(codec_ctx,
        format_ctx->streams[video_stream_index]->codecpar) < 0) {
    /* BUG FIX: this message used to duplicate "Failed to open codec". */
    fprintf(stderr, "Failed to set up codec context\n");
    goto cleanup;
  }

  if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
    fprintf(stderr, "Failed to open codec\n");
    goto cleanup;
  }

  frame = av_frame_alloc();
  if (!frame) {
    fprintf(stderr, "Failed to allocate frame\n");
    goto cleanup;
  }

  while (1) {
    if (av_read_frame(format_ctx, &packet) < 0)
      break;
    if (packet.stream_index == video_stream_index) {
      if (avcodec_send_packet(codec_ctx, &packet) < 0) {
        av_packet_unref(&packet);
        continue;
      }
      if (avcodec_receive_frame(codec_ctx, frame) == 0) {
        AVFrame *frame_dst = sws_frame(frame, AV_PIX_FMT_RGB24, codec_ctx);
        snprintf(dst_filename, sizeof(dst_filename), "obj/test.rgb");
        save_file(frame_dst, dst_filename, AV_PIX_FMT_RGB24);
        exit(1); /* debug: stop after the first saved frame */
        av_frame_free(&frame_dst);
      }
    }
    av_packet_unref(&packet);
  }

cleanup:
  av_frame_free(&frame);
  avcodec_free_context(&codec_ctx);
  avformat_close_input(&format_ctx);
  return NULL;
}
//.build/x86_64/HMjpegVideo/HMjpegVideo 8888 /dev/video0
/*
 * Entry point.  Optionally takes the capture device path as the first
 * command-line argument (see the usage comment above); defaults to
 * /dev/video0 when none is given, preserving the old behavior.
 */
int main(int argc, char **argv)
{
  char *dev = (argc > 1) ? argv[1] : "/dev/video0";
  avdevice_register_all();
  camera_run(dev);
  return 0;
}
