#include <errno.h>
#include <inttypes.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <arpa/inet.h>
#include <netinet/in.h>
#include <pthread.h>
#include <sys/socket.h>
#include <unistd.h>

#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>

#define MJPEG_BUFFER_SIZE 4096  // NOTE(review): unused in this file -- confirm before removing
#define HTTP_PORT 8080          // TCP port the MJPEG HTTP server listens on

// Global state for the camera video stream (shared across threads;
// access is not synchronized).
AVFormatContext *input_format_context = NULL;  // demuxer for the capture device
AVCodecContext *codec_context = NULL;          // decoder for the camera stream
const AVCodec *codec = NULL;                   // decoder implementation
AVStream *video_stream = NULL;                 // selected video stream
AVFrame *frame = NULL;                         // reusable decoded frame
AVPacket packet;                               // reusable demuxed packet
struct SwsContext *sws_context = NULL;         // camera pix_fmt -> RGB24 scaler
int video_stream_index = -1;                   // index of the video stream, -1 = none found
int width = 640;                               // scaled output width
int height = 480;                              // scaled output height

/*
 * Send the initial HTTP response header announcing a multipart MJPEG
 * stream (boundary "frame") on the connected client socket.
 * Logs to stderr if the send fails; does not close the socket.
 */
void send_http_header(int client_fd)
{
  const char *response_header =
      "HTTP/1.1 200 OK\r\n"
      "Content-Type: multipart/x-mixed-replace; boundary=frame\r\n"
      "Cache-Control: no-cache\r\n"
      "Connection: close\r\n\r\n";
  /* original ignored the send() result entirely */
  if (send(client_fd, response_header, strlen(response_header), 0) < 0)
    fprintf(stderr, "send:%s\n", strerror(errno));
}
/*
 * Send one JPEG image as a multipart part: boundary line, part headers
 * with the exact Content-Length, the JPEG bytes, then the CRLF that
 * terminates the part body.  Send errors are logged to stderr; the
 * socket is left open for the next frame.
 */
void send_mjpeg_frame(int client_fd,
    uint8_t *mjpeg_data, int mjpeg_size)
{
  /* ssize_t, not size_t: send() returns -1 on error and the original
   * `size_t n; if (n < 0)` check could never fire */
  ssize_t n;
  const char boundary[] = "--frame\r\n";
  char content_header_buffer[128];

  n = send(client_fd, boundary, sizeof boundary - 1, 0);
  if (n < 0) {
    fprintf(stderr, "send:%s\n", strerror(errno));
  }
  /* snprintf instead of sprintf: bounded, and the length is reused */
  int header_len = snprintf(content_header_buffer, sizeof content_header_buffer,
      "Content-Type: image/jpeg\r\nContent-Length: %d\r\n\r\n", mjpeg_size);
  n = send(client_fd, content_header_buffer, (size_t)header_len, 0);
  if (n < 0) {
    fprintf(stderr, "send:%s\n", strerror(errno));
  }
  n = send(client_fd, mjpeg_data, (size_t)mjpeg_size, 0);
  if (n < 0) {
    fprintf(stderr, "send:%s\n", strerror(errno));
  }
  /* multipart part bodies must be terminated by CRLF before the next
   * boundary (RFC 2046); the original omitted it */
  n = send(client_fd, "\r\n", 2, 0);
  if (n < 0) {
    fprintf(stderr, "send:%s\n", strerror(errno));
  }
}
int idx = 0;  // frame counter driving the frame_create() animation (not thread-safe)
/*
 * Build the next frame of a synthetic animated YUV gradient test pattern.
 *
 * Geometry and pixel format are copied from the encoder context `c`; the
 * fill loops assume a planar 4:2:0 layout (half-resolution chroma), i.e.
 * c->pix_fmt should be AV_PIX_FMT_YUV420P.  The global `idx` counter
 * advances the animation one step per call and becomes the frame's pts
 * (not thread-safe).
 *
 * Returns the new frame as void* (callers assign it to AVFrame*);
 * ownership passes to the caller.  Exits the process on allocation failure.
 */
void *frame_create(AVCodecContext *c)
{
  int x, y, ret;
  AVFrame *frame;
  const int i = idx;  /* snapshot of the current animation step */

  frame = av_frame_alloc();
  if (!frame) {
    fprintf(stderr, "Could not allocate video frame\n");
    exit(1);
  }
  frame->format = c->pix_fmt;
  frame->width  = c->width;
  frame->height = c->height;

  ret = av_frame_get_buffer(frame, 0);
  if (ret < 0) {
    fprintf(stderr, "Could not allocate the video frame data\n");
    exit(1);
  }
  ret = av_frame_make_writable(frame);
  if (ret < 0)
    exit(1);

  /* Luma: diagonal gradient that shifts with each frame */
  for (y = 0; y < c->height; y++) {
    for (x = 0; x < c->width; x++) {
      frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
    }
  }
  /* Chroma (Cb/Cr): half-resolution gradients, animated at other rates */
  for (y = 0; y < c->height / 2; y++) {
    for (x = 0; x < c->width / 2; x++) {
      frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
      frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
    }
  }

  frame->pts = i;
  idx++;
  return frame;
}
/*
 * Encode one MJPEG frame of the given dimensions and return the packet.
 *
 * A fresh MJPEG encoder is created and destroyed on every call (simple
 * but expensive).  NOTE(review): the incoming `frame` is ignored -- a
 * synthetic test pattern from frame_create() is encoded instead; confirm
 * whether the caller's frame was meant to be used.  `fd` is unused.
 *
 * Returns a heap-allocated AVPacket (caller releases it), or NULL on
 * encode failure.  Exits the process on setup failure.
 */
AVPacket *encode(AVFrame *frame, int fd, int width, int height)
{
  int ret;
  AVPacket *pkt = NULL;
  AVFrame *pattern = NULL;
  const AVCodec *jpeg_codec = NULL;
  AVCodecContext *jpeg_codec_ctx = NULL;

  (void)frame;  /* see NOTE above: the input frame is not encoded */
  (void)fd;

  jpeg_codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
  if (!jpeg_codec) {
    fprintf(stderr, "Codec '%d' not found\n", AV_CODEC_ID_MJPEG);
    exit(1);
  }
  jpeg_codec_ctx = avcodec_alloc_context3(jpeg_codec);
  if (!jpeg_codec_ctx) {
    fprintf(stderr, "Could not allocate video codec context\n");
    exit(1);
  }
  /* MJPEG natively wants full-range YUVJ formats; this relaxes the
   * compliance check so plain YUV420P is accepted */
  jpeg_codec_ctx->strict_std_compliance = FF_COMPLIANCE_UNOFFICIAL;

  jpeg_codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
  jpeg_codec_ctx->width = width;
  jpeg_codec_ctx->height = height;
  jpeg_codec_ctx->time_base = (AVRational){1, 25};
  jpeg_codec_ctx->bit_rate = 400000;
  jpeg_codec_ctx->framerate = (AVRational){25, 1};
  jpeg_codec_ctx->gop_size = 10;
  /* dead H264 "preset" branch removed: jpeg_codec is always MJPEG here */

  ret = avcodec_open2(jpeg_codec_ctx, jpeg_codec, NULL);
  if (ret < 0) {
    fprintf(stderr, "Could not open codec: %s\n", av_err2str(ret));
    exit(1);
  }

  pattern = frame_create(jpeg_codec_ctx);
  pkt = av_packet_alloc();
  if (!pkt)
    exit(1);

  printf("Send frame %3"PRId64"\n", pattern->pts);

  if (avcodec_send_frame(jpeg_codec_ctx, pattern) == 0 &&
      avcodec_receive_packet(jpeg_codec_ctx, pkt) == 0) {
    av_frame_free(&pattern);
    avcodec_free_context(&jpeg_codec_ctx);  /* was leaked on every call */
    return pkt;
  }

  /* failure: original did `return -1` from a pointer-returning function
   * and leaked pkt/pattern/ctx */
  av_packet_free(&pkt);
  av_frame_free(&pattern);
  avcodec_free_context(&jpeg_codec_ctx);
  return NULL;
}

/*
 * Allocate an AVFrame with backing buffers for the given pixel format
 * and dimensions.  Returns NULL if the frame struct cannot be allocated;
 * exits the process if the pixel buffers cannot be allocated.
 */
static AVFrame *alloc_frame(enum AVPixelFormat pix_fmt,
    int width, int height)
{
  AVFrame *f = av_frame_alloc();
  if (f == NULL)
    return NULL;

  f->format = pix_fmt;
  f->width  = width;
  f->height = height;

  /* attach the pixel buffers (default alignment) */
  if (av_frame_get_buffer(f, 0) < 0) {
    fprintf(stderr, "Could not allocate frame data.\n");
    exit(1);
  }

  return f;
}
/*
 * Worker thread body: read packets from the camera, decode them, scale
 * to RGB24, re-encode as JPEG and push each image to the HTTP client.
 * `arg` carries the client socket fd cast through intptr_t.  Loops
 * until the process exits.
 */
void *camera_stream(void *arg)
{
  int client_fd = (int)(intptr_t)arg;
  AVFrame *frame_rgb = alloc_frame(AV_PIX_FMT_RGB24, width, height);
  if (!frame_rgb) {
    fprintf(stderr, "Could not allocate video frame\n");
    exit(1);
  }
  /* reusable frame for the decoder output */
  frame = av_frame_alloc();
  if (!frame) {
    fprintf(stderr, "Error allocating frame\n");
    av_frame_free(&frame_rgb);
    return NULL;
  }
  while (1) {
    if (av_read_frame(input_format_context, &packet) >= 0) {
      if (packet.stream_index == video_stream_index) {
        avcodec_send_packet(codec_context, &packet);
        while (avcodec_receive_frame(codec_context, frame) == 0) {
          /* convert the decoded camera frame to RGB24 */
          sws_scale(sws_context,
              (const uint8_t *const *)frame->data, frame->linesize,
              0, height, frame_rgb->data, frame_rgb->linesize);
          AVPacket *pkt = encode(frame_rgb, client_fd, 640, 480);
          if (pkt) {  /* was dereferenced unconditionally: NULL crash */
            send_mjpeg_frame(client_fd, pkt->data, pkt->size);
            av_packet_free(&pkt);  /* free the struct too, not just its data */
          }
        }
      }
      av_packet_unref(&packet);
    }
    usleep(10000);  /* throttle the capture loop */
  }

  av_frame_free(&frame_rgb);  /* unreachable; kept for symmetry */
  return NULL;
}

/*
 * Accept-loop HTTP server on HTTP_PORT: for each client, read (and log)
 * the request, send the multipart MJPEG header, then stream encoded
 * frames until encoding fails.  Returns -1 on setup failure; otherwise
 * loops forever.
 */
int start_http_server()
{
  int server_sock, client_fd;
  struct sockaddr_in server_addr, client_addr;
  socklen_t client_len = sizeof(client_addr);

  server_sock = socket(AF_INET, SOCK_STREAM, 0);
  if (server_sock < 0) {
    perror("Unable to create socket");
    return -1;
  }

  /* allow fast restarts without "Address already in use" */
  int opt = 1;
  setsockopt(server_sock, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof opt);

  memset(&server_addr, 0, sizeof(server_addr));
  server_addr.sin_family = AF_INET;
  server_addr.sin_addr.s_addr = INADDR_ANY;
  server_addr.sin_port = htons(HTTP_PORT);

  if (bind(server_sock,
        (struct sockaddr *)&server_addr, sizeof(server_addr)) < 0) {
    perror("Bind failed");
    close(server_sock);
    return -1;
  }

  if (listen(server_sock, 5) < 0) {
    perror("Listen failed");
    close(server_sock);
    return -1;
  }
  printf("HTTP server listening on port %d...\n", HTTP_PORT);

  while (1) {
    client_fd = accept(server_sock,
        (struct sockaddr *)&client_addr, &client_len);
    if (client_fd < 0) {
      perror("Accept failed");
      continue;
    }
    char buf[1024];
    /* ssize_t, not size_t: read() returns -1 on error */
    ssize_t nread = read(client_fd, buf, sizeof(buf) - 1);
    if (nread < 0) {
      perror("read");
      close(client_fd);
      continue;  /* keep serving instead of exiting the whole server */
    }
    buf[nread] = '\0';  /* read() does not NUL-terminate */
    printf("Received %zd bytes: %s\n", nread, buf);

    send_http_header(client_fd);
    while (1) {
      AVPacket *pkt = encode(NULL, 0, 640, 480);
      if (!pkt)  /* was dereferenced unconditionally: NULL crash */
        break;
      send_mjpeg_frame(client_fd, pkt->data, pkt->size);
      av_packet_free(&pkt);  /* free the struct too, not just its data */
    }
    close(client_fd);  /* original leaked one fd per client */
  }

  return 0;
}

/*
 * Open the V4L2 camera, locate its video stream, set up the decoder and
 * RGB scaler, then run the HTTP server.  Resources are released in
 * reverse order on the goto-cleanup paths.
 */
int main()
{
  int ret = -1;

  avdevice_register_all();
  avformat_network_init();

  if (avformat_open_input(&input_format_context,
        "/dev/video0", NULL, NULL) != 0) {
    fprintf(stderr, "Could not open input\n");
    return -1;
  }

  if (avformat_find_stream_info(input_format_context, NULL) < 0) {
    fprintf(stderr, "Could not find stream information\n");
    goto out_close_input;
  }

  /* pick the first video stream */
  for (unsigned int i = 0; i < input_format_context->nb_streams; i++) {
    if (input_format_context->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
      video_stream_index = (int)i;
      video_stream = input_format_context->streams[i];
      break;
    }
  }

  if (video_stream_index == -1) {
    fprintf(stderr, "No video stream found\n");
    goto out_close_input;
  }

  codec = avcodec_find_decoder(video_stream->codecpar->codec_id);
  if (!codec) {
    fprintf(stderr, "Codec not found\n");
    goto out_close_input;
  }

  codec_context = avcodec_alloc_context3(codec);
  if (!codec_context) {  /* was unchecked */
    fprintf(stderr, "Could not allocate decoder context\n");
    goto out_close_input;
  }
  if (avcodec_parameters_to_context(codec_context, video_stream->codecpar) < 0) {
    fprintf(stderr, "Could not copy codec parameters\n");  /* was unchecked */
    goto out_free_codec;
  }

  if (avcodec_open2(codec_context, codec, NULL) < 0) {
    fprintf(stderr, "Failed to open codec\n");
    goto out_free_codec;
  }

  sws_context = sws_getContext(
      codec_context->width, codec_context->height,
      codec_context->pix_fmt, width, height, AV_PIX_FMT_RGB24,
      SWS_BILINEAR, NULL, NULL, NULL);
  if (!sws_context) {  /* was unchecked */
    fprintf(stderr, "Could not create scaler context\n");
    goto out_free_codec;
  }

  ret = start_http_server();

  sws_freeContext(sws_context);
out_free_codec:
  avcodec_free_context(&codec_context);
out_close_input:
  avformat_close_input(&input_format_context);

  return ret;
}

