#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <signal.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>

#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>

#define PORT 8080
#define BUFFER_SIZE 4096
/*
 * Write the initial HTTP response head for an MJPEG stream: a 200 status
 * plus a multipart/x-mixed-replace content type so the browser replaces
 * each received part (JPEG) in place. Best-effort: the result of send()
 * is not checked here; a dead client is detected by later frame writes.
 */
void send_http_header(int client_sock)
{
  static const char response_head[] =
      "HTTP/1.1 200 OK\r\n"
      "Content-Type: multipart/x-mixed-replace; boundary=--myboundary\r\n"
      "Cache-Control: no-cache\r\n"
      "Connection: keep-alive\r\n\r\n";

  send(client_sock, response_head, sizeof(response_head) - 1, 0);
}
/* Write exactly len bytes to sock, looping over short sends.
 * Returns 0 on success, -1 on any send() error (e.g. client gone). */
static int send_all(int sock, const void *buf, size_t len)
{
  const uint8_t *p = buf;

  while (len > 0) {
    ssize_t n = send(sock, p, len, 0);
    if (n < 0)
      return -1;
    p += n;
    len -= (size_t)n;
  }
  return 0;
}

/*
 * Send one JPEG image as a single part of the multipart stream:
 * boundary line, part headers, JPEG payload, trailing CRLF.
 * Unlike a bare send(), this handles partial writes (a frame is far
 * larger than one socket buffer) and stops early if the client has
 * disconnected, so the stream never emits a torn part.
 */
void send_http_frame(int client_sock,
    const uint8_t *data, size_t length)
{
  const char *boundary = "--myboundary\r\n";
  const char *content_type = "Content-Type: image/jpeg\r\n\r\n";
  const char *end_boundary = "\r\n";

  if (send_all(client_sock, boundary, strlen(boundary)) < 0)
    return;
  if (send_all(client_sock, content_type, strlen(content_type)) < 0)
    return;
  if (send_all(client_sock, data, length) < 0)
    return;
  send_all(client_sock, end_boundary, strlen(end_boundary));
}
/* Monotonic frame counter; advances the test-pattern animation phase. */
int idx = 0;

/*
 * Allocate a writable frame matching the encoder context (YUV420P
 * expected) and fill it with a synthetic moving gradient whose phase is
 * the current value of the global `idx`, which is then incremented.
 * Exits the process on allocation failure, matching this file's
 * error-handling style.
 *
 * Returns the new frame as void* (callers assign it to AVFrame*);
 * ownership transfers to the caller, who must release it with
 * av_frame_free().
 */
void *frame_create(AVCodecContext *c)
{
  int x, y, ret;
  AVFrame *frame;
  const int phase = idx;

  frame = av_frame_alloc();
  if (!frame) {
    fprintf(stderr, "Could not allocate video frame\n");
    exit(1);
  }
  frame->format = c->pix_fmt;
  frame->width  = c->width;
  frame->height = c->height;

  ret = av_frame_get_buffer(frame, 0);
  if (ret < 0) {
      fprintf(stderr, "Could not allocate the video frame data\n");
      exit(1);
  }

  fflush(stdout);
  ret = av_frame_make_writable(frame);
  if (ret < 0)
    exit(1);

  /* Luma: diagonal gradient drifting with the animation phase. */
  for (y = 0; y < c->height; y++) {
    for (x = 0; x < c->width; x++) {
      frame->data[0][y * frame->linesize[0] + x] = x + y + phase * 3;
    }
  }

  /* Chroma planes are quarter-size for 4:2:0 subsampling. */
  for (y = 0; y < c->height/2; y++) {
    for (x = 0; x < c->width/2; x++) {
      frame->data[1][y * frame->linesize[1] + x] = 128 + y + phase * 2;
      frame->data[2][y * frame->linesize[2] + x] = 64 + x + phase * 5;
    }
  }

  frame->pts = phase;
  idx++;
  return frame;
}
/*
 * Encode one synthetic frame (from frame_create) to a JPEG packet.
 *
 * The `frame` and `fd` parameters are unused — they are kept only for
 * interface compatibility with existing callers; the frame actually
 * encoded is generated internally.
 *
 * Returns an AVPacket owned by the caller (release with av_packet_free)
 * on success, or NULL on failure. All intermediate resources (encoder
 * context, generated frame, packet on the failure path) are released
 * before returning — the original version leaked a freshly opened
 * encoder context and a frame on every call.
 *
 * NOTE(review): opening a new MJPEG encoder per frame is wasteful;
 * hoisting the context out to the caller would be the next step, but
 * that changes this function's interface.
 */
AVPacket *encode(AVFrame *frame, int fd, int width, int height)
{
  int ret;
  AVPacket *pkt = NULL;
  AVFrame *gen = NULL;
  const AVCodec *jpeg_codec;
  AVCodecContext *jpeg_codec_ctx = NULL;

  (void)frame;  /* unused, see header comment */
  (void)fd;     /* unused, see header comment */

  jpeg_codec = avcodec_find_encoder(AV_CODEC_ID_MJPEG);
  if (!jpeg_codec) {
    fprintf(stderr, "Codec '%d' not found\n", AV_CODEC_ID_MJPEG);
    exit(1);
  }
  jpeg_codec_ctx = avcodec_alloc_context3(jpeg_codec);
  if (!jpeg_codec_ctx) {
    fprintf(stderr, "Could not allocate video codec context\n");
    exit(1);
  }
  /* MJPEG's native pixel formats are the "J" full-range ones; allow
   * plain YUV420P without an error. */
  jpeg_codec_ctx->strict_std_compliance = FF_COMPLIANCE_UNOFFICIAL;

  jpeg_codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
  jpeg_codec_ctx->width = width;
  jpeg_codec_ctx->height = height;
  jpeg_codec_ctx->time_base = (AVRational){1, 25};

  jpeg_codec_ctx->bit_rate = 400000;
  jpeg_codec_ctx->framerate = (AVRational){25, 1};
  jpeg_codec_ctx->gop_size = 10;

  ret = avcodec_open2(jpeg_codec_ctx, jpeg_codec, NULL);
  if (ret < 0) {
    fprintf(stderr, "Could not open codec: %s\n", av_err2str(ret));
    exit(1);
  }

  gen = frame_create(jpeg_codec_ctx);
  pkt = av_packet_alloc();
  if (!pkt)
      exit(1);

  if (!gen || !gen->data[0]) {
    fprintf(stderr, "AVFrame is invalid or empty\n");
    goto fail;
  }

  if (avcodec_send_frame(jpeg_codec_ctx, gen) == 0 &&
      avcodec_receive_packet(jpeg_codec_ctx, pkt) == 0) {
    av_frame_free(&gen);
    avcodec_free_context(&jpeg_codec_ctx);
    return pkt;
  }

fail:
  av_packet_free(&pkt);
  av_frame_free(&gen);
  avcodec_free_context(&jpeg_codec_ctx);
  return NULL;
}
/*
 * Stream video to one connected HTTP client: open /dev/video0, decode
 * its frames (used only as a pacing source), and for each decoded frame
 * send a synthetic JPEG produced by encode() as one multipart part.
 * Runs until the capture device stops delivering packets, then releases
 * all FFmpeg resources. All error paths go through a single cleanup
 * sequence — the original leaked format/codec contexts on early returns,
 * dereferenced encode()'s possibly-NULL result, and leaked each
 * returned AVPacket (unref'd but never freed).
 */
void capture_and_stream_video(int client_sock)
{
  AVFormatContext *format_ctx = NULL;
  AVCodecContext *codec_ctx = NULL;
  const AVCodec *codec = NULL;
  AVPacket packet;
  AVFrame *frame = NULL;
  int video_stream_index = -1;

  if (avformat_open_input(&format_ctx,
        "/dev/video0", NULL, NULL) != 0) {
    fprintf(stderr, "Failed to open video input\n");
    return;
  }

  if (avformat_find_stream_info(format_ctx, NULL) < 0) {
    fprintf(stderr, "Failed to find stream information\n");
    goto cleanup;
  }

  /* nb_streams is unsigned; match its type to avoid a sign-compare. */
  for (unsigned int i = 0; i < format_ctx->nb_streams; i++) {
    if (format_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
      video_stream_index = (int)i;
      break;
    }
  }

  if (video_stream_index == -1) {
    fprintf(stderr, "No video stream found\n");
    goto cleanup;
  }

  codec = avcodec_find_decoder(
      format_ctx->streams[video_stream_index]->codecpar->codec_id);
  if (!codec) {
    fprintf(stderr, "Codec not found\n");
    goto cleanup;
  }

  codec_ctx = avcodec_alloc_context3(codec);
  if (!codec_ctx || avcodec_parameters_to_context(codec_ctx, format_ctx->streams[video_stream_index]->codecpar) < 0) {
    fprintf(stderr, "Failed to open codec\n");
    goto cleanup;
  }

  if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
    fprintf(stderr, "Failed to open codec\n");
    goto cleanup;
  }

  frame = av_frame_alloc();
  if (!frame) {
    fprintf(stderr, "Failed to allocate frame\n");
    goto cleanup;
  }

  send_http_header(client_sock);
  while (1) {
    if (av_read_frame(format_ctx, &packet) < 0)
      break;

    if (packet.stream_index == video_stream_index) {
      if (avcodec_send_packet(codec_ctx, &packet) < 0) {
        av_packet_unref(&packet);
        continue;
      }

      if (avcodec_receive_frame(codec_ctx, frame) == 0) {
        /* encode() may fail and return NULL — skip the frame then. */
        AVPacket *pkt = encode(NULL, 0, 640, 480);
        if (pkt) {
          send_http_frame(client_sock, pkt->data, pkt->size);
          /* Free the packet struct itself, not just its payload. */
          av_packet_free(&pkt);
        }
      }
    }
    av_packet_unref(&packet);
  }

cleanup:
  av_frame_free(&frame);
  avcodec_free_context(&codec_ctx);
  avformat_close_input(&format_ctx);
}

/*
 * Create, bind, and start listening on a TCP socket on PORT.
 * Returns the listening socket fd; exits the process on any failure.
 *
 * Fixes over the original: the sockaddr_in is zero-initialized (its
 * sin_zero padding was previously uninitialized stack memory), and
 * SO_REUSEADDR is set so the server can be restarted immediately
 * without "Address already in use" while old connections sit in
 * TIME_WAIT.
 */
int create_server_socket(void)
{
  int server_sock;
  int opt = 1;
  struct sockaddr_in server_addr = {0};

  if ((server_sock = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
    perror("Socket creation failed");
    exit(EXIT_FAILURE);
  }

  if (setsockopt(server_sock, SOL_SOCKET, SO_REUSEADDR,
                 &opt, sizeof(opt)) < 0) {
    perror("setsockopt(SO_REUSEADDR) failed");
    close(server_sock);
    exit(EXIT_FAILURE);
  }

  server_addr.sin_family = AF_INET;
  server_addr.sin_addr.s_addr = INADDR_ANY;
  server_addr.sin_port = htons(PORT);

  if (bind(server_sock, (struct sockaddr *)&server_addr, sizeof(server_addr)) < 0) {
    perror("Bind failed");
    close(server_sock);
    exit(EXIT_FAILURE);
  }

  if (listen(server_sock, 3) < 0) {
    perror("Listen failed");
    close(server_sock);
    exit(EXIT_FAILURE);
  }
  printf("Listening on port %d...\n", PORT);

  return server_sock;
}

/*
 * Accept-loop entry point: register capture devices, create the
 * listening socket, and serve one client at a time (streaming blocks
 * until that client's capture loop ends).
 *
 * SIGPIPE is ignored so that a client disconnecting mid-stream makes
 * send() fail with EPIPE instead of killing the whole server — without
 * this, the first abrupt browser close terminated the process.
 */
int main(void)
{
  int server_sock, client_sock;
  struct sockaddr_in client_addr;
  socklen_t client_len = sizeof(client_addr);

  signal(SIGPIPE, SIG_IGN);

  avdevice_register_all();

  server_sock = create_server_socket();
  while (1) {
    client_sock = accept(server_sock,
        (struct sockaddr *)&client_addr, &client_len);
    if (client_sock < 0) {
      perror("Accept failed");
      continue;
    }
    printf("Client connected\n");
    capture_and_stream_video(client_sock);
    close(client_sock);
    printf("Client disconnected\n");
  }

  close(server_sock);  /* unreachable, kept for symmetry */
  return 0;
}

