/*
  LinPVR - Linux Personal Video Recorder
  Copyright (C) 2006 Kamil Pawlowski <kamilpe@gmail.com>

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License
  as published by the Free Software Foundation; either version 2
  of the License, or (at your option) any later version.
  
  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.
  
  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
*/
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include "capture.h"
#include "ogg_enc.h"
#include "debug.h"

/* --- private functions --- */

int ogg_encode_frame(capture_t *capture);


/* --- bodies of functions ---*/

/* Convert the current RGB frame to planar 4:2:0 YUV, push it through the
   Theora encoder and write any completed Ogg pages to the output file.
   Registered as the video_proc.oframe callback by ogg_enc_init().
   Returns 0 (errors from fwrite are not currently reported).
   NOTE(review): the 2x2 subsampling loop assumes capture->width and
   capture->height are both even -- confirm this is guaranteed upstream. */
int ogg_encode_frame(capture_t *capture) {

  ogg_enc_t *ogg_enc = capture->video_proc.encoder;
  ogg_page page;
  ogg_packet packet;
  int x,y,u,v;

/* Pixel accessors: interleaved RGB source buffer and planar YUV target. */
#define R(px,py) capture->video_proc.rgb_buffer[((py) * capture->width + (px)) * 3]
#define G(px,py) capture->video_proc.rgb_buffer[((py) * capture->width + (px)) * 3 + 1]
#define B(px,py) capture->video_proc.rgb_buffer[((py) * capture->width + (px)) * 3 + 2]
#define Y(px,py) ogg_enc->yuv.y[(py) * ogg_enc->yuv.y_stride  + (px)]
#define U(px,py) ogg_enc->yuv.u[(py) * ogg_enc->yuv.uv_stride + (px)]
#define V(px,py) ogg_enc->yuv.v[(py) * ogg_enc->yuv.uv_stride + (px)]
#define min(a,b) (((a)<(b))?(a):(b))
/* Integer RGB -> YCbCr conversion in 13-bit fixed point, clamped to the
   studio-swing maxima (Y <= 235, U/V <= 240). */
#define RGBtoY(r,g,b) min(abs((r) * 2104  + (g) * 4130  + (b) * 802  + 4096 + 131072) >> 13, 235)
#define RGBtoU(r,g,b) min(abs((r) * -1214 + (g) * -2384 + (b) * 3598 + 4096 + 1048576) >> 13, 240)
#define RGBtoV(r,g,b) min(abs((r) * 3598  + (g) * -3013 + (b) * -585 + 4096 + 1048576) >> 13, 240)

  /* Iterate rows in the OUTER loop so the RGB buffer and the Y plane are
     walked in memory order; the previous x-outer nesting touched memory
     with a full-row stride on every pixel (same results, poor locality). */
  for (y=0;y<capture->height;y+=2)
    for (x=0;x<capture->width;x+=2) {
      unsigned char r1 = R(x, y),     g1 = G(x, y),     b1 = B(x, y);
      unsigned char r2 = R(x+1, y),   g2 = G(x+1, y),   b2 = B(x+1, y);
      unsigned char r3 = R(x, y+1),   g3 = G(x, y+1),   b3 = B(x, y+1);
      unsigned char r4 = R(x+1, y+1), g4 = G(x+1, y+1), b4 = B(x+1, y+1);
      /* Compute a 2x2 Y block from current 2x2 RGB block */
      Y(x,   y)   = RGBtoY(r1,g1,b1); Y(x+1, y)   = RGBtoY(r2,g2,b2);
      Y(x,   y+1) = RGBtoY(r3,g3,b3); Y(x+1, y+1) = RGBtoY(r4,g4,b4);
      /* compute four U samples corresponding to our four RGB pixels */
      u  = RGBtoU(r1,g1,b1);  u += RGBtoU(r2,g2,b2);
      u += RGBtoU(r3,g3,b3);  u += RGBtoU(r4,g4,b4);
      /* compute four V samples corresponding to our four RGB pixels */
      v  = RGBtoV(r1,g1,b1);  v += RGBtoV(r2,g2,b2);
      v += RGBtoV(r3,g3,b3);  v += RGBtoV(r4,g4,b4);
      /* Then we average these four U and V samples that have been accumulated */
      U(x/2,y/2) = u/4;
      V(x/2,y/2) = v/4;
    }

/* Keep the helper macros local to this function; previously they leaked
   into every function defined after this point in the file. */
#undef R
#undef G
#undef B
#undef Y
#undef U
#undef V
#undef min
#undef RGBtoY
#undef RGBtoU
#undef RGBtoV

  /* encode theora data */
  theora_encode_YUVin(&ogg_enc->t_state, &ogg_enc->yuv);
  while (theora_encode_packetout(&ogg_enc->t_state, 0, &packet))
    ogg_stream_packetin(&ogg_enc->t_ogg_stream, &packet);

  /* Drain every completed page and account for the bytes written. */
  while (ogg_stream_pageout(&ogg_enc->t_ogg_stream, &page) != 0) {
    fwrite(page.header, 1, page.header_len, ogg_enc->ogg_file);
    fwrite(page.body, 1, page.body_len, ogg_enc->ogg_file);
    capture->transfered += page.header_len;
    capture->transfered += page.body_len;
  }

  return 0;
}

/* Fill the theora_info structure from the capture parameters, size the
   4:2:0 YUV buffer geometry, and register ogg_encode_frame() as the
   per-frame callback.  Must be called before ogg_enc_open(). */
void ogg_enc_init(ogg_enc_t *ogg_enc, capture_t *capture)
{
  DEBUG_ENTER("ogg_enc_init");

  /* fill theora info -- a fresh theora_info must be prepared with
     theora_info_init() (it also records the library version fields);
     theora_info_clear() is the destructor and was wrong here on an
     uninitialized struct. */
  theora_info_init(&ogg_enc->t_info);
  ogg_enc->t_info.width                        = capture->width;
  ogg_enc->t_info.height                       = capture->height;
  ogg_enc->t_info.frame_width                  = capture->width;
  ogg_enc->t_info.frame_height                 = capture->height;
  ogg_enc->t_info.offset_x                     = 0;
  ogg_enc->t_info.offset_y                     = 0;
  ogg_enc->t_info.aspect_numerator             = 1;
  ogg_enc->t_info.aspect_denominator           = 1;
  ogg_enc->t_info.target_bitrate               = capture->vb * 1000;  /* vb is kbit/s */
  ogg_enc->t_info.quality                      = 0;
  /* fps may be fractional (e.g. 29.97); express it as a rational. */
  ogg_enc->t_info.fps_numerator                = capture->fps * 1000000;
  ogg_enc->t_info.fps_denominator              = 1000000;
  ogg_enc->t_info.quick_p                      = 1;
  ogg_enc->t_info.keyframe_auto_p              = 1;
  ogg_enc->t_info.keyframe_frequency           = 64;
  ogg_enc->t_info.keyframe_frequency_force     = 64;
  ogg_enc->t_info.keyframe_data_target_bitrate = ogg_enc->t_info.target_bitrate * 1.5;
  ogg_enc->t_info.keyframe_auto_threshold      = 80;
  ogg_enc->t_info.keyframe_mindistance         = 8;
  ogg_enc->t_info.noise_sensitivity            = 1;
  ogg_enc->t_info.sharpness                    = 2;

  /* Guess the colorspace from the frame rate: 25 fps -> PAL/SECAM,
     ~30 fps -> NTSC, anything else unspecified.
     NOTE(review): the abs(fps-30)<1 test only matches 29.03..30.97 if
     fps is floating point; with an integer fps it matches exactly 30. */
  if (capture->fps == 25)
    ogg_enc->t_info.colorspace = OC_CS_ITU_REC_470BG;
  else if (abs(capture->fps-30)<1)
    ogg_enc->t_info.colorspace = OC_CS_ITU_REC_470M;
  else
    ogg_enc->t_info.colorspace = OC_CS_UNSPECIFIED;

  /* yuv buffer geometry: full-resolution luma, 2x2-subsampled chroma. */
  ogg_enc->yuv.y_width   = capture->width;
  ogg_enc->yuv.y_height  = capture->height;
  ogg_enc->yuv.y_stride  = capture->width;
  ogg_enc->yuv.uv_width  = capture->width / 2;
  ogg_enc->yuv.uv_height = capture->height / 2;
  ogg_enc->yuv.uv_stride = ogg_enc->yuv.uv_width;

  /* register this encoder with the capture pipeline */
  capture->video_proc.oframe  = (vidproc_callback)ogg_encode_frame;
  capture->video_proc.encoder = ogg_enc;
}

/* Create the output file at `path`, initialize the Ogg stream and the
   Theora encoder, write the stream headers, and allocate the YUV planes.
   Returns 0 on success, ERR_OPEN / ERR_THEORA / ERR_MALLOC on failure
   (all acquired resources are released on the error paths). */
int ogg_enc_open(ogg_enc_t *ogg_enc, const char *path)
{
  ogg_page page;
  ogg_packet packet;

  DEBUG_ENTER("ogg_enc_open");

  /* Open destination file.  Ogg is a binary format: the "b" flag keeps
     platforms with text-mode translation from corrupting the stream. */
  ogg_enc->ogg_file = fopen(path, "w+b");
  if (ogg_enc->ogg_file == NULL)
    return ERR_OPEN;

  /* Theora stream -- random serial number per the Ogg spec. */
  srand(time (NULL));
  ogg_stream_init(&ogg_enc->t_ogg_stream, rand());
  theora_encode_init(&ogg_enc->t_state, &ogg_enc->t_info);
  theora_comment_init(&ogg_enc->t_comment);

  /* write header to file -- the first header packet must come out on a
     page of its own, so a pageout returning != 1 is a hard error. */
  theora_encode_header(&ogg_enc->t_state, &packet);
  ogg_stream_packetin(&ogg_enc->t_ogg_stream, &packet);
  if (ogg_stream_pageout(&ogg_enc->t_ogg_stream, &page) != 1) {
    /* release everything initialized above (the original leaked the
       stream and encoder state here) */
    theora_comment_clear(&ogg_enc->t_comment);
    theora_clear(&ogg_enc->t_state);
    ogg_stream_clear(&ogg_enc->t_ogg_stream);
    fclose(ogg_enc->ogg_file);
    return ERR_THEORA;
  }
  fwrite(page.header, 1, page.header_len, ogg_enc->ogg_file);
  fwrite(page.body, 1, page.body_len, ogg_enc->ogg_file);

  /* write comment to file */
  theora_comment_add_tag(&ogg_enc->t_comment, "ENCODER", "LinPVR");
  theora_encode_comment(&ogg_enc->t_comment, &packet);
  ogg_stream_packetin(&ogg_enc->t_ogg_stream, &packet);

  /* write tables to file */
  theora_encode_tables(&ogg_enc->t_state, &packet);
  ogg_stream_packetin(&ogg_enc->t_ogg_stream, &packet);

  /* allocate memory buffers for the 4:2:0 yuv frame (chroma planes are
     a quarter of the luma plane) */
  ogg_enc->yuv.y = (unsigned char*) g_malloc(ogg_enc->t_info.frame_width * ogg_enc->t_info.frame_height);
  ogg_enc->yuv.u = (unsigned char*) g_malloc(ogg_enc->t_info.frame_width * ogg_enc->t_info.frame_height / 4);
  ogg_enc->yuv.v = (unsigned char*) g_malloc(ogg_enc->t_info.frame_width * ogg_enc->t_info.frame_height / 4);
  if (ogg_enc->yuv.y == NULL || ogg_enc->yuv.u == NULL || ogg_enc->yuv.v == NULL) {
    /* g_free(NULL) is a no-op, so freeing all three is always safe;
       the original leaked whichever planes had been allocated */
    g_free(ogg_enc->yuv.y);
    g_free(ogg_enc->yuv.u);
    g_free(ogg_enc->yuv.v);
    theora_comment_clear(&ogg_enc->t_comment);
    theora_clear(&ogg_enc->t_state);
    ogg_stream_clear(&ogg_enc->t_ogg_stream);
    fclose(ogg_enc->ogg_file);
    return ERR_MALLOC;
  }

  return 0;
}

/* Flush all remaining Ogg pages to disk, tear down the encoder state and
   close the output file.  Counterpart of ogg_enc_open(). */
void ogg_enc_close(ogg_enc_t *ogg_enc)
{
  ogg_page page;

  DEBUG_ENTER("ogg_enc_close");

  /* ogg_stream_flush() emits at most ONE page per call, so it must be
     looped until the stream is drained; the original `if` flushed a
     single page and could truncate the end of the recording. */
  while (ogg_stream_flush(&ogg_enc->t_ogg_stream, &page) > 0) {
    fwrite(page.header, 1, page.header_len, ogg_enc->ogg_file);
    fwrite(page.body, 1, page.body_len, ogg_enc->ogg_file);
  }

  /* release codec/stream state and close the file */
  ogg_stream_clear(&ogg_enc->t_ogg_stream);
  theora_clear(&ogg_enc->t_state);
  theora_comment_clear(&ogg_enc->t_comment);
  theora_info_clear(&ogg_enc->t_info);
  fclose(ogg_enc->ogg_file);

  /* free the YUV planes allocated in ogg_enc_open() */
  g_free(ogg_enc->yuv.y);
  g_free(ogg_enc->yuv.u);
  g_free(ogg_enc->yuv.v);
}
