#include <stdio.h>
#include <assert.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <sys/ioctl.h>
#include <sys/signal.h>
#include <sys/mman.h>
#include <unistd.h> 
#include <sys/resource.h> 

#include "fmjpeg_avcodec.h"
#include "../../../libavcodec/avcodec.h"

//#define PATTERN_FILE
//#define MJPG_DEBUG_OUT_FILE
//#define MJPG_DEBUG_IN_FILE
//#define MJPG_PARSING_BS
//#define SHOW_PERFORMANCE_EVALUATION_RESULT
#define NEW_VERSION

#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    #include <sys/time.h>
    typedef unsigned long long uint64;

static void time_start(struct timeval *tv)
{
      /* Record the current wall-clock time as the measurement origin. */
      gettimeofday(tv, NULL);
}

static uint64 time_delta(struct timeval *start, struct timeval *stop)
{
      uint64 secs, usecs;

      secs = stop->tv_sec - start->tv_sec;
      usecs = stop->tv_usec - start->tv_usec;
      if (usecs < 0) {
        secs--;
        usecs += 1000000;
      }
      return secs * 1000000 + usecs;
}

static uint64 time_stop(struct timeval *begin)
{
      /* Elapsed microseconds since *begin was captured by time_start(). */
      struct timeval now;

      gettimeofday(&now, NULL);
      return time_delta(begin, &now);
}
#endif

#ifdef MJPG_DEBUG_OUT_FILE
char dout_name[30];
FILE *dout;
int out_ver;
#endif

#ifdef MJPG_DEBUG_IN_FILE
char din_name[30];
FILE *din;
#endif

#undef ABS

#define FMJPEG_DECODER_DEV  "/dev/mjdec" //major:10 minior:60
#define FMJPEG_ENCODER_DEV  "/dev/mjenc" //major:10 minior:61

int fmjpeg_dec_fd=0;
int fmjpeg_enc_fd=0;

#ifdef USE_MMAP
static int dec_mmap_addr,enc_mmap_addr;
#endif

/* JPEG marker codes (second byte after 0xFF; see ITU-T T.81, Table B.1) */
typedef enum {
    /* start of frame */
    SOF0  = 0xc0,	/* baseline */
    SOF1  = 0xc1,	/* extended sequential, huffman */
    SOF2  = 0xc2,	/* progressive, huffman */
    SOF3  = 0xc3,	/* lossless, huffman */

    SOF5  = 0xc5,	/* differential sequential, huffman */
    SOF6  = 0xc6,	/* differential progressive, huffman */
    SOF7  = 0xc7,	/* differential lossless, huffman */
    JPG   = 0xc8,	/* reserved for JPEG extension */
    SOF9  = 0xc9,	/* extended sequential, arithmetic */
    SOF10 = 0xca,	/* progressive, arithmetic */
    SOF11 = 0xcb,	/* lossless, arithmetic */

    SOF13 = 0xcd,	/* differential sequential, arithmetic */
    SOF14 = 0xce,	/* differential progressive, arithmetic */
    SOF15 = 0xcf,	/* differential lossless, arithmetic */

    DHT   = 0xc4,	/* define huffman tables */

    DAC   = 0xcc,	/* define arithmetic-coding conditioning */

    /* restart with modulo 8 count "m" */
    RST0  = 0xd0,
    RST1  = 0xd1,
    RST2  = 0xd2,
    RST3  = 0xd3,
    RST4  = 0xd4,
    RST5  = 0xd5,
    RST6  = 0xd6,
    RST7  = 0xd7,

    SOI   = 0xd8,	/* start of image */
    EOI   = 0xd9,	/* end of image */
    SOS   = 0xda,	/* start of scan */
    DQT   = 0xdb,	/* define quantization tables */
    DNL   = 0xdc,	/* define number of lines */
    DRI   = 0xdd,	/* define restart interval */
    DHP   = 0xde,	/* define hierarchical progression */
    EXP   = 0xdf,	/* expand reference components */

    /* application-specific segments (JFIF uses APP0, EXIF uses APP1) */
    APP0  = 0xe0,
    APP1  = 0xe1,
    APP2  = 0xe2,
    APP3  = 0xe3,
    APP4  = 0xe4,
    APP5  = 0xe5,
    APP6  = 0xe6,
    APP7  = 0xe7,
    APP8  = 0xe8,
    APP9  = 0xe9,
    APP10 = 0xea,
    APP11 = 0xeb,
    APP12 = 0xec,
    APP13 = 0xed,
    APP14 = 0xee,
    APP15 = 0xef,

    /* reserved for JPEG extensions */
    JPG0  = 0xf0,
    JPG1  = 0xf1,
    JPG2  = 0xf2,
    JPG3  = 0xf3,
    JPG4  = 0xf4,
    JPG5  = 0xf5,
    JPG6  = 0xf6,
    JPG7  = 0xf7,
    JPG8  = 0xf8,
    JPG9  = 0xf9,
    JPG10 = 0xfa,
    JPG11 = 0xfb,
    JPG12 = 0xfc,
    JPG13 = 0xfd,
    COM   = 0xfe,	/* comment */
    TEM   = 0x01,	/* temporary private use for arithmetic coding */
    /* 0x02 -> 0xbf reserved */
} JPEGMARKER;

/**
  *
  * Decode Header Parsing Function
  *
  */
/* Result of a lightweight JPEG header parse (see jpeg_parse_buffer):
 * only dimensions from the SOF marker are recorded. */
typedef struct   {
	unsigned int height;	/* image height in pixels, from the SOF segment */
	unsigned int width;	/* image width in pixels, from the SOF segment */
} JPGCodecContext;


/* Read one byte at the cursor, advance the cursor by one, return the byte. */
int get_8bits(uint8_t **pbuf_ptr)
{
    int val = **pbuf_ptr;

    (*pbuf_ptr)++;
    return val;
}
/* Read a big-endian 16-bit value and advance the cursor by two bytes.
 * The original OR'd two get_8bits() calls in a single expression; C leaves
 * the evaluation order of those operands unspecified, and since each call
 * advances the cursor the two bytes could be swapped depending on the
 * compiler. Explicit indexing pins the order. */
unsigned int get_16bits(uint8_t **pbuf_ptr)
{
	uint8_t *p = *pbuf_ptr;
	unsigned int val = ((unsigned int)p[0] << 8) | p[1];

	*pbuf_ptr = p + 2;
	return val;
}

/* Read a big-endian 24-bit value and advance the cursor by three bytes.
 * Reads are explicitly ordered; the original combined two cursor-advancing
 * calls in one expression, whose evaluation order is unspecified in C. */
unsigned int get_24bits(uint8_t **pbuf_ptr)
{
	uint8_t *p = *pbuf_ptr;
	unsigned int val = ((unsigned int)p[0] << 16) |
	                   ((unsigned int)p[1] << 8)  |
	                    (unsigned int)p[2];

	*pbuf_ptr = p + 3;
	return val;
}

/* Read a big-endian 32-bit value and advance the cursor by four bytes.
 * Reads are explicitly ordered; the original combined two cursor-advancing
 * calls in one expression, whose evaluation order is unspecified in C. */
unsigned int get_32bits(uint8_t **pbuf_ptr)
{
	uint8_t *p = *pbuf_ptr;
	unsigned int val = ((unsigned int)p[0] << 24) |
	                   ((unsigned int)p[1] << 16) |
	                   ((unsigned int)p[2] << 8)  |
	                    (unsigned int)p[3];

	*pbuf_ptr = p + 4;
	return val;
}

/* EXP (expand reference components) handler — placeholder: consumes no
 * bytes and always reports success. */
static int define_expend_ref_component(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* DHP (define hierarchical progression) handler — placeholder: consumes
 * no bytes and always reports success. */
static int define_hierarchical_progress(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* SOS (start of scan) handler — placeholder: consumes no bytes and always
 * reports success. */
static int define_start_of_scan(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* SOI (start of image) handler — placeholder: consumes no bytes and always
 * reports success. */
static int define_start_of_image(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* EOI (end of image) handler — placeholder: consumes no bytes and always
 * reports success. */
static int define_end_of_image(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* DNL (define number of lines) handler — placeholder: consumes no bytes
 * and always reports success. */
static int define_number_line(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

#define MAX_COMPONENTS 10	/* capacity of the per-component arrays below */
/* Fields of a SOF (start of frame) marker segment as parsed by
 * define_start_of_frame(). */
typedef struct SOF_S {
	unsigned int precision;	/* 16-bit segment length + 8-bit precision, read together via get_24bits() */
	unsigned int height;	/* image height in pixels */
	unsigned int width;	/* image width in pixels */
	unsigned int component;	/* number of components as stated in the bitstream */
	unsigned int component_id[MAX_COMPONENTS];	/* component identifiers */
	unsigned int hv_count[MAX_COMPONENTS];	/* packed horizontal/vertical sampling factors */
	unsigned int quant_index[MAX_COMPONENTS];	/* quantization table selector (valid: 0-3) */
} SOF_T;

/* Parse a SOF marker segment: extract image dimensions into *av and step
 * the shared cursor past the fields that were read.
 *
 * Fix: the component count comes straight from the bitstream (one byte, up
 * to 255) but indexes fixed SOF_T arrays of MAX_COMPONENTS entries — the
 * original loop could overflow them on a malformed stream. Clamp it. */
static int define_start_of_frame(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	SOF_T sof;
	uint8_t *buf_ptr;
	unsigned int i, ncomp;

	buf_ptr = *pbuf_ptr;

	/* 24 bits = 16-bit segment length + 8-bit sample precision. */
	sof.precision = get_24bits(&buf_ptr);
	sof.height = get_16bits(&buf_ptr);
	sof.width = get_16bits(&buf_ptr);
	sof.component = get_8bits(&buf_ptr);

	ncomp = sof.component;
	if (ncomp > MAX_COMPONENTS)
		ncomp = MAX_COMPONENTS;	/* never index past the fixed arrays */

	for (i = 0; i < ncomp; i++) {
		sof.component_id[i] = get_8bits(&buf_ptr);
		sof.hv_count[i] = get_8bits(&buf_ptr);
		sof.quant_index[i] = get_8bits(&buf_ptr);
		if (sof.quant_index[i] >= 4)	/* invalid table selector: stop parsing */
			break;
	}

	av->height = sof.height;
	av->width = sof.width;
	*pbuf_ptr = buf_ptr;
	return 1;
}


/* DAC (define arithmetic-coding conditioning) handler — placeholder:
 * consumes no bytes and always reports success. */
static int define_arith_coding(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* DQT (define quantization tables) handler — placeholder: consumes no
 * bytes and always reports success. */
static int define_quantization_table(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* DRI (define restart interval) handler — placeholder: consumes no bytes
 * and always reports success. */
static int define_restart_interval(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* DHT (define huffman tables) handler — placeholder: consumes no bytes
 * and always reports success. */
static int define_huffman_table(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* COM (comment) handler — placeholder: consumes no bytes and always
 * reports success. */
static int read_comment(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* APPn (application-specific data) handler — placeholder: consumes no
 * bytes and always reports success. */
static int define_app_spec_data(JPGCodecContext  *av, uint8_t **pbuf_ptr)
{
	(void)av;
	(void)pbuf_ptr;
	return 1;
}

/* Scan forward for the next JPEG marker (0xFF followed by a code in
 * [0xC0, 0xFE]); return the 8-bit marker code and leave the cursor just
 * past it. Return -1 (with the cursor at buf_end) if none is found.
 *
 * Fix: the original read the look-ahead byte without checking the bound,
 * so a trailing 0xFF caused a one-byte read past buf_end. */
static int find_markers(uint8_t **pbuf_ptr, uint8_t *buf_end)
{
    uint8_t *buf_ptr;
    int val;

    buf_ptr = *pbuf_ptr;
    /* Stop one byte early so buf_ptr[1] never reads past buf_end. */
    while (buf_ptr + 1 < buf_end) {
        if (buf_ptr[0] == 0xff && buf_ptr[1] >= 0xc0 && buf_ptr[1] <= 0xfe) {
            val = buf_ptr[1];
            buf_ptr += 2;       /* consume 0xFF and the marker code */
            goto found;
        }
        buf_ptr++;
    }
    val = -1;
    buf_ptr = buf_end;          /* exhausted: cursor ends at buf_end, as before */
found:
    *pbuf_ptr = buf_ptr;
    return val;
}


/* Walk a JPEG bitstream and dispatch every marker to its handler; the only
 * observable effect today is that SOF fills av->width/av->height.
 *
 * Cleanups: explicit char*→uint8_t* casts (were implicit conversions) and
 * removal of the `ret` variable that was assigned but never read. */
static void jpeg_parse_buffer(JPGCodecContext  *av, char *buf, int len)
{
	uint8_t *buf_start = (uint8_t *)buf;
	uint8_t *buf_end = (uint8_t *)buf + len;
	int val;

	/* find_markers() leaves buf_start == buf_end when nothing more is
	 * found, which terminates the loop. */
	while (buf_start < buf_end) {
		val = find_markers(&buf_start, buf_end);
		if (val == -1)
			continue;
		switch (val) {
		/* every SOF variant shares one dimension parser */
		case SOF0: case SOF1: case SOF2: case SOF3:
		case SOF5: case SOF6: case SOF7:
		case SOF9: case SOF10: case SOF11:
		case SOF13: case SOF14: case SOF15:
			define_start_of_frame(av, &buf_start);
			break;
		case DHT:	/* define huffman tables */
			define_huffman_table(av, &buf_start);
			break;
		case DAC:	/* define arithmetic-coding conditioning */
			define_arith_coding(av, &buf_start);
			break;
		case SOI:	/* start of image */
			define_start_of_image(av, &buf_start);
			break;
		case EOI:	/* end of image */
			define_end_of_image(av, &buf_start);
			break;
		case SOS:	/* start of scan */
			define_start_of_scan(av, &buf_start);
			break;
		case DQT:	/* define quantization tables */
			define_quantization_table(av, &buf_start);
			break;
		case DNL:	/* define number of lines */
			define_number_line(av, &buf_start);
			break;
		case DRI:	/* define restart interval */
			define_restart_interval(av, &buf_start);
			break;
		case DHP:	/* define hierarchical progression */
			define_hierarchical_progress(av, &buf_start);
			break;
		case EXP:	/* expand reference components */
			define_expend_ref_component(av, &buf_start);
			break;
		case APP0: case APP1: case APP2: case APP3:
		case APP4: case APP5: case APP6: case APP7:
		case APP8: case APP9: case APP10: case APP11:
		case APP12: case APP13: case APP14: case APP15:
			define_app_spec_data(av, &buf_start);
			break;
		case COM:	/* comment */
			read_comment(av, &buf_start);
			break;
		case JPG:
		case JPG0: case JPG1: case JPG2: case JPG3:
		case JPG4: case JPG5: case JPG6: case JPG7:
		case JPG8: case JPG9: case JPG10: case JPG11:
		case JPG12: case JPG13:
		case RST0: case RST1: case RST2: case RST3:
		case RST4: case RST5: case RST6: case RST7:
		case TEM:
		default:
			/* markers with no payload handling here */
			break;
		}
	}
}


/**
 * Print a hex dump of a buffer on stdout, 16 bytes per row: offset column,
 * hex bytes (space-padded past the end), then a printable-ASCII gutter
 * where non-printable bytes show as '.'.
 * @param buf buffer
 * @param size buffer size
 */
void debug_hex_dump(uint8_t *buf, int size)
{
    int row, col;

    printf("==== buf size %x ====\n", size);
    for (row = 0; row < size; row += 16) {
        int count = size - row;

        if (count > 16)
            count = 16;
        printf("%08x ", row);
        for (col = 0; col < 16; col++) {
            if (col < count)
                printf(" %02x", buf[row + col]);
            else
                printf("   ");
        }
        printf(" ");
        for (col = 0; col < count; col++) {
            int ch = buf[row + col];

            if (ch < ' ' || ch > '~')
                ch = '.';
            printf("%c", ch);
        }
        printf("\n");
    }
}


/* Per-decoder private state, stored in AVCodecContext.priv_data. */
typedef struct FMJpegDecContext {
    #ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    unsigned int   pic_count;	/* frames decoded since init */
    uint64              nsecs;	/* elapsed milliseconds, filled at teardown */
    struct               timeval t1;	/* wall-clock start set by fmjpeg_decoder_init() */
    #endif    
} FMJpegDecContext;


/* Decode one JPEG bitstream (buf/buf_size) into the caller-allocated
 * AVFrame planes via the decoder device. Returns buf_size on success and
 * -1 on failure; *got_picture reports whether a frame was produced.
 *
 * Fixes: jctx was compared while uninitialized when the stream carried no
 * SOF marker (UB, under MJPG_PARSING_BS); the global fd sentinel was left
 * stale after close() on the ioctl failure path; a dead `#if 0` block that
 * referenced a nonexistent field was removed. */
int fmjpeg_decode(AVCodecContext *avctx, void *data, int *got_picture, uint8_t *buf, int buf_size)
{
    FMJpegDecContext    *s=avctx->priv_data;
    FJPEG_DEC_FRAME frame;
#ifdef MJPG_PARSING_BS
    JPGCodecContext  jctx = {0, 0};   /* zero so an absent SOF cannot leave garbage */
#endif

    AVFrame             *picture=data;

#ifdef MJPG_PARSING_BS
    /* Reject pictures larger than the configured frame buffer. */
    jpeg_parse_buffer(&jctx, buf, buf_size);
    if (( jctx.height > avctx->height) || (jctx.width > avctx->width )) {
	printf("Error to Picture width*height > framr buffern\n");
	printf("picture width x height : %d x %d\n",jctx.width,jctx.height);
	printf("framebuffer width x height: %d x %d\n",avctx->width,avctx->height);
	*got_picture = 0;
         return -1;
    }
#endif

#ifdef MJPG_DEBUG_IN_FILE
    /* Dump the current output planes (YUV420 layout) for offline inspection. */
    if(picture->data[0]!=0)   {
        fwrite((void *)picture->data[0],avctx->width*avctx->height,1,din);
        fwrite((void *)picture->data[1],avctx->width*avctx->height/4,1,din);
        fwrite((void *)picture->data[2],avctx->width*avctx->height/4,1,din);
    }
#endif

    frame.buf = buf;
    frame.buf_size = buf_size;
    frame.pu8YUVAddr[0]=picture->data[0];
    frame.pu8YUVAddr[1]=picture->data[1];
    frame.pu8YUVAddr[2]=picture->data[2];

    if ( ioctl(fmjpeg_dec_fd,FMJPEG_IOCTL_DECODE_ONE,&frame) < 0 ) {
        close(fmjpeg_dec_fd);
        fmjpeg_dec_fd = 0;   /* reset the "closed" sentinel; the old code left a stale fd */
        printf("Error to set FMJPEG_IOCTL_DECODE_ONE\n");
        return -1;
    }

    *got_picture = 1;

#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    s->pic_count++;
#endif

    return buf_size;
}


/* Open and configure the hardware JPEG decoder device.
 * Returns 0 on success, -1 on failure.
 *
 * Fix: open(2) reports failure with -1, not 0, so the original `==0` tests
 * could never detect a failed open. fmjpeg_dec_fd==0 remains the "closed"
 * sentinel used throughout the file, and error paths now restore it. */
int fmjpeg_decoder_init(AVCodecContext *avctx)
{
    FMJpegDecContext    *s=avctx->priv_data;
    FJPEG_DEC_PARAM xparam;

    if(fmjpeg_dec_fd<=0)
        fmjpeg_dec_fd=open(FMJPEG_DECODER_DEV,O_RDWR);
    if(fmjpeg_dec_fd<0)  {
        fmjpeg_dec_fd=0;    /* keep the sentinel consistent for a retry */
        printf("Fail to open %s\n",FMJPEG_DECODER_DEV);
        fflush(stdout);
        return -1;
    }

#ifdef MJPG_DEBUG_OUT_FILE
    sprintf(dout_name,"/tmp/out%d.jp",getpid());
    dout=fopen(dout_name,"wb");
    printf("Use decoder output name %s\n",dout_name);
#endif

#ifdef MJPG_DEBUG_IN_FILE
    sprintf(din_name,"/tmp/in%d.yuv",getpid());
    din=fopen(din_name,"wb");
    printf("Use decoder input name %s\n",din_name);
#endif

#ifdef USE_MMAP
    /* Map the driver frame buffer: YUV420 needs 3/2 bytes per pixel. */
    dec_mmap_addr=(int)mmap(0,(avctx->framebuffer_rgb_stride*avctx->framebuffer_rgb_height*3)/2,PROT_READ|PROT_WRITE,MAP_SHARED,fmjpeg_dec_fd, 0);
    if(dec_mmap_addr<=0) {
        printf("MJPEG MMAP Fail\n");	fflush(stdout);
        close(fmjpeg_dec_fd);
        fmjpeg_dec_fd=0;
        return -1;
    }
#endif

    /* Frames are decoded straight into caller-provided buffers. */
    avctx->flags |= CODEC_FLAG_PICT_ALLOC_EXTERNAL;

    xparam.static_jpg=avctx->fmjpeg_jpeg;           // if 1, enable huff and quant table out
    xparam.frame_width = avctx->framebuffer_rgb_stride;
    xparam.frame_hight = avctx->framebuffer_rgb_height;

    if ( ioctl(fmjpeg_dec_fd,FMJPEG_IOCTL_DECODE_CREATE,&xparam ) < 0 )     {
        close(fmjpeg_dec_fd);
        fmjpeg_dec_fd=0;
        printf("Error to set FMJPEG_IOCTL_DECODE_CREATE\n");   fflush(stdout);
        return -1;
    }

#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    s->pic_count=0;
    time_start(&(s->t1));
#endif

    return 0;
}


/* Tear down the decoder: unmap the frame buffer (mmap builds only), close
 * the device node and reset the global fd sentinel to 0 (the "closed"
 * state used by fmjpeg_decoder_init). Always returns 0. */
int fmjpeg_decoder_end(AVCodecContext *avctx)
{
    FMJpegDecContext    *s=avctx->priv_data;
  
    if(fmjpeg_dec_fd)     {
        #ifdef USE_MMAP
        /* NOTE(review): munmap() takes a void*; dec_mmap_addr is an int —
         * relies on implicit conversion, verify on 64-bit targets. */
        munmap(dec_mmap_addr,(avctx->framebuffer_rgb_stride*avctx->framebuffer_rgb_height*3)/2);
        #endif
        close(fmjpeg_dec_fd);
    }
    fmjpeg_dec_fd = 0;
	
#ifdef MJPG_DEBUG_OUT_FILE
    fclose(dout);
#endif

#ifdef MJPG_DEBUG_IN_FILE
    fclose(din);
#endif
    
#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT	
    /* Report average throughput in frames per second. */
    s->nsecs = time_stop(&(s->t1)) / 1000;
    printf("spend %lld milliseconds with toal frame count %d = %f(frames/per second)\n", s->nsecs,s->pic_count,(float)((float)s->pic_count)/((float)s->nsecs/(float)1000.0));
    printf("JPEG decoding is done. %d frames are decoded\n",s->pic_count);	
#endif		

    return 0;
}


/* One-shot decode of a single JPEG: open the decoder device, configure it
 * for the given frame size, decode into the three caller-supplied YUV
 * planes, then close the device. Returns 0 on success, -1 on failure.
 *
 * Fixes: dec_fd was read while uninitialized (undefined behavior); the
 * open(2) failure test was `==0` although open reports failure with -1;
 * unused locals (picture_size, dec_mmap) removed. */
int fmjpeg_decoder_sj(int width,int height,char *buf, unsigned int buf_size, char *data0,char *data1,char *data2)
{
    FJPEG_DEC_PARAM dec_param;
    FJPEG_DEC_FRAME frame;
    int dec_fd;

    dec_fd=open(FMJPEG_DECODER_DEV,O_RDWR);
    if(dec_fd<0)     {
        printf("Fail to open %s\n",FMJPEG_DECODER_DEV);
        fflush(stdout);
        return -1;
    }

    dec_param.static_jpg = 1;
    dec_param.frame_hight=height;   // frame buffer height
    dec_param.frame_width=width;    // frame buffer width

    if ( ioctl(dec_fd,FMJPEG_IOCTL_DECODE_CREATE,&dec_param) < 0  ) {
        close(dec_fd);
        printf("Error to set FMJPEG_IOCTL_DECODE_CREATE\n");
        fflush(stdout);
        return -1;
    }

    frame.pu8YUVAddr[0] = data0;
    frame.pu8YUVAddr[1] = data1;
    frame.pu8YUVAddr[2] = data2;
    frame.buf = buf;
    frame.buf_size = buf_size;

    if ( ioctl(dec_fd,FMJPEG_IOCTL_DECODE_ONE,&frame) < 0 )  {
        close(dec_fd);
        printf("Error to set FMJPEG_IOCTL_DECODE_ONE\n");
        return -1;
    }

    close(dec_fd);
    return 0;
}

/////////////////////////////////////////////////////////////////////////////////
//                                      JPEG/MJPEG encoder                                                                                             //
/////////////////////////////////////////////////////////////////////////////////

/* Pair of per-macroblock deviation maps used by motion detection:
 * dev_post is fetched before encoding a frame, dev_curr after, and the
 * two are compared in motion_detection_function(). */
typedef struct FMJpegEncDevBuf {
    unsigned int *dev_post;
    unsigned int *dev_curr;
}FMJpegEncDevBuf;

/* NOTE(review): instances of this struct are filled in fmjpeg_encode()
 * but never consumed — candidate for removal. */
typedef struct{
	unsigned int *buf;
	int mb_width;
	int mb_height;
}FMJpegDevInfo;

/* Per-encoder private state, stored in AVCodecContext.priv_data. */
typedef struct FMJpegEncContext {
    FJPEG_ENC_PARAM enc_param;	/* driver parameter block passed to every encode ioctl */
    FMJpegEncDevBuf dev_buf;	/* motion-detection deviation maps (heap, see encoder_init) */
    MOTION_DET_INFO motion;	/* detection windows/thresholds, seeded by motion_alarm_init() */
    AVFrame picture;	/* exposed to callers via avctx->coded_frame */
    unsigned char *image_phy_addr[3];
    unsigned char *image_virt_addr[3];
    unsigned char *pbitstream_phy_addr;
    unsigned char *pbitstream_virt_addr;
    unsigned int frm_cnt;	/* frames encoded since init; drives the md_interval modulo */
    #ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    unsigned int   pic_count;	
    uint64              nsecs;
    struct               timeval t1;
    #endif    
} FMJpegEncContext;

/* NOTE: MAX/MIN evaluate their arguments twice — avoid side effects. */
#define MAX(x, y) ((x) > (y) ? (x) : (y))
#define MIN(x, y) ((x) < (y) ? (x) : (y))
#define IMAGE_COMP		3	/* encoder input components (Y, U, V) */
#define IMAGE_QUALITY	60	/* default JPEG quality (0-100) */
#define IMAGE_RST		0	/* default restart interval */

/* Set the horizontal/vertical sampling factors of component `index` in the
 * encoder parameter block; expects an FMJpegEncContext `s` in scope. */
#define SET_COMP(index,hsamp,vsamp)  \
  (s->enc_param.rgComponentInfo[index].m_u8HSamplingFrequency = (hsamp), \
   s->enc_param.rgComponentInfo[index].m_u8VSamplingFrequency = (vsamp))

/* Motion-detection configuration pushed in by the application via
 * fmj_set_motion_cfg(). Coordinates are macroblock indices describing up
 * to three rectangular detection windows (left-upper / right-down). */
struct mdcfg {
    unsigned int    exist;	/* non-zero enables motion detection */
    pid_t           pid;
    unsigned int    md_interval;	/* run detection every N frames */
    /* window 0 */
    unsigned int    x0_LU;
    unsigned int    y0_LU;
    unsigned int    x0_RD;
    unsigned int    y0_RD;
    
    /* window 1 */
    unsigned int    x1_LU;
    unsigned int    y1_LU;
    unsigned int    x1_RD;
    unsigned int    y1_RD;
    
    /* window 2 */
    unsigned int    x2_LU;
    unsigned int    y2_LU;
    unsigned int    x2_RD;
    unsigned int    y2_RD;

    /* per-window deviation thresholds and alarm indices */
    unsigned int    dev_th0;
    unsigned int    alarm_idx0;

    unsigned int    dev_th1;
    unsigned int    alarm_idx1;

    unsigned int    dev_th2;
    unsigned int    alarm_idx2;
    unsigned int    ver;
    unsigned char   filename[30];
    unsigned int    rotate;
    unsigned int    rotate_idx;
} mdcfg; /* NOTE(review): this also defines a global `mdcfg` object that is never referenced — only `mdval` below is used */

/* Active configuration copy used by the encoder. */
struct mdcfg mdval;

void fmj_set_motion_cfg(struct mdcfg *val)
{
    memcpy(&mdval,val,sizeof(struct mdcfg));
}

/* Seed the driver-facing motion-detection descriptor from the process-wide
 * configuration in `mdval` (see fmj_set_motion_cfg): three macroblock
 * windows, their deviation thresholds, and the detection interval. */
void motion_alarm_init(MOTION_DET_INFO *motion)
{
	
	motion->range_mb_x0_LU = mdval.x0_LU;
  	motion->range_mb_y0_LU = mdval.y0_LU;
  	motion->range_mb_x0_RD = mdval.x0_RD;
  	motion->range_mb_y0_RD = mdval.y0_RD;
  	motion->range_mb_x1_LU = mdval.x1_LU;
  	motion->range_mb_y1_LU = mdval.y1_LU;
  	motion->range_mb_x1_RD = mdval.x1_RD;
  	motion->range_mb_y1_RD = mdval.y1_RD;
  	motion->range_mb_x2_LU = mdval.x2_LU;
  	motion->range_mb_y2_LU = mdval.y2_LU;
  	motion->range_mb_x2_RD = mdval.x2_RD;
  	motion->range_mb_y2_RD = mdval.y2_RD;
	
  	motion->delta_dev_th0 = mdval.dev_th0;   
  	motion->delta_dev_th1 = mdval.dev_th1;       
  	motion->delta_dev_th2 = mdval.dev_th2;     
  	motion->md_interval = mdval.md_interval;  	
}

/* Per-window hit counters filled by motion_detection_function(). */
typedef struct ACTIVITY{
	unsigned int active0;	/* macroblocks over threshold in window 0 */
	unsigned int active1;	/* macroblocks over threshold in window 1 */
	unsigned int active2;	/* macroblocks over threshold in window 2 */
} ACTIVITY;

/* NOTE(review): shadows the C library abs() and evaluates X twice. The
 * value it is applied to below (dev_delta) is unsigned, so (X)>0 holds for
 * any non-zero value and the macro is effectively the identity there —
 * confirm whether a true magnitude was intended. */
#define abs(X)    (((X)>0)?(X):-(X))
/* Compare two per-macroblock deviation maps and count, for each of the
 * three configured windows, how many macroblocks changed by more than the
 * window's threshold. Counters in *activity are incremented, not reset —
 * callers zero them first (see fmjpeg_encode).
 *
 * NOTE(review): dev_delta is unsigned, so post < curr wraps around and the
 * abs() macro above is an identity on it; the comparison therefore sees the
 * wrapped value, not a signed magnitude. Also mbwidth/mbheight are signed
 * and compared against unsigned loop counters — assumed non-negative. */
void motion_detection_function(
	unsigned int *dev_curr, 
	unsigned int *dev_post, 
	MOTION_DET_INFO *motion, 
	ACTIVITY *activity,
	int mbwidth,
	int mbheight)
{
								
		unsigned int x_pos, y_pos;
		unsigned int dev_delta ;
		/* Row-major scan over the macroblock grid. */
		for(y_pos=0 ; y_pos<mbheight; y_pos++ ) {
			for ( x_pos=0 ; x_pos<mbwidth ; x_pos++ ) {
				dev_delta = dev_post[(y_pos*mbwidth)+x_pos] - dev_curr[(y_pos*mbwidth)+x_pos ];
				/* window 0 */
		    		if ( (x_pos >= motion->range_mb_x0_LU) && (y_pos >= motion->range_mb_y0_LU) &&
	     				(x_pos <  motion->range_mb_x0_RD) && (y_pos <  motion->range_mb_y0_RD) ) {
	        				if (abs(dev_delta) > motion->delta_dev_th0) {
						activity->active0++;
						//fprintf(stdout,"0: x %d y %d curr 0x%x post %x\n", x_pos, y_pos, dev_curr[(y_pos*mbwidth)+x_pos], dev_post[(y_pos*mbwidth)+x_pos ]);		
						//fflush(stdout);
         				}
         			}
				/* window 1 */
				if ( (x_pos >= motion->range_mb_x1_LU) && (y_pos >= motion->range_mb_y1_LU) &&
	     				(x_pos <  motion->range_mb_x1_RD) && (y_pos <  motion->range_mb_y1_RD) ) {
         				if (abs(dev_delta)> motion->delta_dev_th1) {
         					activity->active1++;
						//fprintf(stdout,"1: x %d y %d curr 0x%x post %x\n", x_pos, y_pos, dev_curr[(y_pos*mbwidth)+x_pos], dev_post[(y_pos*mbwidth)+x_pos ]);		
						//fflush(stdout);	
         				}
         			}
				/* window 2 */
				if ( (x_pos >= motion->range_mb_x2_LU) && (y_pos >= motion->range_mb_y2_LU) &&
	     				(x_pos <  motion->range_mb_x2_RD) && (y_pos <  motion->range_mb_y2_RD) ) {
            				if (abs(dev_delta)> motion->delta_dev_th2) {
            					activity->active2++;
						//fprintf(stdout,"2: x %d y %d curr 0x%x post %x\n", x_pos, y_pos, dev_curr[(y_pos*mbwidth)+x_pos], dev_post[(y_pos*mbwidth)+x_pos ]);		
						//fflush(stdout);		
            				}
            			}
      			}
		}	
}

/* Fetch the encoder's per-macroblock deviation map into devinfo.
 * Fix: on ioctl failure the device was closed but the global fd sentinel
 * was left stale, so subsequent calls would operate on a dead descriptor.
 * Reset it to 0 (the "closed" state) so a later init can reopen. */
void fmjpeg_get_buf(unsigned int *devinfo)
{
     if ( ioctl(fmjpeg_enc_fd,FMJPEG_IOCTL_ENCODE_DEVBUF,devinfo) < 0 ) {
         close(fmjpeg_enc_fd);
         fmjpeg_enc_fd = 0;
         printf("Error to set FMJPEG_IOCTL_ENCODE_DEVBUF\n"); 
         return;
    }
}
/* Encode one YUV frame (data/AVFrame) into a JPEG bitstream in buf.
 * Returns the bitstream size on success, -1 on failure.
 *
 * Fixes: the macroblock height was derived from the HORIZONTAL sampling
 * factor (copy-paste bug; wrong for non-square samplings such as YUV422);
 * `% md_interval` divided by zero when the interval was configured as 0;
 * the ioctl failure path left a stale global fd; unused locals (dev_info,
 * dev_tmp) removed. */
int fmjpeg_encode(AVCodecContext *avctx,uint8_t *buf,int buf_size,void *data)
{
    FMJpegEncContext    *s=(FMJpegEncContext *)avctx->priv_data;
    AVFrame             *pav = (AVFrame *)data;
    unsigned int        bitstream_size;
    ACTIVITY active;
    int                 i;

#ifdef MJPG_DEBUG_IN_FILE
    /* Dump the raw YUV420 input planes for offline inspection. */
    if(pav->data[0]!=0) {
        fwrite((void *)pav->data[0],avctx->width*avctx->height,1,din);
        fwrite((void *)pav->data[1],avctx->width*avctx->height/4,1,din);
        fwrite((void *)pav->data[2],avctx->width*avctx->height/4,1,din);
    }
#endif

#ifdef USE_MMAP
    /* Copy the input into the shared mapping; NULL planes tell the driver
     * to capture internally. */
    {
        int y_sz,uv_sz;
        y_sz=(avctx->width*avctx->height);
        uv_sz=(avctx->width*avctx->height)/4;
        if(pav->data[0]!=0) {
            memcpy((char *)enc_mmap_addr,pav->data[0],y_sz);
            memcpy((char *)enc_mmap_addr+y_sz,pav->data[1],uv_sz);
            memcpy((char *)enc_mmap_addr+y_sz+uv_sz,pav->data[2],uv_sz);
            s->enc_param.pu8YUVAddr[0] = enc_mmap_addr;
            s->enc_param.pu8YUVAddr[1] = enc_mmap_addr+y_sz;
            s->enc_param.pu8YUVAddr[2] = enc_mmap_addr+y_sz+uv_sz;
        } else {
            for(i=0;i<s->enc_param.u8NumComponents;i++)
                s->enc_param.pu8YUVAddr[i]=0;  // capture YUV virtual addres
        }
    }
#else
    for(i=0;i<s->enc_param.u8NumComponents;i++)
        s->enc_param.pu8YUVAddr[i]=(unsigned char *)pav->data[i];  // capture YUV virtual addres
#endif
    s->enc_param.pu8BitstreamAddr = buf;

    /* Motion detection, phase 1: snapshot the reference deviation map
     * before encoding. A zero interval is treated as "every frame" to
     * avoid a modulo by zero. */
    if ( mdval.exist ) {
        unsigned int interval = s->motion.md_interval ? s->motion.md_interval : 1;

        if ((s->frm_cnt % interval) == 0)
            fmjpeg_get_buf(s->dev_buf.dev_post);
    }

    if ( s->enc_param.roi_enable == 1) {
        s->enc_param.roi_left_x = 0;
        s->enc_param.roi_left_y = 0;
        s->enc_param.roi_right_x = 320;
        s->enc_param.roi_right_y = 240;
    }

    if ( ioctl(fmjpeg_enc_fd,FMJPEG_IOCTL_ENCODE_ONE,&(s->enc_param)) < 0 )   {
        close(fmjpeg_enc_fd);
        fmjpeg_enc_fd = 0;   /* reset the "closed" sentinel */
        printf("Error to set FMJPEG_IOCTL_ENCODE_ONE\n");
        fflush(stdout);
        return -1;
    }
    bitstream_size = s->enc_param.bitstream_size;

    /* Motion detection, phase 2: fetch the post-encode map and compare. */
    if ( mdval.exist ) {
        unsigned int mb_w = s->enc_param.rgComponentInfo[0].m_u8HSamplingFrequency*8;
        /* height must come from the VERTICAL sampling factor */
        unsigned int mb_h = s->enc_param.rgComponentInfo[0].m_u8VSamplingFrequency*8;
        unsigned int mb_width  = (avctx->width + mb_w - 1)/mb_w;
        unsigned int mb_height = (avctx->height + mb_h - 1)/mb_h;
        unsigned int interval = s->motion.md_interval ? s->motion.md_interval : 1;

        if ((s->frm_cnt % interval) == 0)  {
            fmjpeg_get_buf(s->dev_buf.dev_curr);
            active.active0=0;
            active.active1=0;
            active.active2=0;
            motion_detection_function(s->dev_buf.dev_curr, s->dev_buf.dev_post, &s->motion, &active, mb_width, mb_height);
        }

        /*
         * MOTION ALARM ACTION
         * User can insert their motion alarm action in the follow.
         * Their motion alarm action can reference the 3 active value.
         * If every active is lager than user defined threshold value,
         * motion alarm will generate.
         */
        // if( (active.active0 >=100) && (active.active1>=100) && (active.active2>=100))
        // 	motion_alarm_action(active);
    }

#ifdef MJPG_DEBUG_OUT_FILE
    /* Dump every 32nd encoded picture to its own file. */
    out_ver++;
    if ( !(out_ver % 0x20) )  {
        sprintf(dout_name,"out%d.jpg",out_ver);
        dout=fopen(dout_name,"wb");
        printf("Use encoder output name %s\n",dout_name);
        fflush(stdout);
        fwrite((void *)buf, bitstream_size, 1, dout);
        fclose(dout);
    }
#endif

#ifdef MJPG_DEBUG_OUT_QUANT
    /* Debug-only: dump the first 0x400 bytes and spin forever. */
    { 
        FILE *qdout;
        char *qbuf;
        qdout=fopen("quant.txt","wb");
        qbuf = malloc(0x400*sizeof(char));
        fwrite((void *)buf, 0x400, 1, qdout);
        fclose(qdout);
        while(1)
            ;
    }
#endif
    s->frm_cnt++;

#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    s->pic_count++;
#endif

    return bitstream_size;
}


/* Open and configure the hardware JPEG/MJPEG encoder device.
 * Returns 0 on success, -1 on failure.
 *
 * Fixes: open(2) reports failure with -1, not 0, so the `==0` test could
 * never detect a failed open; the motion-detection buffers were sized with
 * sizeof(pointer) instead of the element size; on partial allocation
 * failure dev_curr was freed but left dangling (encoder_end would free it
 * again); error paths now reset the global fd sentinel. */
int fmjpeg_encoder_init(AVCodecContext *avctx)
{
    FMJpegEncContext    *s=(FMJpegEncContext *)avctx->priv_data;
    unsigned int        yuv_size=0;
    unsigned int        max_h_samp,max_v_samp;
    unsigned int        image_size[3];
    int                 YUVsampling, i;

    if(fmjpeg_enc_fd<=0)
        fmjpeg_enc_fd=open(FMJPEG_ENCODER_DEV,O_RDWR);
    if(fmjpeg_enc_fd<0)    {
        fmjpeg_enc_fd=0;    /* keep the "closed" sentinel consistent */
        printf("Fail to open %s\n",FMJPEG_ENCODER_DEV);
        fflush(stdout);
        return -1;
    }

#ifdef MJPG_DEBUG_OUT_FILE
        out_ver = 0;
#endif

#ifdef MJPG_DEBUG_IN_FILE
        sprintf(din_name,"in%d.yuv",getpid());
        din=fopen(din_name,"wb");
        printf("Use encoder input name %s\n",din_name);
        fflush(stdout);
#endif

    s->enc_param.u32ImageQuality = avctx->fmjpeg_image_quality; // we set image quality to 90 (0~100)  
    s->enc_param.u32RestartInterval = avctx->fmjpeg_restart; // we set restart interval to 5  
    s->enc_param.u32ImageWidth=avctx->width;  // set image width
    s->enc_param.u32ImageHeight=avctx->height; // set image height
    // to describe the YUV format through the following encoding parameters  
    s->enc_param.u8NumComponents = IMAGE_COMP; // the input image has 3 components 'YUV'  
    s->enc_param.u8JPGPIC = avctx->fmjpeg_jpeg;
     //0: for mp4, 1: for jpg, 2: for H.264 in 8120
     //0: for mp4, 0: for jpg, 2 for H.264 in 8180
    if ( avctx->grabtype  != 0xFFFFFFFF)
        s->enc_param.u82D = avctx->grabtype;
    else 
        s->enc_param.u82D = 1;

    s->enc_param.u32ImageMotionDetection = mdval.exist; //avctx->fmjpeg_md;
    // ROI information
    s->enc_param.roi_enable = 0;
    s->enc_param.roi_left_x = 0;
    s->enc_param.roi_left_y = 0;
    s->enc_param.roi_right_x = 320;
    s->enc_param.roi_right_y = 240;

    if(s->enc_param.u32ImageMotionDetection ) {
        /* One unsigned int of deviation data per 16x16 macroblock. */
        unsigned int   mb_width,mb_height;

        mb_width=(avctx->width+15)/16;
        mb_height=(avctx->height+15)/16;

        s->dev_buf.dev_curr=malloc(sizeof(*s->dev_buf.dev_curr)*mb_width*mb_height);
        if(s->dev_buf.dev_curr==0)    {
            printf("Can't allocate dev_curr memory!\n");
            return -1;
        }
        s->dev_buf.dev_post=malloc(sizeof(*s->dev_buf.dev_post)*mb_width*mb_height);
        if(s->dev_buf.dev_post==0)    {
            printf("Can't allocate dev_post memory!\n");
            free( s->dev_buf.dev_curr );
            s->dev_buf.dev_curr = 0;    /* avoid a second free in encoder_end */
            return -1;
        }
        motion_alarm_init(&s->motion);
    }

  	//YUVsampling = 0; // YUV420
  	//YUVsampling = 1; // YUV422
  	//YUVsampling = 2; // YUV211
  	//YUVsampling = 3; // YUV333
  	//YUVsampling = 4; // YUV222
  	//YUVsampling = 5; // YUV111
  	YUVsampling = avctx->fmjpeg_yuv_format;

    if(s->enc_param.u8NumComponents==1)
        YUVsampling = 5; // if there is only one component, it is gray, so we force it YUV111

    switch (YUVsampling)
    {
        case 0:
            SET_COMP(0, 2, 2);
            SET_COMP(1, 1,1);
            SET_COMP(2, 1,1);
            break;
        case 1:
            SET_COMP(0, 4,1);
            SET_COMP(1, 2,1);
            SET_COMP(2, 2,1);
            break;
        case 2:
            SET_COMP(0, 2,1);
            SET_COMP(1, 1,1);
            SET_COMP(2, 1,1);
            break;
        case 3:
            SET_COMP(0, 3,1);
            SET_COMP(1, 3,1);
            SET_COMP(2, 3,1);
            break;
        case 4:
            SET_COMP(0, 2,1);
            SET_COMP(1, 2,1);
            SET_COMP(2, 2,1);
            break;
        case 5:
            SET_COMP(0, 1,1);
            SET_COMP(1, 1,1);
            SET_COMP(2, 1,1);
            break;
        default:
            break;
    }  // to set each component's sampling factor (horizontally and vertically)

    // get the maximum horizontal sampling factor
    max_h_samp=MAX(s->enc_param.rgComponentInfo[0].m_u8HSamplingFrequency,
            MAX(s->enc_param.rgComponentInfo[1].m_u8HSamplingFrequency,
            s->enc_param.rgComponentInfo[2].m_u8HSamplingFrequency));
    // get the maximum vertical sampling factor
    max_v_samp=MAX(s->enc_param.rgComponentInfo[0].m_u8VSamplingFrequency,
            MAX(s->enc_param.rgComponentInfo[1].m_u8VSamplingFrequency,
            s->enc_param.rgComponentInfo[2].m_u8VSamplingFrequency)); 

  	// calculate each component size according to its maximum sampling factor
  	// and individual sampling factor
    for(i=0;i<s->enc_param.u8NumComponents;i++)  {
        image_size[i]=(((s->enc_param.rgComponentInfo[i].m_u8HSamplingFrequency*s->enc_param.u32ImageWidth) /max_h_samp) *
                ((s->enc_param.rgComponentInfo[i].m_u8VSamplingFrequency*s->enc_param.u32ImageHeight) /max_v_samp));
        yuv_size += image_size[i];      
    }

    avctx->coded_frame= &s->picture;

    if ( ioctl(fmjpeg_enc_fd,FMJPEG_IOCTL_ENCODE_CREATE,&(s->enc_param)) < 0 )  {
        close(fmjpeg_enc_fd);
        fmjpeg_enc_fd=0;
        printf("Error to set FMJPEG_IOCTL_ENCODE_CREATE\n");
        fflush(stdout);
        return -1;
    }
    s->frm_cnt = 0;

#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    s->pic_count=0;
    time_start( &(s->t1) );
#endif

    return 0;
}


/*
 * Tear down the hardware JPEG encoder session opened by the encoder init:
 * unmap the frame buffer (mmap build only), close the device node and
 * release the context's device buffers.  Always returns 0.
 */
int fmjpeg_encoder_end(AVCodecContext *avctx)
{
    FMJpegEncContext *s = (FMJpegEncContext *)avctx->priv_data;

    if (fmjpeg_enc_fd) {
#ifdef USE_MMAP
        /* size mirrors the YUV420 mapping made at init: w*h*3/2 bytes;
         * cast because enc_mmap_addr is stored as an int */
        munmap((void *)enc_mmap_addr, (avctx->width * avctx->height * 3) / 2);
#endif
        close(fmjpeg_enc_fd);
    }
    fmjpeg_enc_fd = 0;

    /* free(NULL) is a no-op, so no guards are needed; NULL the pointers so
     * a second call through this path cannot double-free them */
    free(s->dev_buf.dev_curr);
    s->dev_buf.dev_curr = NULL;
    free(s->dev_buf.dev_post);
    s->dev_buf.dev_post = NULL;

#ifdef MJPG_DEBUG_OUT_FILE
    out_ver = 0;
#endif

#ifdef MJPG_DEBUG_IN_FILE
    if (din) {          /* guard: init may have failed before opening din */
        fclose(din);
        din = NULL;
    }
#endif

#ifdef SHOW_PERFORMANCE_EVALUATION_RESULT
    s->nsecs = time_stop(&(s->t1)) / 1000;
    printf("spend %lld milliseconds with total frame count %d = %f(frames/per second)\n",
           s->nsecs, s->pic_count,
           (float)((float)s->pic_count) / ((float)s->nsecs / (float)1000.0));
    printf("JPEG decoding is done. %d frames are encoded\n", s->pic_count);
#endif

    return 0;
}

// for mpeg4 motion detection alarm 
// encode one jpg picture
/* Descriptor for an encoded-JPEG memory buffer.  NOTE(review): field
 * meanings inferred from the names -- confirm against the driver that
 * consumes this struct. */
typedef struct {
    unsigned int *jpg_phy_addr;   /* presumably the physical address of the JPEG bitstream buffer */
    unsigned int *jpg_virt_addr;  /* presumably the user-space (virtual) address of the same buffer */
    int size;                     /* buffer size, presumably in bytes */
}MJPG_ENC_MEM;

int fmjpeg_encoder_sj(int width,int height,char *data0,char *data1,char *data2, char *outfile)
{
#ifdef PATTERN_FILE
    FILE            *pin;
#endif
    FILE            *fout;
    FJPEG_ENC_PARAM enc_param;
    unsigned int    y_image_size,u_image_size,v_image_size;
    unsigned int    max_h_samp,max_v_samp;
    int             fjpeg_enc_fd=0;
    int             buf_size;
    char         *outbuf;

    if(fjpeg_enc_fd==0)
        fjpeg_enc_fd=open(FMJPEG_ENCODER_DEV,O_RDWR);
    if(fjpeg_enc_fd==0)  {
        printf("Fail to open %s\n",FMJPEG_ENCODER_DEV);
        fflush(stdout);
        goto open_device_fail;
    }

    buf_size = width*height +  width*height/2;
    outbuf=malloc(sizeof(char)*buf_size);
    if(!outbuf)   {
        printf("output buf allocate fail\n");
        fflush(stdout);
        goto 	alloc_outbuf_fail;
    }

    fout=fopen(outfile,"wb");
    if(!fout)  {
        printf("open outfile err\n");
        fflush(stdout);
        goto 	alloc_openfile_fail;	
    }
	
#ifdef PATTERN_FILE //test for sequential data
    pin = fopen("D1_420p.yuv","r");
    if(!pin)  {
        printf("input pattern file open err\n");
        fflush(stdout);
        goto open_yuv_file;		
    }
    fread(data0,width*height,1,pin);
    fread(data1,width*height/4,1,pin);
    fread(data2,width*height/4,1,pin);
    fclose(pin);	
#endif
		
    // to set each component's sampling factor (horizontally and vertically)
    // set Y component's sampling factor
    enc_param.rgComponentInfo[0].m_u8HSamplingFrequency=2;
    enc_param.rgComponentInfo[0].m_u8VSamplingFrequency=2;
    // set U component's sampling factor
    enc_param.rgComponentInfo[1].m_u8HSamplingFrequency=1;
    enc_param.rgComponentInfo[1].m_u8VSamplingFrequency=1;
    // set V component's sampling factor
    enc_param.rgComponentInfo[2].m_u8HSamplingFrequency=1;
    enc_param.rgComponentInfo[2].m_u8VSamplingFrequency=1;
    
    // to describe the YUV format through the following encoding parameters  
    enc_param.u8NumComponents = IMAGE_COMP; // the input image has 3 components 'YUV'  
    enc_param.u32ImageQuality=40; // we set image quality to 90 (0~100)  
    enc_param.u32RestartInterval=10; // we set restart interval to 5  
    enc_param.u32ImageWidth=width;  // set image width
    enc_param.u32ImageHeight=height; // set image height
    enc_param.u8JPGPIC = 1;
    enc_param.u32ImageMotionDetection=0;
    enc_param.u82D = 0;  	   //0 for mp4 1 for jpg 2 for H.264
    enc_param.roi_enable = 0;
    enc_param.roi_left_x = 120;
    enc_param.roi_left_y = 120;
    enc_param.roi_right_x = 440;
    enc_param.roi_right_y = 360;

    // get the maximum horizontal sampling factor
    max_h_samp=MAX(enc_param.rgComponentInfo[0].m_u8HSamplingFrequency,
    MAX(enc_param.rgComponentInfo[1].m_u8HSamplingFrequency,
    enc_param.rgComponentInfo[2].m_u8HSamplingFrequency));
    // get the maximum horizontal sampling factor
    max_v_samp=MAX(enc_param.rgComponentInfo[0].m_u8VSamplingFrequency,
    MAX(enc_param.rgComponentInfo[1].m_u8VSamplingFrequency,
    enc_param.rgComponentInfo[2].m_u8VSamplingFrequency));  
    
    // calculate each component size according to its maximum sampling factor
    // and individual sampling factor
    y_image_size=(((enc_param.rgComponentInfo[0].m_u8HSamplingFrequency*enc_param.u32ImageWidth)/max_h_samp)*
        ((enc_param.rgComponentInfo[0].m_u8VSamplingFrequency*enc_param.u32ImageHeight)/max_v_samp));
    u_image_size=(((enc_param.rgComponentInfo[1].m_u8HSamplingFrequency*enc_param.u32ImageWidth)/max_h_samp)*
        ((enc_param.rgComponentInfo[1].m_u8VSamplingFrequency*enc_param.u32ImageHeight)/max_v_samp));
    v_image_size=(((enc_param.rgComponentInfo[2].m_u8HSamplingFrequency*enc_param.u32ImageWidth)/max_h_samp)*
        ((enc_param.rgComponentInfo[2].m_u8VSamplingFrequency*enc_param.u32ImageHeight)/max_v_samp));     
    
    enc_param.pu8YUVAddr[0]=data0;
    enc_param.pu8YUVAddr[1]=data1;
    enc_param.pu8YUVAddr[2]=data2;
		  
    // to create the jpeg encoder object
    if ( ioctl(fjpeg_enc_fd,FMJPEG_IOCTL_ENCODE_CREATE,&enc_param )<0 ) {
        printf("Error to set FMJPEG_IOCTL_ENCODE_CREATE\n");
        fflush(stdout);
        goto create_fail; 
    }	
    
    // to begin to encode the input image
    enc_param.pu8BitstreamAddr = outbuf;
    if ( ioctl(fjpeg_enc_fd,FMJPEG_IOCTL_ENCODE_ONE,&enc_param) < 0 ) {
        printf("Error to set FMJPEG_IOCTL_ENCODE_ONE\n"); 
        fflush(stdout);
        goto encode_fail;
    }	

    fflush(stdout);
    fwrite(outbuf, enc_param.bitstream_size, 1, fout);
    fclose(fout);

   
    if(outbuf) 
        free(outbuf);
	
    return enc_param.bitstream_size; 
	
encode_fail:
create_fail:	
#ifdef PATTERN_FILE
open_yuv_file:
#endif
          fclose(fout);
alloc_openfile_fail:
	free(outbuf);
alloc_outbuf_fail:
	close(fjpeg_enc_fd);		
open_device_fail:
	return -1;
	
}

/* Decoder registration.  Positional fields follow the AVCodec layout from
 * the bundled libavcodec/avcodec.h: name, type, id, priv_data_size, init,
 * encode, close, decode, capabilities -- slot meanings inferred from the
 * matching encoder entry below; confirm against the header. */
AVCodec fmjpeg_decoder = {
    "fmjpeg_v2",                /* name; NOTE(review): encoder uses "mjpeg_v2" -- confirm the asymmetry is intended */
    CODEC_TYPE_VIDEO,
    CODEC_ID_MJPEG,
    sizeof(FMJpegDecContext),   /* per-instance private context size */
    fmjpeg_decoder_init,
    NULL,                       /* encode callback: none for a decoder */
    fmjpeg_decoder_end,
    fmjpeg_decode,
    0                           /* capabilities */
};

/* Encoder registration.  Positional fields follow the AVCodec layout from
 * the bundled libavcodec/avcodec.h: name, type, id, priv_data_size, init,
 * encode, close, decode, capabilities -- slot meanings inferred from the
 * matching decoder entry above; confirm against the header. */
AVCodec fmjpeg_encoder = {
    "mjpeg_v2",                 /* name */
    CODEC_TYPE_VIDEO,
    CODEC_ID_MJPEG,
    sizeof(FMJpegEncContext),   /* per-instance private context size */
    fmjpeg_encoder_init,
    fmjpeg_encode,
    fmjpeg_encoder_end,
    NULL,                       /* decode callback: none for an encoder */
    0                           /* capabilities */
};


