#include <jni.h>
#include <binder/ProcessState.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/openmax/OMX_IVCommon.h>
#include <utils/List.h>
#include <new>
#include <android/log.h>
//#include "EyeColorConverter.h"

//#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGD(...)

//#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
#define ERROR_END_OF_STREAM -0x10
#define ERROR_NO_MEM -0x11

using namespace android;

struct Frame {
	Frame() {
		status = OK;
		size = 0;
		time = 0;
		key = 0;
		buffer = NULL;
		mbuffer = NULL;
		w = 0;
		h = 0;
	}

    status_t status;
    size_t size;
    int64_t time;
    int key;
    uint8_t *buffer;
    MediaBuffer* mbuffer;
    int32_t w, h;
};

class CustomSource;

// Shared state for one decoder instance.  The JNI side pushes compressed
// frames into in_queue; decode_thread drains them through the OMX codec
// and pushes decoded frames into out_queue.
struct StagefrightContext {
	int64_t time;       // timestamp counter (currently unused; frames are fed with time 0)
	int32_t width;      // video width supplied by the Java caller
	int32_t height;     // video height supplied by the Java caller
	int32_t color_fmt;  // OMX color format reported by the decoder's output MetaData
	//int32_t pix_fmt;
    //AVCodecContext *avctx;
    //AVBitStreamFilterContext *bsfc;
    uint8_t* orig_extradata;       // leftover from the FFmpeg port (unused here)
    int orig_extradata_size;       // leftover from the FFmpeg port (unused here)
    sp<MediaSource> *source;       // heap-held strong ref to the CustomSource feeder
    List<Frame*> *in_queue, *out_queue;  // guarded by in_mutex / out_mutex respectively
    pthread_mutex_t in_mutex, out_mutex;
    pthread_cond_t condition;      // signalled when in_queue gains a frame
    pthread_t decode_thread_id;

    Frame *end_frame;              // pre-allocated EOS frame, consumed on shutdown/OOM
    bool source_done;              // true once EOS has been queued (no more input accepted)
    volatile sig_atomic_t thread_started, thread_exited, stop_decode;

    //AVFrame ret_frame;

    uint8_t *dummy_buf;            // copy of the first packet, re-fed on close (OMX.SEC workaround)
    int dummy_bufsize;             // size of dummy_buf in bytes

    OMXClient *client;             // connection to the media server
    sp<MediaSource> *decoder;      // heap-held strong ref to the OMXCodec instance
    const char *decoder_component; // heap copy (new[]) of the codec component name
    ColorConverter *converter;     // lazily created color_fmt -> RGB565 converter
};

// MediaSource implementation that feeds compressed frames from
// StagefrightContext::in_queue to the OMX decoder.  The decoder pulls
// data by calling read(), which blocks on s->condition until the JNI
// side has queued a frame.
class CustomSource : public MediaSource {
public:
	CustomSource(StagefrightContext *ctx, sp<MetaData> meta) {
        s = (StagefrightContext*)ctx;
        source_meta = meta;
        // One reusable buffer, sized for a whole YUV420 frame (worst case
        // for the compressed input as well).
        frame_size  = (s->width * s->height * 3) / 2;
        buf_group.add_buffer(new MediaBuffer(frame_size));
    }

    virtual sp<MetaData> getFormat() {
        return source_meta;
    }

    virtual status_t start(MetaData *params) {
    	LOGD("Source Start");
        return OK;
    }

    virtual status_t stop() {
    	LOGD("Source Stop");
        return OK;
    }

    // Blocks until a frame is available in s->in_queue, copies it into a
    // MediaBuffer for the decoder and frees the queued frame.  Returns the
    // frame's status (OK, ERROR_END_OF_STREAM, ...) or the buffer-group
    // error if no MediaBuffer could be acquired.
    virtual status_t read(MediaBuffer **buffer,
                          const MediaSource::ReadOptions *options) {
        Frame *frame;
        status_t ret;

        LOGD("read 1");
        if (s->thread_exited)
            return ERROR_END_OF_STREAM;
        pthread_mutex_lock(&s->in_mutex);

        LOGD("read 2");
        while (s->in_queue->empty())
            pthread_cond_wait(&s->condition, &s->in_mutex);

        LOGD("read 3");
        frame = *s->in_queue->begin();
        ret = frame->status;

        if (ret == OK) {
        	LOGD("read 4");
            ret = buf_group.acquire_buffer(buffer);
            if (ret == OK) {
            	LOGD("read 4 1 %p %d", frame->buffer, frame->size);
                memcpy((*buffer)->data(), frame->buffer, frame->size);
                (*buffer)->set_range(0, frame->size);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
                (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
            } else {
                //av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
            }
            // frame->buffer is allocated with new uint8_t[...]; releasing
            // it with plain `delete` (as the original did) is undefined
            // behavior -- it must be delete[].
            delete[] frame->buffer;
        }

        LOGD("read 5");
        s->in_queue->erase(s->in_queue->begin());
        pthread_mutex_unlock(&s->in_mutex);

        delete frame;
        return ret;
    }

private:
    MediaBufferGroup buf_group;  // recycles the single input MediaBuffer
    sp<MetaData> source_meta;    // format advertised to the decoder
    StagefrightContext *s;       // shared decoder state (not owned)
    int frame_size;              // size of the reusable input buffer
};

// Decoder worker thread.  Repeatedly pulls decoded buffers from the OMX
// decoder and queues them on out_queue until EOS, an error, an OOM, or
// stop_decode is raised.  Sets thread_exited before returning.
void* decode_thread(void *arg)
{
	StagefrightContext *s = (StagefrightContext*)arg;
    Frame* frame;
    MediaBuffer *buffer;
    int decode_done = 0;
    do {
    	LOGD("decode 1");
        buffer = NULL;
        // Use nothrow allocation so an OOM is reported through end_frame
        // instead of an unhandled std::bad_alloc: plain `new` never
        // returns NULL, which made the original `if (!frame)` dead code.
        frame = new (std::nothrow) Frame;
        if (!frame) {
            // Out of memory: recycle the pre-allocated end_frame to tell
            // the consumer, then shut the thread down.
            frame         = s->end_frame;
            frame->status = ERROR_NO_MEM;
            decode_done   = 1;
            s->end_frame  = NULL;
        } else {
        	LOGD("decode 1 1 %p %p %p", s, s->decoder, frame);
            frame->status = (*s->decoder)->read(&buffer);
            LOGD("decode 1 2");
            if (frame->status == OK) {
            	LOGD("decode 1 3");
                sp<MetaData> outFormat = (*s->decoder)->getFormat();
                outFormat->findInt32(kKeyWidth , &frame->w);
                outFormat->findInt32(kKeyHeight, &frame->h);
                frame->size    = buffer->range_length();
                frame->mbuffer = buffer;
                LOGD("decode 1 4");
            } else if (frame->status == INFO_FORMAT_CHANGED) {
            	LOGD("decode 1 5");
                // A format change carries no picture: drop it and read again.
                if (buffer)
                    buffer->release();
                delete frame;
                LOGD("decode 1 6");
                continue;
            } else {
                decode_done = 1;
            }
        }
        LOGD("decode 2");
        // Throttle: cap out_queue at 10 frames to bound memory use.
        while (true) {
            pthread_mutex_lock(&s->out_mutex);
            if (s->out_queue->size() >= 10) {
                pthread_mutex_unlock(&s->out_mutex);
                usleep(10000);
                continue;
            }
            break;
        }
        LOGD("decode 3");
        s->out_queue->push_back(frame);
        pthread_mutex_unlock(&s->out_mutex);
        LOGD("decode 4");
    } while (!decode_done && !s->stop_decode);

    s->thread_exited = true;

    return 0;
}

// Creates and starts the OMX H.264 decoder for the given context.
// Returns 0 on success.  On failure returns a negative value after
// releasing everything allocated here and NULL-ing the pointers, so a
// later cleanup pass cannot double-free them.  (The original returned an
// uninitialized `ret` on several failure paths and left dangling
// pointers behind for Stagefright_close to re-free.)
static int Stagefright_init(StagefrightContext* s)
{
    sp<MetaData> meta, outFormat;
    int32_t colorFormat = 0;
    int ret = 0;

    s->time = 0;
    s->source_done = false;
    s->thread_started = false;
    s->thread_exited = false;
    s->stop_decode = false;
    s->dummy_buf = NULL;
    s->dummy_bufsize = 0;
    s->orig_extradata = NULL;
    s->orig_extradata_size = 0;
    // NULL every owning pointer up front: the context is allocated with a
    // constructor-less `new`, so on an early `goto fail` these members
    // would otherwise hold garbage when the fail path tests them.
    s->source = NULL;
    s->in_queue = NULL;
    s->out_queue = NULL;
    s->client = NULL;
    s->end_frame = NULL;
    s->decoder = NULL;
    s->decoder_component = NULL;
    s->converter = NULL;

    // nothrow allocation keeps the NULL checks meaningful (plain `new`
    // throws instead of returning NULL).
    meta = new (std::nothrow) MetaData;
    if (meta == NULL) {
        ret = -1;
        goto fail;
    }
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    meta->setInt32(kKeyWidth, s->width);
    meta->setInt32(kKeyHeight, s->height);
    //meta->setData(kKeyAVCC, kTypeAVCC, avcc, sizeof(avcc));

    android::ProcessState::self()->startThreadPool();

    s->source    = new (std::nothrow) sp<MediaSource>();
    s->in_queue  = new (std::nothrow) List<Frame*>;
    s->out_queue = new (std::nothrow) List<Frame*>;
    s->client    = new (std::nothrow) OMXClient;
    s->end_frame = new (std::nothrow) Frame;
    if (!s->source || !s->in_queue || !s->out_queue || !s->client ||
        !s->end_frame) {
        ret = -1;
        goto fail;
    }
    // Assign only after the NULL check: the original dereferenced
    // s->source before verifying the allocation succeeded.
    *s->source = new CustomSource(s, meta);

    if (s->client->connect() != OK) {
        //av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
        ret = -1;
        goto fail;
    }

    s->decoder = new (std::nothrow) sp<MediaSource>();
    if (!s->decoder) {
        ret = -1;
        s->client->disconnect();
        goto fail;
    }
    *s->decoder = OMXCodec::Create(s->client->interface(), meta, false, *s->source, NULL, OMXCodec::kClientNeedsFramebuffer);
    if ((*s->decoder).get() == NULL || (*s->decoder)->start() != OK) {
        //av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
        ret = -1;
        s->client->disconnect();
        goto fail;
    }

    outFormat = (*s->decoder)->getFormat();
    outFormat->findInt32(kKeyColorFormat, &colorFormat);
    s->color_fmt = colorFormat;

    // Keep our own heap copy of the component name: the string returned
    // by findCString is owned by the decoder's MetaData.
    outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
    if (s->decoder_component) {
    	const char* compstr = s->decoder_component;
    	size_t compLen = strlen(compstr) + 1;
    	char* copy = new (std::nothrow) char[compLen];
    	if (copy != NULL)
    		memcpy(copy, compstr, compLen);  // includes the terminating NUL
    	s->decoder_component = copy;
    }

    pthread_mutex_init(&s->in_mutex, NULL);
    pthread_mutex_init(&s->out_mutex, NULL);
    pthread_cond_init(&s->condition, NULL);

    s->converter = NULL;
    return 0;

fail:
    // Release everything allocated so far and NULL the pointers so that
    // a subsequent Stagefright_close cannot double-free them.
    if (s->end_frame) {
        delete s->end_frame;
        s->end_frame = NULL;
    }
    if (s->in_queue) {
        delete s->in_queue;
        s->in_queue = NULL;
    }
    if (s->out_queue) {
        delete s->out_queue;
        s->out_queue = NULL;
    }
    if (s->decoder) {
        delete s->decoder;
        s->decoder = NULL;
    }
    if (s->source) {
        delete s->source;
        s->source = NULL;
    }
    if (s->client) {
        delete s->client;
        s->client = NULL;
    }
    return ret;
}

// Feeds one compressed frame (src/src_size) to the decoder and, when a
// decoded picture is ready, converts it to RGB565 into dst.
// A NULL src signals end of stream.
// Returns: decoded frame size in bytes on success; 0 when no output is
// available yet (or at end of stream); negative on error.
// NOTE(review): dst_size is not validated against w*h*2 here -- the JNI
// wrapper is trusted to pass a large enough buffer; confirm with callers.
static int Stagefright_decode_frame(StagefrightContext* s, int isKey, char* src, int src_size, char* dst, int dst_size)
{
    Frame *frame;
    MediaBuffer *mbuffer;
    status_t status;
    size_t size;
    uint8_t *buf;
    int w, h;
    int ret;

    LOGD("1");
    // Lazily start the decoder worker thread on the first call.
    if (!s->thread_started) {
    	pthread_create(&s->decode_thread_id, NULL, &decode_thread, s);
        s->thread_started = true;
    }

    LOGD("2");
    if (!s->source_done) {
    	LOGD("2 2");
        // Stash a copy of the first packet: it is re-fed during close to
        // unblock decoders (OMX.SEC) that need one more frame before EOS.
        // Guard on src so the EOS call (src == NULL) can't memcpy from NULL.
        if (!s->dummy_buf && src && src_size > 0) {
        	s->dummy_buf = new (std::nothrow) uint8_t[src_size];
            if (!s->dummy_buf) {
            	return -1;
            }
            s->dummy_bufsize = src_size;
            memcpy(s->dummy_buf, src, src_size);
        }

        // nothrow keeps the OOM checks meaningful (plain `new` throws).
        frame = new (std::nothrow) Frame;
        if (!frame)
            return -1;
        if (src) {
        	LOGD("2 3");
            frame->status  = OK;
            frame->size    = src_size;
            // Stagefright can't handle negative timestamps; this port
            // simply feeds 0 for every frame.
            frame->time = 0;
            frame->key = isKey;
            frame->buffer  = new (std::nothrow) uint8_t[src_size];
            if (!frame->buffer) {
            	LOGD("2 3 1");
            	delete frame;
            	return -1;
            }
            memcpy(frame->buffer, src, src_size);
        } else {
        	LOGD("2 4");
            // NULL src: queue an EOS marker and stop accepting input.
            frame->status  = ERROR_END_OF_STREAM;
            s->source_done = true;
        }

        LOGD("3");
        while (true) {
            if (s->thread_exited) {
                s->source_done = true;
                // The decoder thread is gone, so nobody will ever consume
                // this frame: free it here (the original leaked it).
                delete[] frame->buffer;
                delete frame;
                break;
            }
            pthread_mutex_lock(&s->in_mutex);
            // Bound the input queue at 10 frames.
            if (s->in_queue->size() >= 10) {
                pthread_mutex_unlock(&s->in_mutex);
                usleep(10000);
                continue;
            }
            s->in_queue->push_back(frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            break;
        }

        LOGD("4");
    }

    LOGD("5");
    // Wait for a decoded frame.  On the `break` below, out_mutex is still
    // held; it is released after the frame has been dequeued.
    while (true) {
        pthread_mutex_lock(&s->out_mutex);
        if (!s->out_queue->empty()) break;
        pthread_mutex_unlock(&s->out_mutex);
        if (s->source_done) {
            usleep(10000);
            continue;
        } else {
            // Decoder still warming up: report "no output yet".
        	return 0;
        }
    }

    LOGD("6");

    frame = *s->out_queue->begin();
    s->out_queue->erase(s->out_queue->begin());
    pthread_mutex_unlock(&s->out_mutex);

    mbuffer = frame->mbuffer;
    status  = frame->status;
    size    = frame->size;
    w       = frame->w;
    h       = frame->h;
    delete frame;

    if (status == ERROR_END_OF_STREAM)
        return 0;

    if (status != OK) {
    	if (status == ERROR_NO_MEM)
    		return status;
        //av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
        return -1;
    }

    LOGD("71 %d %d", w, h);
    // The per-plane src_data/src_linesize computation from the FFmpeg
    // original was removed: conversion now goes through ColorConverter,
    // which derives the layout from the OMX color format itself, so those
    // values were dead code here.
    buf = (uint8_t*)mbuffer->data();

    LOGD("7 %p %d %d", buf, w, h);
    // Lazily create the converter once the output color format is known;
    // use nothrow so an allocation failure is an error return, not a throw
    // that would leak mbuffer.
    if (NULL == s->converter)
    	s->converter = new (std::nothrow) ColorConverter((OMX_COLOR_FORMATTYPE)s->color_fmt, OMX_COLOR_Format16bitRGB565);
    if (NULL == s->converter) {
        mbuffer->release();
        return -1;
    }

    status_t convState = s->converter->convert(buf, w, h, 0, 0, w, h, dst, w, h, 0, 0, w, h);
    LOGD("8 %d", convState);
    ret = size;
    mbuffer->release();
    LOGD("9");
    return ret;
}

// Stops the decoder thread, drains both queues, and releases every
// resource owned by the context.  All owned pointers are NULL-checked
// before use and NULL-ed after release, so a partially-torn-down context
// is handled safely.  Always returns 0.
static int Stagefright_close(StagefrightContext *s)
{
    Frame *frame;
    LOGD("close 1");
    if (s->thread_started) {
    	LOGD("close 2");
        if (!s->thread_exited) {
            s->stop_decode = 1;

            LOGD("close 3");
            // Drain out_queue so decode_thread can't stay blocked on its
            // 10-entry high-water mark.
            pthread_mutex_lock(&s->out_mutex);
            while (!s->out_queue->empty()) {
                frame = *s->out_queue->begin();
                s->out_queue->erase(s->out_queue->begin());
                if (frame->size)
                    frame->mbuffer->release();
                delete frame;
            }
            pthread_mutex_unlock(&s->out_mutex);

            LOGD("close 4");
            // Feed a dummy frame prior to signalling EOF.
            // This is required to terminate the decoder (OMX.SEC)
            // when only one frame is read during stream info detection.
            if (s->dummy_buf && (frame = new (std::nothrow) Frame)) {
                frame->status = OK;
                frame->size   = s->dummy_bufsize;
                frame->key    = 1;
                frame->buffer = s->dummy_buf;  // ownership moves to the queue
                pthread_mutex_lock(&s->in_mutex);
                s->in_queue->push_back(frame);
                pthread_cond_signal(&s->condition);
                pthread_mutex_unlock(&s->in_mutex);
                s->dummy_buf = NULL;
            }

            // Queue the pre-allocated EOS frame so CustomSource::read()
            // wakes up and returns ERROR_END_OF_STREAM.
            pthread_mutex_lock(&s->in_mutex);
            s->end_frame->status = ERROR_END_OF_STREAM;
            s->in_queue->push_back(s->end_frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            s->end_frame = NULL;
        }

        LOGD("close 5");
        pthread_join(s->decode_thread_id, NULL);

        s->thread_started = false;
    }

    LOGD("close 6");
    // Drop any frames still queued for input.  Guarded: a failed init may
    // never have allocated the queues (the original dereferenced them
    // unconditionally here yet NULL-checked them further down).
    if (s->in_queue) {
        while (!s->in_queue->empty()) {
            frame = *s->in_queue->begin();
            s->in_queue->erase(s->in_queue->begin());
            if (frame->size) {
                // Allocated with new uint8_t[] -> must use delete[]
                // (the original plain `delete` was undefined behavior).
            	delete[] frame->buffer;
            }
            delete frame;
        }
    }

    LOGD("close 7");
    if (s->out_queue) {
        while (!s->out_queue->empty()) {
            frame = *s->out_queue->begin();
            s->out_queue->erase(s->out_queue->begin());
            if (frame->size)
                frame->mbuffer->release();
            delete frame;
        }
    }

    LOGD("close 8");
    if (s->decoder && (*s->decoder).get() != NULL)
        (*s->decoder)->stop();
    if (s->client)
        s->client->disconnect();

    LOGD("close 9");

    if (s->decoder_component) {
        // Allocated with new char[] -> delete[]
    	delete[] s->decoder_component;
    	s->decoder_component = NULL;
    }
    if (NULL != s->dummy_buf) {
        // Allocated with new uint8_t[] -> delete[]
    	delete[] s->dummy_buf;
    	s->dummy_buf = NULL;
    }
    if (NULL != s->end_frame) {
    	delete s->end_frame;
    	s->end_frame = NULL;
    }

    if (NULL != s->in_queue) {
    	delete s->in_queue;
    	s->in_queue = NULL;
    }
    if (NULL != s->out_queue) {
    	delete s->out_queue;
    	s->out_queue = NULL;
    }
    if (NULL != s->client) {
    	delete s->client;
    	s->client = NULL;
    }
    if (NULL != s->decoder) {
    	delete s->decoder;
    	s->decoder = NULL;
    }
    if (NULL != s->source) {
    	delete s->source;
    	s->source = NULL;
    }

    // NOTE(review): the mutexes/cond are initialized only when
    // Stagefright_init() succeeded; this function must only be called
    // after a successful init, matching current callers -- confirm if new
    // call sites are added.
    pthread_mutex_destroy(&s->in_mutex);
    pthread_mutex_destroy(&s->out_mutex);
    pthread_cond_destroy(&s->condition);

    LOGD("close 10");
    if (NULL != s->converter) {
    	delete s->converter;
    	s->converter = NULL;
    }

    LOGD("close 11");
    return 0;
}

//AVCodec ff_libstagefright_h264_decoder = {
//    "libstagefright_h264",
//    NULL_IF_CONFIG_SMALL("libstagefright H.264"),
//    AVMEDIA_TYPE_VIDEO,
//    CODEC_ID_H264,
//    CODEC_CAP_DELAY,
//    NULL, //supported_framerates
//    NULL, //pix_fmts
//    NULL, //supported_samplerates
//    NULL, //sample_fmts
//    NULL, //channel_layouts
//    0,    //max_lowres
//    NULL, //priv_class
//    NULL, //profiles
//    sizeof(StagefrightContext),
//    NULL, //next
//    NULL, //init_thread_copy
//    NULL, //update_thread_context
//    NULL, //defaults
//    NULL, //init_static_data
//    Stagefright_init,
//    NULL, //encode
//    NULL, //encode2
//    Stagefright_decode_frame,
//    Stagefright_close,
//};


//typedef struct EyeDecoderContext {
//	AVCodec*		mCodec;
//	AVCodecContext* mContext;
//	AVFrame* 		mFrame;
//	AVPacket*       mPacket;
//	struct SwsContext* 	mSwsContext;
//	uint8_t*        mRGBPtr[1];
//	int				mRGBStride[1];
//} EyeDecoderContext;

//static int init(EyeDecoderContext* context) {
//	int ret = 0;
//
//	if (NULL == context)
//		return 0;
//
//	context->mCodec = NULL;
//	context->mContext = NULL;
//	context->mSwsContext = NULL;
//	context->mFrame = NULL;
//	context->mPacket = NULL;
//
//	avcodec_register_all();
//
//	context->mCodec = avcodec_find_decoder(CODEC_ID_H264);
//	if (context->mCodec == NULL)
//		return 0;
//
//	context->mContext = avcodec_alloc_context3(context->mCodec);
//	if (context->mContext == NULL)
//		return 0;
//
//	context->mFrame = avcodec_alloc_frame();
//	if (context->mFrame == NULL)
//		return 0;
//
//	context->mContext->flags |= CODEC_FLAG_EMU_EDGE | CODEC_FLAG_LOW_DELAY;
//	//mContext->debug |= FF_DEBUG_MMCO;
//
//	ret = avcodec_open2(context->mContext, context->mCodec, NULL);
//	if (ret != 0)
//		return 0;
//
//	return 1;
//}
//
//static void destroy(EyeDecoderContext* context) {
//	if (NULL == context)
//		return;
//
//	if (NULL != context->mSwsContext) {
//		sws_freeContext(context->mSwsContext);
//		context->mSwsContext = NULL;
//	}
//
//	if (NULL != context->mPacket) {
//		av_free(context->mPacket);
//		context->mPacket = NULL;
//	}
//
//	if (NULL != context->mFrame) {
//		av_free(context->mFrame);
//		context->mFrame = NULL;
//	}
//
//	if (NULL != context->mContext) {
//		avcodec_close(context->mContext);
//		av_free(context->mContext);
//		context->mContext = NULL;
//	}
//
//	context->mCodec = NULL;
//}
//
//static int decodeFrame(EyeDecoderContext* context, char* src, int srcLen, char* dst, int dstLen) {
//	int ret = 0;
//	int gotten = 0;
//	int sws_flags = 1;
//	int dstH = 0;
//
//	if (NULL == context)
//		return -1;
//
//	if (NULL == context->mPacket)
//		context->mPacket = (AVPacket*)av_malloc(sizeof(AVPacket));
//
//	av_init_packet(context->mPacket);
//	context->mPacket->data = (uint8_t*)src;
//	context->mPacket->size = srcLen;
//	ret = avcodec_decode_video2(context->mContext, context->mFrame, &gotten, context->mPacket);
//	if (ret <= 0)
//		return ret;
//
//	//sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
//	context->mSwsContext = sws_getCachedContext(context->mSwsContext,
//			context->mFrame->width, context->mFrame->height, (PixelFormat)context->mFrame->format, context->mFrame->width, context->mFrame->height,
//			PIX_FMT_RGB565BE, sws_flags, NULL, NULL, NULL);
//	if (NULL == context->mSwsContext)
//		return -1;
//
//	if (context->mFrame->width*context->mFrame->height*2 > dstLen)
//		return -1;
//
//	context->mRGBPtr[0] = (uint8_t*)dst;
//	context->mRGBStride[0] = context->mFrame->width;
//    dstH = sws_scale(context->mSwsContext, context->mFrame->data, context->mFrame->linesize,
//              0, context->mFrame->height, context->mRGBPtr, context->mRGBStride);
//    if (dstH != context->mFrame->height)
//    	return -1;
//
//    return ret;
//}

// JNI entry points exported to the Java class ezeye.decoder.StageFrightDecoder.
// Declared with C linkage so the JVM can resolve them by mangled-free name.
extern "C" {
JNIEXPORT jlong Java_ezeye_decoder_StageFrightDecoder_Initialize( JNIEnv* env, jobject thiz, jint width, jint height );
JNIEXPORT void Java_ezeye_decoder_StageFrightDecoder_Destroy( JNIEnv* env, jobject thiz, jlong contextIn );
JNIEXPORT jint Java_ezeye_decoder_StageFrightDecoder_DecodeFrame( JNIEnv* env, jobject thiz, jlong contextIn, jboolean isKey, jbyteArray src, jint srcPos, jint srcLen, jbyteArray dst, jint dstPos, jint dstLen, jobject decodeInfo);
}

// Allocates and initializes a decoder context for a width x height H.264
// stream.  Returns an opaque handle (cast of the context pointer) or 0 on
// failure.
JNIEXPORT jlong Java_ezeye_decoder_StageFrightDecoder_Initialize( JNIEnv* env, jobject thiz, jint width, jint height )
{
	// Value-initialization ("()") zeroes every POD member so cleanup
	// paths can safely NULL-check pointers init may not have reached;
	// nothrow makes the NULL check below meaningful (plain `new` throws).
	StagefrightContext* context = new (std::nothrow) StagefrightContext();
	if (NULL == context)
		return (jlong)NULL;

	context->width = width;
	context->height = height;
	if (Stagefright_init(context) < 0) {
		// Stagefright_init's failure path releases everything it
		// allocated; calling Stagefright_close here (as the original did)
		// would re-free those members.  Only the context itself remains.
		delete context;
		return (jlong)NULL;
	}

	return (jlong)context;
}

// Tears down a decoder context previously returned by Initialize.
// A zero/NULL handle is ignored.
JNIEXPORT void Java_ezeye_decoder_StageFrightDecoder_Destroy( JNIEnv* env, jobject thiz, jlong contextIn )
{
	StagefrightContext* context = reinterpret_cast<StagefrightContext*>(contextIn);
	if (context == NULL)
		return;

	Stagefright_close(context);
	delete context;
}

//jint Java_com_example_hellojni_HelloJni_DecodeFrame( JNIEnv* env,
//                                                  jobject thiz, jlong contextIn, jbyteArray src, jint srcPos, jint srcLen, jbyteArray dst, jint dstPos, jint dstLen, jobject decodeInfo)
//{
//	EyeDecoderContext* context = (EyeDecoderContext*)contextIn;
//	if (NULL == context)
//		return -1;
//
//	jsize srcArrayLen = (*env)->GetArrayLength(env, src);
//	if (srcPos < 0 || srcLen <= 0 || (srcPos + srcLen <= 0) || (srcPos + srcLen > srcArrayLen))
//		return -1;
//
//	jsize dstArrayLen = (*env)->GetArrayLength(env, dst);
//	if (dstPos < 0 || dstLen <= 0 || (dstPos + dstLen <= 0) || (dstPos + dstLen > dstArrayLen))
//		return -1;
//
//	jbyte* srcData = (*env)->GetByteArrayElements(env, src, NULL);
//	if (NULL == srcData)
//		return -1;
//
//	jbyte* dstData = (*env)->GetByteArrayElements(env, dst, NULL);
//	if (NULL == dstData)
//		return -1;
//
//	int ret = decodeFrame(context, (char*)srcData+srcPos, srcLen, (char*)dstData+dstPos, dstLen);
//	if (ret <= 0)
//		return ret;
//
//	if (NULL != decodeInfo) {
//	    jclass objectClass = (*env)->FindClass(env, "eye/decoder/StageFrightDecoder$DecodeInfo");
//	    if (NULL != objectClass) {
//			jfieldID fSize = (*env)->GetFieldID(env, objectClass,"mSize","I");
//			jfieldID fWidth = (*env)->GetFieldID(env, objectClass,"mWidth","I");
//			jfieldID fHeight = (*env)->GetFieldID(env, objectClass,"mHeight","I");
//			int size = ret;
//			int width = context->mFrame->width;
//			int height = context->mFrame->height;
//			if (NULL != fSize)
//				(*env)->SetIntField(env, decodeInfo, fSize, size);
//			if (NULL != fWidth)
//				(*env)->SetIntField(env, decodeInfo, fWidth, width);
//			if (NULL != fHeight)
//				(*env)->SetIntField(env, decodeInfo, fHeight, height);
//	    }
//	}
//
//	return ret;
//}

// Decodes one compressed frame from src[srcPos..srcPos+srcLen) into
// dst[dstPos..dstPos+dstLen) as RGB565 and fills the optional DecodeInfo
// object with size/width/height.  Returns the decoded byte count, 0 when
// no output is ready yet, or negative on error.
JNIEXPORT jint Java_ezeye_decoder_StageFrightDecoder_DecodeFrame( JNIEnv* env,
                                                  jobject thiz, jlong contextIn, jboolean isKey, jbyteArray src, jint srcPos, jint srcLen, jbyteArray dst, jint dstPos, jint dstLen, jobject decodeInfo)
{
	StagefrightContext* context = (StagefrightContext*)contextIn;
	if (NULL == context)
		return -1;

	// Bounds-check the requested windows against the actual array sizes.
	jsize srcArrayLen = env->GetArrayLength(src);
	if (srcPos < 0 || srcLen <= 0 || (srcPos + srcLen <= 0) || (srcPos + srcLen > srcArrayLen))
		return -1;

	jsize dstArrayLen = env->GetArrayLength(dst);
	if (dstPos < 0 || dstLen <= 0 || (dstPos + dstLen <= 0) || (dstPos + dstLen > dstArrayLen))
		return -1;

	jbyte* srcData = env->GetByteArrayElements(src, NULL);
	if (NULL == srcData)
		return -1;

	jbyte* dstData = env->GetByteArrayElements(dst, NULL);
	if (NULL == dstData) {
		// Release the source elements before bailing out; JNI_ABORT
		// because nothing was written into src.
		env->ReleaseByteArrayElements(src, srcData, JNI_ABORT);
		return -1;
	}

	int isKeyFrame = (isKey != 0) ? 1 : 0;
	int decodedLen = Stagefright_decode_frame(context, isKeyFrame, (char*)srcData+srcPos, srcLen, (char*)dstData+dstPos, dstLen);

	// Always release what GetByteArrayElements returned (the original
	// never did, leaking/pinning both arrays on every call).  Mode 0 on
	// dst commits the decoded pixels back to the Java array; JNI_ABORT on
	// src discards the unmodified input copy.
	env->ReleaseByteArrayElements(dst, dstData, 0);
	env->ReleaseByteArrayElements(src, srcData, JNI_ABORT);

	if (decodedLen <= 0)
		return decodedLen;

	if (NULL != decodeInfo) {
	    jclass objectClass = env->FindClass("ezeye/decoder/StageFrightDecoder$DecodeInfo");
	    if (NULL != objectClass) {
			jfieldID fSize = env->GetFieldID(objectClass,"mSize","I");
			jfieldID fWidth = env->GetFieldID(objectClass,"mWidth","I");
			jfieldID fHeight = env->GetFieldID(objectClass,"mHeight","I");
			int size = decodedLen;
			// Report the caller-configured dimensions; the decoder's
			// actual output size is not propagated back up here.
			int width = context->width;
			int height = context->height;
			if (NULL != fSize)
				env->SetIntField(decodeInfo, fSize, size);
			if (NULL != fWidth)
				env->SetIntField(decodeInfo, fWidth, width);
			if (NULL != fHeight)
				env->SetIntField(decodeInfo, fHeight, height);
	    }
	}

	return decodedLen;
}
