//
//  MoboPlayerDecoder.m
//  MoboPlayer
//
//  Created by Kolyvan on 15.10.12.
//  Copyright (c) 2012 Konstantin Boukreev . All rights reserved.
//
//  https://github.com/kolyvan/MoboPlayer
//  this file is part of MoboPlayer
//  MoboPlayer is licenced under the LGPL v3, see lgpl-3.0.txt

#import "MoboPlayerDecoder.h"
#import <UIKit/UIKit.h>
#import "MoboAudioManager.h"
#import "MoboLogger.h"
#import "libyuv.h"
#import <libavfilter/buffersrc.h>
#import <libavfilter/buffersink.h>
////////////////////////////////////////////////////////////////////////////////
NSString * MoboPlayerErrorDomain = @"ru.kolyvan.MoboPlayer";
static void FFLog(void* context, int level, const char* format, va_list args);

// Builds an NSError in the MoboPlayer domain. `info` may be an
// NSDictionary (used verbatim as userInfo), an NSString (wrapped as
// the localized description), or anything else (ignored → nil userInfo).
static NSError * moboPlayerError (NSInteger code, id info)
{
    NSDictionary *userInfo;

    if ([info isKindOfClass:[NSDictionary class]]) {
        userInfo = info;
    } else if ([info isKindOfClass:[NSString class]]) {
        userInfo = @{ NSLocalizedDescriptionKey : info };
    } else {
        userInfo = nil;
    }

    return [NSError errorWithDomain:MoboPlayerErrorDomain
                               code:code
                           userInfo:userInfo];
}

// Maps a MoboPlayerError code to a localized, user-presentable message.
static NSString * errorMessage (MoboPlayerError errorCode)
{
    switch (errorCode) {
        case MoboPlayerErrorNone:
            return @"";

        case MoboPlayerErrorOpenFile:
            return NSLocalizedString(@"Unable to open file", nil);

        case MoboPlayerErrorStreamInfoNotFound:
            return NSLocalizedString(@"Unable to find stream information", nil);

        case MoboPlayerErrorStreamNotFound:
            return NSLocalizedString(@"Unable to find stream", nil);

        case MoboPlayerErrorCodecNotFound:
            return NSLocalizedString(@"Unable to find codec", nil);

        case MoboPlayerErrorOpenCodec:
            return NSLocalizedString(@"Unable to open codec", nil);

        case MoboPlayerErrorAllocateFrame:
            return NSLocalizedString(@"Unable to allocate frame", nil);

        case MoboPlayerErroSetupScaler:
            return NSLocalizedString(@"Unable to setup scaler", nil);

        case MoboPlayerErroReSampler:
            return NSLocalizedString(@"Unable to setup resampler", nil);

        case MoboPlayerErroUnsupported:
            return NSLocalizedString(@"The ability is not supported", nil);
    }
    // Fix: without this, an out-of-range code fell off the end of the
    // function, which is undefined behavior in C (garbage return value).
    return NSLocalizedString(@"Unknown error", nil);
}

////////////////////////////////////////////////////////////////////////////////

// YES when decoded audio can be handed to the audio manager without
// resampling: signed 16-bit samples at the device's sample rate and
// output channel count.
static BOOL audioCodecIsSupported(AVCodecContext *audio)
{
    if (audio->sample_fmt != AV_SAMPLE_FMT_S16)
        return NO;

    id<MoboAudioManager> audioManager = [MoboAudioManager audioManager];
    if ((int)audioManager.samplingRate != audio->sample_rate)
        return NO;

    return audioManager.numOutputChannels == audio->channels;
}


// Derives the frames-per-second and the time base (seconds per tick)
// of a stream. Falls back to `defaultTimeBase` when the stream carries
// no usable time base, and to 1/timebase when no frame rate is known.
// Either out-pointer may be NULL.
static void avStreamFPSTimeBase(AVStream *st, CGFloat defaultTimeBase, CGFloat *pFPS, CGFloat *pTimeBase)
{
    CGFloat timebase = defaultTimeBase;
    if (st->time_base.den && st->time_base.num)
        timebase = av_q2d(st->time_base);

    CGFloat fps;
    if (st->avg_frame_rate.den && st->avg_frame_rate.num)
        fps = av_q2d(st->avg_frame_rate);
    else if (st->r_frame_rate.den && st->r_frame_rate.num)
        fps = av_q2d(st->r_frame_rate);
    else
        fps = 1.0 / timebase;

    if (pTimeBase)
        *pTimeBase = timebase;
    if (pFPS)
        *pFPS = fps;
}

// Returns the indices (as NSNumbers) of every stream in `formatCtx`
// whose codec type matches `codecType`.
static NSArray *collectStreams(AVFormatContext *formatCtx, enum AVMediaType codecType)
{
    NSMutableArray *indices = [NSMutableArray array];
    for (NSInteger i = 0; i < formatCtx->nb_streams; ++i) {
        if (formatCtx->streams[i]->codecpar->codec_type == codecType)
            [indices addObject:@(i)];
    }
    return [indices copy];
}

// Copies one plane of frame data into a tightly-packed NSData. Rows in
// `src` are `linesize` bytes apart; only the first min(linesize, width)
// bytes of each of the `height` rows are kept (padding is dropped).
static NSData * copyFrameData(UInt8 *src, int linesize, int width, int height)
{
    const int rowBytes = MIN(linesize, width);
    NSMutableData *packed = [NSMutableData dataWithLength: rowBytes * height];
    Byte *dst = packed.mutableBytes;
    for (int row = 0; row < height; ++row) {
        memcpy(dst, src, rowBytes);
        dst += rowBytes;
        src += linesize;
    }
    return packed;
}




// YES when `path` looks like a remote URL (carries a scheme other than
// "file"); NO for plain filesystem paths and file:// URLs.
static BOOL isNetworkPath (NSString *path)
{
    NSRange r = [path rangeOfString:@":"];
    if (r.location == NSNotFound)
        return NO;
    // Fix: the scheme is everything before the colon, i.e. the first
    // r.location characters. The old code used r.length (always 1 for a
    // one-character search), so "file://…" was misclassified as network.
    NSString *scheme = [path substringToIndex:r.location];
    if ([scheme isEqualToString:@"file"])
        return NO;
    return YES;
}

static int interrupt_callback(void *ctx);



////字幕信息管理类
//@interface SubtitleDataClass ()
//@property (readwrite,nonatomic) AVFormatContext     *_fmt_ctx;
////@property (readonly,nonatomic) AVCodecContext      *CodecCtx;
//@property (readwrite,nonatomic) AVStream            *_subtitlestream;
//@property (readwrite,nonatomic) NSUInteger           _subtitle_index;
//@property (readwrite,nonatomic) BOOL                 _is_decoding;       //是否被选择解码
//@property (readwrite,nonatomic) BOOL                 _is_seaching;       //是否是外置字幕
//@end

////////////////////////////////////////////////////////////////////////////////

// Class extension: the decoder writes position/duration; the public
// header exposes them read-only.
@interface MoboPlayerFrame()
@property (readwrite, nonatomic) CGFloat position;  // presentation time, seconds
@property (readwrite, nonatomic) CGFloat duration;  // on-screen duration, seconds
@end

// Base class for decoded frames; concrete subclasses report their -type.
@implementation MoboPlayerFrame
@end

// Decoded audio frame; `samples` holds raw sample bytes ready for the
// audio manager.
@interface MoboAudioFrame()
@property (readwrite, nonatomic, strong) NSData *samples;
@end

@implementation MoboAudioFrame
- (MoboPlayerFrameType) type { return MoboPlayerFrameTypeAudio; }
@end

// Common base for decoded video frames; pixel dimensions are set by
// the decoder.
@interface MoboVideoFrame()
@property (readwrite, nonatomic) NSUInteger width;   // pixels
@property (readwrite, nonatomic) NSUInteger height;  // pixels
@end

@implementation MoboVideoFrame
- (MoboPlayerFrameType) type { return MoboPlayerFrameTypeVideo; }
@end

// Packed 24-bit RGB video frame.
@interface MoboVideoFrameRGB ()
@property (readwrite, nonatomic) NSUInteger linesize;
@property (readwrite, nonatomic, strong) NSData *rgb;
@end

@implementation MoboVideoFrameRGB
- (MoboVideoFrameFormat) format { return MoboVideoFrameFormatRGB; }

// Wraps the RGB pixel buffer in a UIImage; returns nil when any
// CoreGraphics object fails to allocate.
- (UIImage *) asImage
{
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)(_rgb));
    if (!provider)
        return nil;

    UIImage *image = nil;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace) {
        CGImageRef imageRef = CGImageCreate(self.width,
                                            self.height,
                                            8,                          // bits per component
                                            24,                         // bits per pixel: RGB, no alpha
                                            self.linesize,              // bytes per row
                                            colorSpace,
                                            kCGBitmapByteOrderDefault,
                                            provider,
                                            NULL,                       // no decode array
                                            YES,                        // interpolate
                                            kCGRenderingIntentDefault);
        if (imageRef) {
            image = [UIImage imageWithCGImage:imageRef];
            CGImageRelease(imageRef);
        }
        CGColorSpaceRelease(colorSpace);
    }
    CGDataProviderRelease(provider);

    return image;
}
@end

// Planar YUV video frame: one luma plane and two chroma planes, for
// rendering with a YUV-aware view.
@interface MoboVideoFrameYUV()
@property (readwrite, nonatomic, strong) NSData *luma;
@property (readwrite, nonatomic, strong) NSData *chromaB;
@property (readwrite, nonatomic, strong) NSData *chromaR;
@end

@implementation MoboVideoFrameYUV
- (MoboVideoFrameFormat) format { return MoboVideoFrameFormatYUV; }
@end

// Cover-art frame: `picture` holds the attached picture's raw bytes.
@interface MoboArtworkFrame()
@property (readwrite, nonatomic, strong) NSData *picture;
@end

@implementation MoboArtworkFrame
- (MoboPlayerFrameType) type { return MoboPlayerFrameTypeArtwork; }

// Decodes the attached-picture bytes as JPEG into a UIImage; returns
// nil when the data cannot be decoded.
- (UIImage *) asImage
{
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)(_picture));
    if (!provider)
        return nil;

    UIImage *image = nil;
    CGImageRef imageRef = CGImageCreateWithJPEGDataProvider(provider,
                                                            NULL,
                                                            YES,
                                                            kCGRenderingIntentDefault);
    if (imageRef) {
        image = [UIImage imageWithCGImage:imageRef];
        CGImageRelease(imageRef);
    }
    CGDataProviderRelease(provider);

    return image;
}
@end

// A decoded subtitle cue; `text` is the string to render on screen.
@interface MoboSubtitleFrame()
@property (readwrite, nonatomic, strong) NSString *text;
@end

@implementation MoboSubtitleFrame
- (MoboPlayerFrameType) type { return MoboPlayerFrameTypeSubtitle; }
@end




// Bookkeeping for one subtitle stream, either embedded in the main
// file or loaded from an external subtitle file.
// NOTE(review): several property names here carry a leading underscore
// (`_fmt_ctx`, `_subtitlestream`, `_is_decoding`, `_is_seaching`),
// while call sites in this file use un-prefixed names (`fmt_ctx`,
// `subtitlestream`, `is_decoding`, `is_seaching`) — confirm against
// the header that these extension declarations match the public
// property names.
@interface SubtitleDataClass()
@property (readwrite,nonatomic) AVFormatContext     *_fmt_ctx;           // demuxer this stream belongs to
@property (readwrite,nonatomic) AVCodecContext      *CodecCtx;           // decoder context for the stream
@property (readwrite,nonatomic) AVStream            *_subtitlestream;
@property (readwrite,nonatomic) NSUInteger           insubtitle_index;   // stream index inside its own container
@property (readwrite,nonatomic) NSUInteger           outsubtitle_index;  // index in the managed subtitle array
@property (readwrite,nonatomic) BOOL                 _is_decoding;       // selected for decoding
@property (readwrite,nonatomic) BOOL                 _is_seaching;       // YES for an external subtitle file
@end

@implementation SubtitleDataClass

@end

////////////////////////////////////////////////////////////////////////////////

// Private decoder state.
// Fix: this block contained an unresolved git merge conflict
// (<<<<<<< / ======= / >>>>>>> markers around `_filt_frame`) and a
// duplicated set of filter-graph ivar declarations, both of which
// break compilation. The conflict is resolved by keeping `_filt_frame`
// declared and a single set of filter-graph ivars.
@interface MoboPlayerDecoder () {

    AVFormatContext     *_formatCtx;        // demuxer for the open file
    AVCodecContext      *_videoCodecCtx;
    AVCodecContext      *_audioCodecCtx;
    AVCodecContext      *_subtitleCodecCtx;

    AVFrame             *_videoFrame;       // reusable video decode target
    AVFrame             *_currentvideoFrame;
    AVFrame             *_audioFrame;       // reusable audio decode target
    AVFrame             *_filt_frame;       // output frame of the audio filter graph

    NSInteger           _videoStream;       // active video stream index, -1 if none
    NSInteger           _audioStream;       // active audio stream index, -1 if none

    BOOL                _pictureValid;
    struct SwsContext   *_swsContext;       // pixel-format / scaling converter
    CGFloat             _videoTimeBase;     // seconds per video stream tick
    CGFloat             _audioTimeBase;     // seconds per audio stream tick
    CGFloat             _position;          // last seek position, seconds
    NSArray             *_videoStreams;     // NSNumber indices of all video streams
    NSArray             *_audioStreams;     // NSNumber indices of all audio streams
    SwrContext          *_swrContext;       // audio resampler
    void                *_swrBuffer;        // scratch buffer for resampled audio
    NSUInteger          _swrBufferSize;
    NSDictionary        *_streaminfo;       // lazily built; see -streaminfo
    MoboVideoFrameFormat  _videoFrameFormat;
    NSUInteger          _artworkStream;
    NSInteger           _subtitleASSEvents;
    NSLock              *_decodelock;       // serializes decode vs. snapshot access
    NSLock              *_filterlock;       // serializes filter-graph rebuilds

    // Audio filter graph (e.g. "atempo" for playback-speed changes).
    AVFilterContext     *buffersink_ctx;
    AVFilterContext     *buffersrc_ctx;
    AVFilterGraph       *filter_graph;
}
@end

@implementation MoboPlayerDecoder

// All of these are implemented as hand-written accessors below;
// @dynamic suppresses auto-synthesis of backing ivars.
@dynamic duration;
@dynamic position;
@dynamic frameWidth;
@dynamic frameHeight;
@dynamic sampleRate;
@dynamic audioStreamsCount;
@dynamic subtitleStreamsCount;
@dynamic selectedAudioStream;
@dynamic selectedSubtitleStreams;
@dynamic validAudio;
@dynamic validVideo;
@dynamic validSubtitles;
@dynamic streaminfo;
@dynamic videoStreamFormatName;
@dynamic startTime;

// Total media duration in seconds: 0 before a file is opened, MAXFLOAT
// when the container reports no duration (e.g. a live stream).
- (CGFloat) duration
{
    if (_formatCtx == NULL)
        return 0;
    return (_formatCtx->duration == AV_NOPTS_VALUE)
        ? MAXFLOAT
        : (CGFloat)_formatCtx->duration / AV_TIME_BASE;
}

// Last position set via -setPosition:, in seconds.
- (CGFloat) position
{
    return _position;
}

// Seeks to `seconds`, keyed off the video stream when one is open,
// otherwise the audio stream, then flushes the codec so stale frames
// are not emitted after the jump.
// NOTE(review): `ts` is computed in stream time-base ticks but the
// call passes AVSEEK_FLAG_FRAME (timestamp-in-frames) — confirm this
// flag is intended.
- (void) setPosition: (CGFloat)seconds
{
    _position = seconds;
    _isEOF = NO;
    if (_videoStream != -1) {
        // seconds → video stream time-base units
        int64_t ts = (int64_t)(seconds / _videoTimeBase);
           avformat_seek_file(_formatCtx, (int)_videoStream, 0, ts, INT64_MAX, AVSEEK_FLAG_FRAME);
        avcodec_flush_buffers(_videoCodecCtx);
    }else  if (_audioStream != -1) {
        // seconds → audio stream time-base units
        int64_t ts = (int64_t)(seconds / _audioTimeBase);
        avformat_seek_file(_formatCtx, (int)_audioStream, 0, ts, INT64_MAX, AVSEEK_FLAG_FRAME);
        avcodec_flush_buffers(_audioCodecCtx);
    }
}


// Decodes the next video frame and converts it to a 24-bit RGB UIImage
// of pwidth x pheight pixels (pass 0,0 for the native frame size).
// Returns nil on failure. Serialized against the decode loop with
// _decodelock.
//
// Fixes over the previous version:
//  * demuxed packets are now unreferenced (every packet leaked before),
//  * the temporary AVPicture and the SwsContext are freed on all paths
//    (both leaked before),
//  * a NULL scaler context no longer reaches sws_scale (crash),
//  * the RGB buffer is allocated at the *output* size, so requesting a
//    size taller than the source no longer reads past the buffer.
-(UIImage *)savepicture:(NSInteger) pwidth
                 height:(NSInteger) pheight;
{
    [_decodelock lock];

    UIImage *image = nil;
    AVPacket packet;
    BOOL finished = NO;

    // Pull packets until one complete video frame has been decoded.
    while (!finished) {

        if (av_read_frame(_formatCtx, &packet) < 0) {
            _isEOF = YES;
            break;
        }

        if (packet.stream_index == _videoStream) {
            int len = avcodec_send_packet(_videoCodecCtx, &packet);
            do {
                len = avcodec_receive_frame(_videoCodecCtx, _videoFrame);
                if (len == 0 && _videoFrame->data[0]) {
                    finished = YES;
                    break;
                }
            } while (len == 0);
        }
        av_packet_unref(&packet);
    }

    // No frame ever decoded (e.g. immediate EOF): bail out rather than
    // scale uninitialized frame data.
    if (!_videoFrame || !_videoFrame->data[0]) {
        [_decodelock unlock];
        return nil;
    }

    if (pwidth == 0 || pheight == 0) {
        pwidth  = _videoCodecCtx->width;
        pheight = _videoCodecCtx->height;
    }

    AVPicture rgbPicture;
    if (avpicture_alloc(&rgbPicture,
                        AV_PIX_FMT_RGB24,
                        (int)pwidth,
                        (int)pheight) != 0) {
        [_decodelock unlock];
        return nil;
    }

    struct SwsContext *swsContext = sws_getCachedContext(NULL,
                                                         _videoCodecCtx->width,
                                                         _videoCodecCtx->height,
                                                         _videoCodecCtx->pix_fmt,
                                                         (int32_t)pwidth,
                                                         (int32_t)pheight,
                                                         AV_PIX_FMT_RGB24,
                                                         SWS_FAST_BILINEAR,
                                                         NULL, NULL, NULL);
    if (!swsContext) {
        avpicture_free(&rgbPicture);
        [_decodelock unlock];
        return nil;
    }

    sws_scale(swsContext,
              (const uint8_t **)_videoFrame->data,
              _videoFrame->linesize,
              0,
              _videoCodecCtx->height,
              rgbPicture.data,
              rgbPicture.linesize);
    sws_freeContext(swsContext);

    // NSData copies the pixels, so the AVPicture can be released as
    // soon as the copy exists.
    NSData *rgb = [NSData dataWithBytes:rgbPicture.data[0]
                                 length:rgbPicture.linesize[0] * pheight];
    size_t bytesPerRow = rgbPicture.linesize[0];
    avpicture_free(&rgbPicture);

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)(rgb));
    if (provider) {
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        if (colorSpace) {
            CGImageRef imageRef = CGImageCreate(pwidth,
                                                pheight,
                                                8,              // bits per component
                                                24,             // bits per pixel (RGB, no alpha)
                                                bytesPerRow,
                                                colorSpace,
                                                kCGBitmapByteOrderDefault,
                                                provider,
                                                NULL,
                                                YES,
                                                kCGRenderingIntentDefault);
            if (imageRef) {
                image = [UIImage imageWithCGImage:imageRef];
                CGImageRelease(imageRef);
            }
            CGColorSpaceRelease(colorSpace);
        }
        CGDataProviderRelease(provider);
    }

    [_decodelock unlock];
    return image;
}
// Decoded video width in pixels; 0 when no video codec is open.
- (NSUInteger) frameWidth
{
    if (!_videoCodecCtx)
        return 0;
    return _videoCodecCtx->width;
}

// Decoded video height in pixels; 0 when no video codec is open.
- (NSUInteger) frameHeight
{
    if (!_videoCodecCtx)
        return 0;
    return _videoCodecCtx->height;
}

// Audio sample rate in Hz; 0 when no audio codec is open.
- (CGFloat) sampleRate
{
    if (!_audioCodecCtx)
        return 0;
    return _audioCodecCtx->sample_rate;
}

// Number of audio streams discovered in the container.
- (NSUInteger) audioStreamsCount
{
    return _audioStreams.count;
}
// Human-readable names for every audio stream, in _audioStreams order.
// Uses the stream's "title" metadata tag when present, otherwise a
// generated "unknownN" placeholder.
- (NSArray *) audioStreamsNSArray
{
    NSMutableArray *names = [NSMutableArray array];
    int unknownCount = 0;

    for (NSNumber *n in _audioStreams) {

        AVStream *st = _formatCtx->streams[n.integerValue];
        AVDictionaryEntry *titleTag = av_dict_get(st->metadata, "title", NULL, 0);

        if (titleTag && titleTag->value) {
            [names addObject:[NSString stringWithUTF8String:titleTag->value]];
        } else {
            ++unknownCount;
            [names addObject:[NSString stringWithFormat:@"unknown%d", unknownCount]];
        }
    }

    return [names copy];
}
// Number of managed subtitle streams (internal and external).
- (NSUInteger) subtitleStreamsCount
{
    return _SubtitleStreamCodecMangeArray.count;
}
// Display names for every managed subtitle stream, in the same order
// as _SubtitleStreamCodecMangeArray. Internal streams use their
// "title" metadata tag; external files use their stored path.
//
// Fix: internal streams without a "title" tag previously contributed
// nothing, so the returned array could fall out of step with the
// stream list. They now get an "unknownN" placeholder, mirroring
// -audioStreamsNSArray.
-(NSArray*) subtitleStreamsNsArray
{
    NSMutableArray *ma = [NSMutableArray array];
    int unknownCount = 0;

    for (SubtitleDataClass *msubtitledata in _SubtitleStreamCodecMangeArray) {

        if (!msubtitledata.is_seaching) {
            AVDictionaryEntry *langtitle = av_dict_get(msubtitledata.subtitlestream->metadata, "title", NULL, 0);
            if (langtitle && langtitle->value) {
                [ma addObject:[NSString stringWithUTF8String:langtitle->value]];
            } else {
                ++unknownCount;
                [ma addObject:[NSString stringWithFormat:@"unknown%d", unknownCount]];
            }
        } else {
            // External subtitle file: its display name is the path it
            // was registered with.
            [ma addObject:msubtitledata.filepath];
        }
    }
    return [ma copy] ;
}

// Index of the active audio stream within _audioStreams (not the raw
// container index); -1 when no audio stream is open.
- (NSInteger) selectedAudioStream
{
    if (_audioStream == -1)
        return -1;
    return [_audioStreams indexOfObject:@(_audioStream)];
}

// Switches decoding to the audio stream at `selectedAudioStream`, an
// index into _audioStreams. Closes the current stream first; failures
// are logged.
- (void) setSelectedAudioStream:(NSInteger)selectedAudioStream
{
    NSInteger streamIndex = [_audioStreams[selectedAudioStream] integerValue];
    [self closeAudioStream];

    MoboPlayerError errCode = [self openAudioStream:streamIndex];
    if (errCode != MoboPlayerErrorNone)
        LoggerAudio(0, @"%@", errorMessage(errCode));
}

// Out-indices of the subtitle streams currently selected for decoding;
// nil when no subtitle streams are managed at all.
- (NSArray *) selectedSubtitleStreams
{
    if (_SubtitleStreamCodecMangeArray.count == 0)
        return nil;

    NSMutableArray *selected = [NSMutableArray array];
    for (SubtitleDataClass *sub in _SubtitleStreamCodecMangeArray) {
        if (sub.is_decoding)
            [selected addObject:@(sub.outsubtitle_index)];
    }
    return [selected copy];
}
// Opens the external subtitle file at `path`, registers its subtitle
// streams (shown to the user as `name`), and pre-decodes every cue so
// the demuxer can be closed immediately. Returns YES on success.
- (BOOL )OpenSubtitleFilePath:(NSString *) path
                     Filename:(NSString *) name
{
    if (path == NULL) {
        NSLog(@"PATH is NULL");
        return false;
    }

    AVFormatContext * FormatCtxExternal;
    MoboPlayerError errCode = MoboPlayerErrorNone;

    FormatCtxExternal = avformat_alloc_context();
    if (!FormatCtxExternal)
    {
        // Fix: errCode was previously left at MoboPlayerErrorNone here,
        // so an allocation failure fell through the error label into the
        // success path and the method returned YES.
        errCode = MoboPlayerErrorOpenFile;
        goto openerror;
    }

    NSLog(@"%s",[path  cStringUsingEncoding: NSUTF8StringEncoding]);
    int  openformatnum =  avformat_open_input(&FormatCtxExternal, [path cStringUsingEncoding:NSUTF8StringEncoding], NULL, NULL);
    if (openformatnum < 0) {

        // avformat_open_input frees a user-supplied context on failure
        // and NULLs the pointer, so this free is normally a no-op kept
        // for safety.
        if (FormatCtxExternal)
            avformat_free_context(FormatCtxExternal);
        NSLog(@"this  is  in  add subtitle 1.1 : %d",openformatnum);
        errCode = MoboPlayerErrorOpenFile;
        goto openerror;
    }
    NSLog(@"this  is  in  add subtitle 2");
    if (avformat_find_stream_info(FormatCtxExternal, NULL) < 0) {

        avformat_close_input(&FormatCtxExternal);
        errCode = MoboPlayerErrorStreamInfoNotFound;
        goto openerror;
    }
    av_dump_format(FormatCtxExternal, 0, [path.lastPathComponent cStringUsingEncoding: NSUTF8StringEncoding], false);

    // Register the new subtitle streams, then decode all cues up front.
    [self collectAddFileSubtitleStreams:FormatCtxExternal avmedia:AVMEDIA_TYPE_SUBTITLE filepath:name];
    NSLog(@"    THIS  IS SUBTITLE  STRING  PATH     %@", path);
    [self addsubtitlestringtoarry:FormatCtxExternal];

openerror:
    if (errCode != MoboPlayerErrorNone) {

        NSString *errMsg = errorMessage(errCode);
        LoggerStream(0, @"%@, %@", errMsg, path.lastPathComponent);
        return NO;
    }

    // All cues are cached in memory now, so the demuxer can be closed.
    // NOTE(review): the registered SubtitleDataClass entries still hold
    // raw AVStream/AVCodecContext pointers into this context — confirm
    // they are never dereferenced again for external files after this.
    if (FormatCtxExternal) {
        avformat_close_input(&FormatCtxExternal);
    }

    NSLog(@"add subtitle file  sucess !!!!");
    return YES;
}
// Appends one SubtitleDataClass entry per matching stream found in
// `formatCtx` (an external subtitle file) to
// _SubtitleStreamCodecMangeArray, creating the array on first use.
// `filepath` becomes the stream's display name; `codecType` is
// expected to be AVMEDIA_TYPE_SUBTITLE.
- (void) collectAddFileSubtitleStreams:(AVFormatContext *)formatCtx
                               avmedia:(NSInteger)  codecType
                              filepath:(NSString*) filepath
{
    int count = 0;
    if(!_SubtitleStreamCodecMangeArray)
    {
        _SubtitleStreamCodecMangeArray =[[NSMutableArray alloc] init];
    }
    else
    {
        count  = (int)[_SubtitleStreamCodecMangeArray count];
    }
    for (NSInteger i = 0; i < formatCtx->nb_streams; ++i)
    {
        // NOTE(review): uses the deprecated AVStream.codec field while
        // collectStreams() above uses codecpar — confirm the ffmpeg
        // version in use still ships AVStream.codec.
        if (codecType == formatCtx->streams[i]->codec->codec_type)
        {
            // Recompute so each new entry's out-index equals its final
            // position in the managed array.
            if (_SubtitleStreamCodecMangeArray) {
                count  = (int)[_SubtitleStreamCodecMangeArray count];
            }
            SubtitleDataClass * msubtitledata  = [[SubtitleDataClass alloc] init];
            msubtitledata.fmt_ctx = formatCtx;
            msubtitledata.CodecCtx = formatCtx->streams[i]->codec;
            msubtitledata.subtitlestream =formatCtx->streams[i];
            msubtitledata.insubtitle_index =i;       // index inside the external container
            msubtitledata.outsubtitle_index =count;  // index in the managed array
            msubtitledata.is_seaching = true;        // mark as external subtitle file
            msubtitledata.filepath = filepath ;
            [_SubtitleStreamCodecMangeArray addObject:msubtitledata];
            NSLog(@" subtitle  is  select strem sucess !!!!!!");
        }
        
    }
}
// Pre-decodes every cue of the most recently registered external
// subtitle stream (the last object of _SubtitleStreamCodecMangeArray)
// into its avsubtitlearry, so external subtitles never need the
// demuxer again during playback. Closes the stream's codec when done.
-(void) addsubtitlestringtoarry:(AVFormatContext *)FormatCtxExternal
{
    
    AVPacket packet;
    BOOL finished = NO;  // never set: the loop exits only via the EOF break below
    SubtitleDataClass * msubtitledata =[_SubtitleStreamCodecMangeArray lastObject];
    
    
    MoboPlayerError errCode = [self openExtermalSubtitleStream];
    if (MoboPlayerErrorNone != errCode) {
        LoggerStream(0, @"%@", errorMessage(errCode));
        LoggerStream(0, @"当前 subtitle 打开失败！\n");
        return ;
    }
    
    msubtitledata.avsubtitlearry =[NSMutableArray array];
    do{
        // Demux until end of file.
        if (av_read_frame(FormatCtxExternal, &packet) < 0) {
            break;
        }

        if (packet.stream_index  == msubtitledata.insubtitle_index)
        {
            
            int pktSize = packet.size;
            
            // A packet may hold more than one cue; keep decoding until
            // its bytes are consumed (len = bytes used per call).
            while (pktSize > 0)
            {
                
                AVSubtitle subtitle;
                int gotsubtitle = 0;
                int len = avcodec_decode_subtitle2(msubtitledata.subtitlestream->codec,
                                                   &subtitle,
                                                   &gotsubtitle,
                                                   &packet);
                if (len < 0) {
                    LoggerStream(0, @"decode subtitle error, skip packet");
                    break;
                }
                
                if (gotsubtitle) {
                    
                    // Convert the raw AVSubtitle to a text frame and cache it.
                    MoboSubtitleFrame *frame = [self handleSubtitle: &subtitle];
                    if (frame) {
                        [msubtitledata.avsubtitlearry addObject:frame];
                    }
                    avsubtitle_free(&subtitle);
                }
                if (0 == len)
                    break;
                pktSize -= len;
            }
        }
        av_packet_unref(&packet);
    }while (!finished);

    
    // The codec was only needed for this one-shot decode pass.
    if (msubtitledata.CodecCtx) {
        avcodec_close(msubtitledata.CodecCtx);
    }
    
}
// Public entry point for loading an external subtitle file; delegates
// to -OpenSubtitleFilePath:Filename: and returns its result.
-(BOOL ) AddSubtitleFile:(NSString *) path
                Filename:(NSString *)name
{
    return [self OpenSubtitleFilePath:path Filename:name];
}
// Marks exactly the streams whose out-index appears in `selected` for
// decoding (all other streams are deselected), then (re)opens the
// subtitle decoding machinery. Returns NO if that fails.
- (BOOL) setSelectedSubtitleStream:(NSArray *)selected
{
    // Deselect everything first.
    for (SubtitleDataClass *sub in _SubtitleStreamCodecMangeArray)
        sub.is_decoding = false;

    // Re-enable the requested streams.
    for (NSNumber *n in selected) {
        for (SubtitleDataClass *sub in _SubtitleStreamCodecMangeArray) {
            if (sub.outsubtitle_index == n.integerValue) {
                NSLog(@"the in subtitledata class index: %lu  outdex:%lu ",(unsigned long)sub.insubtitle_index ,(unsigned long)sub.outsubtitle_index);
                sub.is_decoding = true;
            }
        }
    }

    MoboPlayerError errCode = [self openSubtitleStream];
    if (errCode != MoboPlayerErrorNone) {
        LoggerStream(0, @"%@", errorMessage(errCode));
        LoggerStream(0, @"当前 subtitle 打开失败！\n");
        return false;
    }

    return true;
}

// YES when an audio stream is currently open.
- (BOOL) validAudio
{
    return -1 != _audioStream;
}

// YES when a video stream is currently open.
- (BOOL) validVideo
{
    return -1 != _videoStream;
}

// YES when at least one subtitle stream is selected for decoding.
// (Fix: removed an unreachable `break` that followed the `return`.)
- (BOOL) validSubtitles
{
    for (SubtitleDataClass *msubtitledata in _SubtitleStreamCodecMangeArray) {
        if (msubtitledata.is_decoding)
            return true;
    }
    return false;
}



// Lazily builds (and caches in _streaminfo) a dictionary describing
// the open file: @"format", @"bitrate", @"metadata", plus
// human-readable @"video", @"audio" and @"subtitles" stream lists.
- (NSDictionary *) streaminfo
{
    if (!_streaminfo) {
        
        NSMutableDictionary *md = [NSMutableDictionary dictionary];
        
        if (_formatCtx) {
        
            const char *formatName = _formatCtx->iformat->name;
            [md setValue: [NSString stringWithCString:formatName encoding:NSUTF8StringEncoding]
                  forKey: @"format"];
            
            if (_formatCtx->bit_rate) {
                
                // NOTE(review): bit_rate is truncated to int here —
                // confirm no expected input exceeds INT_MAX bits/sec.
                [md setValue: [NSNumber numberWithInt:(int)_formatCtx->bit_rate]
                      forKey: @"bitrate"];
            }
            
            // Copy every container-level metadata tag verbatim.
            if (_formatCtx->metadata) {
                
                NSMutableDictionary *md1 = [NSMutableDictionary dictionary];
                
                AVDictionaryEntry *tag = NULL;
                 while((tag = av_dict_get(_formatCtx->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) {
                     
                     [md1 setValue: [NSString stringWithCString:tag->value encoding:NSUTF8StringEncoding]
                            forKey: [NSString stringWithCString:tag->key encoding:NSUTF8StringEncoding]];
                 }
                
                [md setValue: [md1 copy] forKey: @"metadata"];
            }
        
            char buf[256];
            
            // One "codec, resolution, …" description line per video
            // stream, with ffmpeg's "Video: " prefix stripped.
            // NOTE(review): avcodec_string is fed the deprecated
            // st->codec context — confirm it is still populated.
            if (_videoStreams.count) {
                NSMutableArray *ma = [NSMutableArray array];
                for (NSNumber *n in _videoStreams) {
                    AVStream *st = _formatCtx->streams[n.integerValue];
                    avcodec_string(buf, sizeof(buf), st->codec, 1);
                    NSString *s = [NSString stringWithCString:buf encoding:NSUTF8StringEncoding];
                    if ([s hasPrefix:@"Video: "])
                        s = [s substringFromIndex:@"Video: ".length];
                    [ma addObject:s];
                }
                md[@"video"] = ma.copy;
            }
            
            // Audio descriptions are prefixed with the stream's
            // "language" tag when present.
            if (_audioStreams.count) {
                NSMutableArray *ma = [NSMutableArray array];
                for (NSNumber *n in _audioStreams) {
                    AVStream *st = _formatCtx->streams[n.integerValue];
                    
                    NSMutableString *ms = [NSMutableString string];
                    AVDictionaryEntry *lang = av_dict_get(st->metadata, "language", NULL, 0);
                    if (lang && lang->value) {
                        [ms appendFormat:@"%s ", lang->value];
                    }
                    
                    avcodec_string(buf, sizeof(buf), st->codec, 1);
                    NSString *s = [NSString stringWithCString:buf encoding:NSUTF8StringEncoding];
                    if ([s hasPrefix:@"Audio: "])
                        s = [s substringFromIndex:@"Audio: ".length];
                    [ms appendString:s];
                    
                    [ma addObject:ms.copy];
                }                
                md[@"audio"] = ma.copy;
            }
            
            // Subtitle names come from the "title" tag; streams without
            // one contribute an empty string.

                NSMutableArray *ma = [NSMutableArray array];
                for (SubtitleDataClass  *msubtitledata in _SubtitleStreamCodecMangeArray) {
                    NSMutableString *ms = [NSMutableString string];
                    AVDictionaryEntry *lang = av_dict_get(msubtitledata.subtitlestream->metadata, "title", NULL, 0);
                    if (lang && lang->value) {
                        [ms appendFormat:@"%@", [NSString stringWithUTF8String:lang->value]];
                    }
                    NSLog(@"%@",ms);
                    [ma addObject:ms.copy];
                }               
                md[@"subtitles"] = ma.copy;
            
        }
                
        _streaminfo = [md copy];
    }
    return _streaminfo;
}

// Pixel-format name of the active video stream (e.g. "yuv420p").
// Returns nil when no video codec is open, @"" when the format is
// unset, and @"?" when ffmpeg has no name for it.
- (NSString *) videoStreamFormatName
{
    if (!_videoCodecCtx)
        return nil;

    if (AV_PIX_FMT_NONE == _videoCodecCtx->pix_fmt)
        return @"";

    const char *name = av_get_pix_fmt_name(_videoCodecCtx->pix_fmt);
    if (!name)
        return @"?";
    return [NSString stringWithCString:name encoding:NSUTF8StringEncoding];
}

// Start time in seconds of the primary stream: video when present,
// otherwise audio, otherwise 0. A stream that reports no start time
// contributes 0.
- (CGFloat) startTime
{
    NSInteger streamIndex = -1;
    CGFloat timeBase = 0;

    if (_videoStream != -1) {
        streamIndex = _videoStream;
        timeBase = _videoTimeBase;
    } else if (_audioStream != -1) {
        streamIndex = _audioStream;
        timeBase = _audioTimeBase;
    }

    if (streamIndex == -1)
        return 0;

    AVStream *st = _formatCtx->streams[streamIndex];
    return (st->start_time != AV_NOPTS_VALUE) ? st->start_time * timeBase : 0;
}

// One-time global ffmpeg setup (codec/format/filter registration and
// network init).
// Fix: +initialize runs once for *every* class in the hierarchy; guard
// on the class so a subclass cannot trigger the registration twice.
+ (void)initialize
{
    if (self == [MoboPlayerDecoder class]) {
        av_register_all();
        avformat_network_init();
        avfilter_register_all();
    }
}

// Convenience factory: allocates a decoder and immediately opens the file.
// The instance is returned even when opening fails; on failure *perror
// describes the reason.
+ (id) movieDecoderWithContentPath: (NSString *) path
                             error: (NSError **) perror
{
    MoboPlayerDecoder *decoder = [[MoboPlayerDecoder alloc] init];
    if (decoder)
        [decoder openFile:path error:perror];
    return decoder;
}

// Releases all FFmpeg resources (codec contexts, frames, format context)
// via closeFile before the object is destroyed.
- (void) dealloc
{
    LoggerStream(2, @"%@ dealloc", self);
    [self closeFile];
}

#pragma mark - private

// Opens the media at `path`: the input context, then the best video and
// audio streams, collects subtitle streams, and builds the audio tempo
// filter graph. Returns NO (with *perror populated) when nothing playable
// could be opened; succeeds if EITHER a video or an audio stream opened.
- (BOOL) openFile: (NSString *) path
            error: (NSError **) perror
{
    NSAssert(path, @"nil path");
    NSAssert(!_formatCtx, @"already open");

    _decodelock = [NSLock new];
    _filterlock = [NSLock new];

    _isNetwork = isNetworkPath(path);
    if (_isNetwork) {
        NSLog(@"file  is network file");
    }

    // Lazy one-shot network init for remote sources (harmless duplicate of
    // the call in +initialize, kept for parity with the original flow).
    static BOOL needNetworkInit = YES;
    if (needNetworkInit && _isNetwork) {
        needNetworkInit = NO;
        avformat_network_init();
    }

    _playerspeed = 1.0;
    _path = path;

    MoboPlayerError errCode = [self openInput: path];
    if (errCode == MoboPlayerErrorNone) {

        MoboPlayerError videoErr = [self openVideoStream];
        MoboPlayerError audioErr = [self openAudioStream];
        if (videoErr != MoboPlayerErrorNone &&
            audioErr != MoboPlayerErrorNone) {

            errCode = videoErr; // both fails

        } else {

            [self collectSubtitleStreams:_formatCtx avmedia:AVMEDIA_TYPE_SUBTITLE];
        }

        // BUGFIX: the filter graph was previously built unconditionally,
        // even after a failed open or for audio-less files, in which case
        // init_filters indexes _formatCtx->streams[_audioStream] with
        // _audioStream == -1. Only build it when audio actually opened.
        if (errCode == MoboPlayerErrorNone && audioErr == MoboPlayerErrorNone) {
            [self init_filters:"atempo=1.0"];
        }
    }

    if (errCode != MoboPlayerErrorNone) {

        [self closeFile];
        NSString *errMsg = errorMessage(errCode);
        LoggerStream(0, @"%@, %@", errMsg, path.lastPathComponent);
        if (perror)
            *perror = moboPlayerError(errCode, errMsg);
        return NO;
    }

    return YES;
}
// Appends a SubtitleDataClass entry to _SubtitleStreamCodecMangeArray for
// every stream in `formatCtx` whose codec type matches `codecType`
// (normally AVMEDIA_TYPE_SUBTITLE). The array is created lazily.
// Note: the removed local `count` was computed but never used.
- (void) collectSubtitleStreams:(AVFormatContext *)formatCtx
                        avmedia:(NSInteger)  codecType
{
    if (!_SubtitleStreamCodecMangeArray)
    {
        _SubtitleStreamCodecMangeArray = [[NSMutableArray alloc] init];
    }

    int outIndex = 0; // index of the stream among the matches found in THIS call
    for (NSInteger i = 0; i < formatCtx->nb_streams; ++i)
    {
        if (codecType != formatCtx->streams[i]->codec->codec_type)
            continue;

        SubtitleDataClass *entry = [[SubtitleDataClass alloc] init];
        entry.fmt_ctx = formatCtx;
        // NOTE(review): _subtitleCodecCtx is overwritten each iteration, so
        // it ends up pointing at the LAST matching stream — confirm intended.
        _subtitleCodecCtx = formatCtx->streams[i]->codec;
        entry.subtitlestream = formatCtx->streams[i];
        entry.insubtitle_index = i;
        entry.outsubtitle_index = outIndex;
        entry.is_seaching = false;
        [_SubtitleStreamCodecMangeArray addObject:entry];
        outIndex++;
    }
}

// Opens the demuxer for `path` and probes stream info. On success stores
// the new context in _formatCtx and returns MoboPlayerErrorNone; on
// failure returns the matching error code with nothing left allocated.
- (MoboPlayerError) openInput: (NSString *) path
{
    AVFormatContext *formatCtx = NULL;
    
    if (_interruptCallback) {
        
        // Pre-allocate the context only so the abort callback can be
        // installed before avformat_open_input starts any (possibly slow,
        // network-bound) I/O.
        formatCtx = avformat_alloc_context();
        if (!formatCtx)
            return MoboPlayerErrorOpenFile;
        
        AVIOInterruptCB cb = {interrupt_callback, (__bridge void *)(self)};
        formatCtx->interrupt_callback = cb;
    }
    // NOTE(review): avformat_open_input documents that a user-supplied
    // context is freed and *ps reset on failure, so the free below should
    // see formatCtx == NULL — confirm against the linked FFmpeg version.
    if (avformat_open_input(&formatCtx, [path cStringUsingEncoding: NSUTF8StringEncoding], NULL, NULL) < 0) {
        
        if (formatCtx)
            avformat_free_context(formatCtx);
        return MoboPlayerErrorOpenFile;
    }
    if (avformat_find_stream_info(formatCtx, NULL) < 0) {
        
        // close_input also frees the context and NULLs the pointer.
        avformat_close_input(&formatCtx);
        return MoboPlayerErrorStreamInfoNotFound;
    }
    // Dump the container layout to the FFmpeg log (debug aid only).
    av_dump_format(formatCtx, 0, [path.lastPathComponent cStringUsingEncoding: NSUTF8StringEncoding], false);
    
    _formatCtx = formatCtx;
    return MoboPlayerErrorNone;
}

// Tries each detected video stream until one opens successfully.
// Streams flagged AV_DISPOSITION_ATTACHED_PIC (embedded cover art) are not
// opened as video; the last one seen is remembered in _artworkStream.
- (MoboPlayerError) openVideoStream
{
    MoboPlayerError result = MoboPlayerErrorStreamNotFound;
    _videoStream = -1;
    _artworkStream = -1;
    _videoStreams = collectStreams(_formatCtx, AVMEDIA_TYPE_VIDEO);

    for (NSNumber *streamNumber in _videoStreams) {

        const NSUInteger streamIndex = streamNumber.integerValue;

        if (_formatCtx->streams[streamIndex]->disposition & AV_DISPOSITION_ATTACHED_PIC) {
            _artworkStream = streamIndex;
            continue;
        }

        result = [self openVideoStream: streamIndex];
        if (result == MoboPlayerErrorNone)
            break;
    }

    return result;
}

// Opens the decoder for the video stream at index `videoStream`.
// On success stores the codec context / frame in ivars, computes fps and
// time base, and returns MoboPlayerErrorNone. Every failure path now frees
// the allocated codec context (previously leaked) and returns a value from
// the MoboPlayerError domain (previously a raw negative AVERROR escaped).
- (MoboPlayerError) openVideoStream: (NSInteger) videoStream
{
    AVCodecParameters *codecpar = _formatCtx->streams[videoStream]->codecpar;

    AVCodec *codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec)
        return MoboPlayerErrorCodecNotFound;

    AVCodecContext *pAVCodecCtx = avcodec_alloc_context3(codec);
    if (!pAVCodecCtx)
        return MoboPlayerErrorOpenCodec;

    int errorcodec = avcodec_parameters_to_context(pAVCodecCtx, codecpar);
    if (errorcodec < 0) {
        printf("copy the codec parameters to context fail, err code : %d\n", errorcodec);
        avcodec_free_context(&pAVCodecCtx); // was leaked
        return MoboPlayerErrorOpenCodec;    // was: returned raw AVERROR
    }

    // open codec
    if (avcodec_open2(pAVCodecCtx, codec, NULL) < 0) {
        avcodec_free_context(&pAVCodecCtx); // was leaked
        return MoboPlayerErrorOpenCodec;
    }

    _videoFrame = av_frame_alloc();
    if (!_videoFrame) {
        avcodec_free_context(&pAVCodecCtx); // closes and frees in one call
        return MoboPlayerErrorAllocateFrame;
    }

    _videoStream = videoStream;
    _videoCodecCtx = pAVCodecCtx;

    // determine fps and the stream->seconds conversion factor
    AVStream *st = _formatCtx->streams[_videoStream];
    avStreamFPSTimeBase(st, 0.04, &_fps, &_videoTimeBase);

    LoggerVideo(1, @"video codec size: %lu:%lu fps: %.3f tb: %f",
                (unsigned long)self.frameWidth,
                (unsigned long)self.frameHeight,
                _fps,
                _videoTimeBase);

    LoggerVideo(1, @"video start time %f", st->start_time * _videoTimeBase);
    LoggerVideo(1, @"video disposition %d", st->disposition);

    return MoboPlayerErrorNone;
}

// Opens the first audio stream that the codec layer accepts; returns the
// last error seen (or StreamNotFound when no audio streams exist).
- (MoboPlayerError) openAudioStream
{
    MoboPlayerError result = MoboPlayerErrorStreamNotFound;
    _audioStream = -1;
    _audioStreams = collectStreams(_formatCtx, AVMEDIA_TYPE_AUDIO);

    for (NSNumber *streamNumber in _audioStreams) {
        result = [self openAudioStream: streamNumber.integerValue];
        if (result == MoboPlayerErrorNone)
            break;
    }

    return result;
}

// Opens the decoder for the audio stream at index `audioStream` and, when
// the decoder's native output is not directly playable, builds an swr
// resampler targeting the audio manager's S16 output.
// Fixes in this revision: resolved the leftover git merge conflict around
// _filt_frame (kept — handleAudioFrame pulls filter output into it); all
// failure paths free the allocated codec context; the raw AVERROR from
// avcodec_parameters_to_context no longer escapes the MoboPlayerError domain.
- (MoboPlayerError) openAudioStream: (NSInteger) audioStream
{
    AVCodecParameters *codecpar = _formatCtx->streams[audioStream]->codecpar;
    SwrContext *swrContext = NULL;

    AVCodec *codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec)
        return MoboPlayerErrorCodecNotFound;

    AVCodecContext *pAVCodecCtx = avcodec_alloc_context3(codec);
    if (!pAVCodecCtx)
        return MoboPlayerErrorOpenCodec;

    int errorcodec = avcodec_parameters_to_context(pAVCodecCtx, codecpar);
    if (errorcodec < 0) {
        printf("copy the codec parameters to context fail, err code : %d\n", errorcodec);
        avcodec_free_context(&pAVCodecCtx); // was leaked
        return MoboPlayerErrorOpenCodec;    // was: returned raw AVERROR
    }

    if (avcodec_open2(pAVCodecCtx, codec, NULL) < 0) {
        avcodec_free_context(&pAVCodecCtx); // was leaked
        return MoboPlayerErrorOpenCodec;
    }

    if (!audioCodecIsSupported(pAVCodecCtx)) {

        // Decoder output is not directly playable; resample to interleaved
        // S16 at the audio manager's channel count and sampling rate.
        id<MoboAudioManager> audioManager = [MoboAudioManager audioManager];
        swrContext = swr_alloc_set_opts(NULL,
                                        av_get_default_channel_layout(audioManager.numOutputChannels),
                                        AV_SAMPLE_FMT_S16,
                                        audioManager.samplingRate,
                                        av_get_default_channel_layout(pAVCodecCtx->channels),
                                        pAVCodecCtx->sample_fmt,
                                        pAVCodecCtx->sample_rate,
                                        0,
                                        NULL);

        if (!swrContext ||
            swr_init(swrContext)) {

            if (swrContext)
                swr_free(&swrContext);
            avcodec_free_context(&pAVCodecCtx);

            return MoboPlayerErroReSampler;
        }
    }

    _audioFrame = av_frame_alloc();
    // Scratch frame that receives filtered (tempo-adjusted) audio in
    // handleAudioFrame; when _playerspeed == 1 it simply aliases _audioFrame.
    _filt_frame = av_frame_alloc();

    if (!_audioFrame || !_filt_frame) {
        av_frame_free(&_audioFrame); // safe on NULL
        av_frame_free(&_filt_frame);
        if (swrContext)
            swr_free(&swrContext);
        avcodec_free_context(&pAVCodecCtx);
        return MoboPlayerErrorAllocateFrame;
    }

    _audioStream = audioStream;
    _audioCodecCtx = pAVCodecCtx;
    _swrContext = swrContext;

    AVStream *st = _formatCtx->streams[_audioStream];
    avStreamFPSTimeBase(st, 0.025, 0, &_audioTimeBase);

    LoggerAudio(1, @"audio codec smr: %.d fmt: %d chn: %d tb: %f %@",
                _audioCodecCtx->sample_rate,
                _audioCodecCtx->sample_fmt,
                _audioCodecCtx->channels,
                _audioTimeBase,
                _swrContext ? @"resample" : @"");

    return MoboPlayerErrorNone;
}
// Opens the external subtitle stream (the most recently collected entry in
// _SubtitleStreamCodecMangeArray). Only text-based subtitles are supported.
// Fixes: guards against an empty array (lastObject == nil led to a NULL
// struct dereference) and no longer dereferences a NULL AVCodecDescriptor
// in the diagnostic NSLog.
- (MoboPlayerError) openExtermalSubtitleStream
{
    SubtitleDataClass *msubtitledata = [_SubtitleStreamCodecMangeArray lastObject];
    if (!msubtitledata)
        return MoboPlayerErrorStreamNotFound;

    AVCodecContext *codecCtx = msubtitledata.subtitlestream->codec;
    AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id);
    if (!codec)
        return MoboPlayerErrorCodecNotFound;

    const AVCodecDescriptor *codecDesc = avcodec_descriptor_get(codecCtx->codec_id);
    if (codecDesc && (codecDesc->props & AV_CODEC_PROP_BITMAP_SUB)) {
        // Only text based subtitles supported
        return MoboPlayerErroUnsupported;
    }
    if (avcodec_open2(codecCtx, codec, NULL) < 0)
        return MoboPlayerErrorOpenCodec;

    if (codecDesc) { // descriptor can legitimately be NULL
        NSLog(@"subtitle codec: '%s' mode: %d enc: %s",
              codecDesc->name,
              codecCtx->sub_charenc_mode,
              codecCtx->sub_charenc);
    }
    _subtitleASSEvents = -1;

    if (codecCtx->subtitle_header_size)
    {
        // Parse the ASS header to find how many fields precede "Text" in
        // each event line; -1 means "no ASS events format detected".
        NSString *s = [[NSString alloc] initWithBytes:codecCtx->subtitle_header
                                               length:codecCtx->subtitle_header_size
                                             encoding:NSASCIIStringEncoding];

        if (s.length)
        {
            NSArray *fields = [MoboPlayerSubtitleASSParser parseEvents:s];
            if (fields.count && [fields.lastObject isEqualToString:@"Text"])
            {
                _subtitleASSEvents = fields.count;
                LoggerStream(2, @"subtitle ass events: %@", [fields componentsJoinedByString:@","]);
            }
        }
    }

    return MoboPlayerErrorNone;
}

// Opens a decoder for every collected subtitle stream that is not
// currently being searched. Only text-based subtitles are supported; the
// ASS header of each opened stream is parsed to record the event field
// count in _subtitleASSEvents.
// Fixes: no longer dereferences a NULL AVCodecDescriptor in the NSLog;
// removed the large dead commented-out duplicate of this method.
- (MoboPlayerError) openSubtitleStream
{
    for (SubtitleDataClass *msubtitledata in _SubtitleStreamCodecMangeArray)
    {
        if (msubtitledata.is_seaching) {
            continue;
        }

        AVCodecContext *codecCtx = msubtitledata.subtitlestream->codec;
        AVCodec *codec = avcodec_find_decoder(codecCtx->codec_id);
        if (!codec)
            return MoboPlayerErrorCodecNotFound;

        const AVCodecDescriptor *codecDesc = avcodec_descriptor_get(codecCtx->codec_id);
        if (codecDesc && (codecDesc->props & AV_CODEC_PROP_BITMAP_SUB)) {
            // Only text based subtitles supported
            return MoboPlayerErroUnsupported;
        }
        if (avcodec_open2(codecCtx, codec, NULL) < 0)
            return MoboPlayerErrorOpenCodec;

        if (codecDesc) { // descriptor can legitimately be NULL
            NSLog(@"subtitle codec: '%s' mode: %d enc: %s",
                  codecDesc->name,
                  codecCtx->sub_charenc_mode,
                  codecCtx->sub_charenc);
        }
        _subtitleASSEvents = -1;

        if (codecCtx->subtitle_header_size)
        {
            // Parse the ASS header to learn the event line layout; only a
            // trailing "Text" field marks a usable format.
            NSString *s = [[NSString alloc] initWithBytes:codecCtx->subtitle_header
                                                   length:codecCtx->subtitle_header_size
                                                 encoding:NSASCIIStringEncoding];

            if (s.length)
            {
                NSArray *fields = [MoboPlayerSubtitleASSParser parseEvents:s];
                if (fields.count && [fields.lastObject isEqualToString:@"Text"])
                {
                    _subtitleASSEvents = fields.count;
                    LoggerStream(2, @"subtitle ass events: %@", [fields componentsJoinedByString:@","]);
                }
            }
        }
    }

    return MoboPlayerErrorNone;
}

// Releases every resource owned by the decoder: per-stream subtitle
// bookkeeping, the audio/video/subtitle codec state, and the format
// context.
// BUGFIX: the original nil'd _SubtitleStreamCodecMangeArray (both directly
// and inside closeSubtitleStream) BEFORE the cleanup loop iterated it, so
// the loop never ran. The per-entry cleanup now happens first.
-(void) closeFile
{
    for (SubtitleDataClass *msubtitledata in _SubtitleStreamCodecMangeArray)
    {
        if (msubtitledata.is_seaching) {
            [msubtitledata.avsubtitlearry removeAllObjects];
            msubtitledata.avsubtitlearry = nil;
//            msubtitledata.fmt_ctx->interrupt_callback.opaque = NULL;
//            msubtitledata.fmt_ctx->interrupt_callback.callback = NULL;
//            avformat_close_input(msubtitledata.fmt_ctx);
//            msubtitledata.fmt_ctx = NULL;
        }
    }

    [self closeAudioStream];
    [self closeVideoStream];
    [self closeSubtitleStream]; // empties and nils _SubtitleStreamCodecMangeArray

    _videoStreams = nil;
    _audioStreams = nil;
    _SubtitleStreamCodecMangeArray = nil;

    if (_formatCtx) {

        // Detach the interrupt callback before closing so no late abort
        // check can reach a deallocated self.
        _formatCtx->interrupt_callback.opaque = NULL;
        _formatCtx->interrupt_callback.callback = NULL;

        avformat_close_input(&_formatCtx);
        _formatCtx = NULL;
    }
}

// Releases the decoded-video frame and the video codec context.
// BUGFIX: av_free() on an AVFrame leaked its internal buffers — use
// av_frame_free(). avcodec_close() alone leaked the context struct
// allocated by avcodec_alloc_context3 — use avcodec_free_context(),
// which closes and frees in one call.
- (void) closeVideoStream
{
    _videoStream = -1;

//    [self closeScaler];

    if (_videoFrame) {

        av_frame_free(&_videoFrame); // frees buffers AND the frame, NULLs the pointer
        _videoFrame = NULL;
    }

    if (_videoCodecCtx) {

        avcodec_free_context(&_videoCodecCtx);
        _videoCodecCtx = NULL;
    }
}

// Releases the resample buffer/context, the decoded-audio frame, and the
// audio codec context.
// BUGFIX: av_free() on an AVFrame leaked its internal buffers — use
// av_frame_free(). avcodec_close() alone leaked the context struct
// allocated by avcodec_alloc_context3 — use avcodec_free_context().
// NOTE(review): _filt_frame is deliberately NOT freed here because when
// _playerspeed == 1 it aliases _audioFrame (see handleAudioFrame) and a
// second free would be a double-free — confirm its ownership and free it
// where that aliasing is untangled.
- (void) closeAudioStream
{
    _audioStream = -1;

    if (_swrBuffer) {

        free(_swrBuffer);
        _swrBuffer = NULL;
        _swrBufferSize = 0;
    }

    if (_swrContext) {

        swr_free(&_swrContext); // also NULLs the pointer
        _swrContext = NULL;
    }

    if (_audioFrame) {

        av_frame_free(&_audioFrame); // frees buffers AND the frame
        _audioFrame = NULL;
    }

    if (_audioCodecCtx) {

        avcodec_free_context(&_audioCodecCtx);
        _audioCodecCtx = NULL;
    }
}

// Closes the subtitle codec context and drops all per-stream subtitle
// bookkeeping.
- (void) closeSubtitleStream
{
    if (_subtitleCodecCtx) {
        avcodec_close(_subtitleCodecCtx);
        _subtitleCodecCtx = NULL;
    }

    if (_SubtitleStreamCodecMangeArray) {
        [_SubtitleStreamCodecMangeArray removeAllObjects];
        _SubtitleStreamCodecMangeArray = nil;
    }
}
//
//- (void) closeScaler
//{
//    if (_swsContext) {
//        sws_freeContext(_swsContext);
//        _swsContext = NULL;
//    }
//    
//    if (_pictureValid) {
//        avpicture_free(&_picture);
//        _pictureValid = NO;
//    }
//    if (_currentvideoFrame) {
//        av_frame_free(&(_currentvideoFrame));
//    }
//}

//- (BOOL) setupScaler
//{
//    [self closeScaler];
//    
//    _pictureValid = avpicture_alloc(&_picture,
//                                    AV_PIX_FMT_RGB24,
//                                    _videoCodecCtx->width,
//                                    _videoCodecCtx->height) == 0;
//    
//    
////    NSLog(@"setupScaler             =========================================");
//    
//	if (!_pictureValid)
//        return NO;
//
//    _currentvideoFrame = av_frame_alloc();
//	_swsContext = sws_getCachedContext(_swsContext,
//                                       _videoCodecCtx->width,
//                                       _videoCodecCtx->height,
//                                       _videoCodecCtx->pix_fmt,
//                                       _videoCodecCtx->width,
//                                       _videoCodecCtx->height,
//                                       AV_PIX_FMT_YUV420P,
//                                       SWS_FAST_BILINEAR,
//                                       NULL, NULL, NULL);
//        
//    return _swsContext != NULL;
//}

// Converts the decoded _videoFrame into a MoboVideoFrame for rendering:
// either a YUV frame (planes copied out of the AVFrame) or an RGB24 frame
// (converted via sws_scale). Position and duration are derived from the
// frame timestamps scaled by _videoTimeBase and divided by _playerspeed.
// Returns nil when the decoder produced no picture data.
- (MoboVideoFrame *) handleVideoFrame
{
    if (!_videoFrame->data[0])
        return nil;
    
    MoboVideoFrame *frame;

//    NSLog(@"_videoFrameFormat: %d MoboVideoFrameFormatYUV:%d",_videoFrameFormat,MoboVideoFrameFormatYUV);
    if (_videoFrameFormat == MoboVideoFrameFormatYUV) {
            
        MoboVideoFrameYUV * yuvFrame = [[MoboVideoFrameYUV alloc] init];
        
        // Copy the three planes out of the AVFrame; chroma planes are
        // half-size in both dimensions (4:2:0 subsampling assumed here —
        // NOTE(review): confirm pix_fmt is always a 420 variant on this path).
        yuvFrame.luma = copyFrameData(_videoFrame->data[0],
                                      _videoFrame->linesize[0],
                                      _videoCodecCtx->width,
                                      _videoCodecCtx->height);
        
        yuvFrame.chromaB = copyFrameData(_videoFrame->data[1],
                                         _videoFrame->linesize[1],
                                         _videoCodecCtx->width / 2,
                                         _videoCodecCtx->height / 2);
        
        yuvFrame.chromaR = copyFrameData(_videoFrame->data[2],
                                         _videoFrame->linesize[2],
                                         _videoCodecCtx->width / 2,
                                         _videoCodecCtx->height / 2);
//        NSLog(@" this is  yuv frame");
        frame = yuvFrame;
    
    } else {
        // RGB path: scale/convert into a temporary picture, copy the bytes
        // into an NSData, then free the temporary. The sws context is
        // cached across calls.
        AVPicture scale_picture;
        avpicture_alloc(&scale_picture, AV_PIX_FMT_RGB24, _videoCodecCtx->width, _videoCodecCtx->height);
        _swsContext =sws_getCachedContext(_swsContext,_videoCodecCtx->width,_videoCodecCtx->height,_videoCodecCtx->pix_fmt,
                                        _videoCodecCtx->width,_videoCodecCtx->height,AV_PIX_FMT_RGB24,SWS_FAST_BILINEAR,NULL, NULL, NULL);

        sws_scale(_swsContext,
                  (const uint8_t **)_videoFrame->data,
                  _videoFrame->linesize,
                  0,
                  _videoCodecCtx->height,
                  scale_picture.data,
                  scale_picture.linesize);
        MoboVideoFrameRGB *rgbFrame = [[MoboVideoFrameRGB alloc] init];
        
        rgbFrame.linesize = scale_picture.linesize[0];
        rgbFrame.rgb = [NSData dataWithBytes:scale_picture.data[0]
                                      length:rgbFrame.linesize * _videoCodecCtx->height];

        frame = rgbFrame;
        

        avpicture_free(&scale_picture);
        
    }
    
    frame.width = _videoCodecCtx->width;
    frame.height = _videoCodecCtx->height;
    // Dividing by _playerspeed compresses/stretches the timeline for the
    // tempo-adjusted playback path.
    frame.position = av_frame_get_best_effort_timestamp(_videoFrame) * _videoTimeBase/_playerspeed;
    
    const int64_t frameDuration = av_frame_get_pkt_duration(_videoFrame)/_playerspeed;
//    NSLog(@"frameDuration  : %d",_videoFrame->repeat_pict);
    if (frameDuration) {
        
        frame.duration = frameDuration * _videoTimeBase;
//        frame.duration += _videoFrame->repeat_pict * _videoTimeBase * 0.5;
        
//        frame.duration =frame.duration/_playerspeed;
//        NSLog(@" duration :%.4f ",_playerspeed);
    } else {
        
        // sometimes, ffmpeg unable to determine a frame duration
        // as example yuvj420p stream from web camera
        frame.duration = 1.0 / _fps;
    }    
    
#if 0
    LoggerVideo(2, @"VFD: %.4f %.4f | %lld ",
                frame.position,
                frame.duration,
                av_frame_get_pkt_pos(_videoFrame));
#endif
    
    return frame;
}
// Function form of FFmpeg's av_opt_set_int_list macro for uint8_t-sized
// elements: sets option `name` on `obj` to the `term`-terminated list
// `val`, guarding the byte-length computation against overflow.
int mobo_av_opt_set_int_list(void *obj, const char *name, uint8_t *val, uint64_t term, int search_flags){
    const unsigned count = av_int_list_length_for_size(sizeof(*val), val, term);
    if (count > INT_MAX / sizeof(*val))
        return AVERROR(EINVAL);
    return av_opt_set_bin(obj, name, (const uint8_t *)val, count * sizeof(*val), search_flags);
}
// Builds (or rebuilds) the audio filter graph described by `filters_descr`
// (e.g. "atempo=1.5"): abuffer source fed with the decoder's native
// format -> parsed chain -> abuffersink negotiated to that same format, so
// tempo changes without a format conversion. Returns 0 on success or a
// negative AVERROR.
// Fixes in this revision: resolved all leftover git merge conflicts in
// favor of the HEAD side (lock-protected, decoder-derived parameters,
// caller-supplied filter description); restored the lost
// `if (!channel_layout)` guard so a real channel layout is no longer
// clobbered; guards against being called with no open audio stream
// (previously indexed _formatCtx->streams[-1]); casts the sample rate to
// int explicitly.
- (NSInteger) init_filters: (char * )filters_descr
{
    // Serialize graph rebuilds against handleAudioFrame, which pushes/pulls
    // frames through buffersrc_ctx/buffersink_ctx under the same lock.
    [_filterlock lock];

    if (!_audioCodecCtx || _audioStream < 0) {
        [_filterlock unlock];
        return AVERROR(EINVAL);
    }

    char args[512];
    int ret = 0;
    AVFilter *abuffersrc  = avfilter_get_by_name("abuffer");
    AVFilter *abuffersink = avfilter_get_by_name("abuffersink");
    AVFilterInOut *outputs = avfilter_inout_alloc();
    AVFilterInOut *inputs  = avfilter_inout_alloc();

    // Sink format == decoder format: the chain only alters tempo/timing.
    enum AVSampleFormat out_sample_fmts[] = { _audioCodecCtx->sample_fmt, -1 };
    int64_t out_channel_layouts[] = { av_get_default_channel_layout(_audioCodecCtx->channels), -1 };
    int out_sample_rates[] = { (int)_audioCodecCtx->sample_rate, -1 };

    AVRational time_base = _formatCtx->streams[_audioStream]->time_base;
    if (!filter_graph)
        filter_graph = avfilter_graph_alloc();
    if (!outputs || !inputs || !filter_graph) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    /* buffer audio source: the decoded frames from the decoder will be inserted here. */
    if (!_audioCodecCtx->channel_layout)
        _audioCodecCtx->channel_layout = av_get_default_channel_layout(_audioCodecCtx->channels);
    snprintf(args, sizeof(args),
             "time_base=%d/%d:sample_rate=%d:sample_fmt=%s:channel_layout=0x%"PRIx64,
             time_base.num, time_base.den, _audioCodecCtx->sample_rate,
             av_get_sample_fmt_name(_audioCodecCtx->sample_fmt), _audioCodecCtx->channel_layout);

    ret = avfilter_graph_create_filter(&buffersrc_ctx, abuffersrc, "in", args, NULL, filter_graph);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot create audio buffer source\n");
        goto end;
    }

    /* buffer audio sink: to terminate the filter chain. */
    ret = avfilter_graph_create_filter(&buffersink_ctx, abuffersink, "out",
                                       NULL, NULL, filter_graph);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot create audio buffer sink\n");
        goto end;
    }

    ret = av_opt_set_int_list(buffersink_ctx, "sample_fmts", out_sample_fmts, -1,
                              AV_OPT_SEARCH_CHILDREN);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot set output sample format\n");
        goto end;
    }

    ret = av_opt_set_int_list(buffersink_ctx, "channel_layouts", out_channel_layouts, -1,
                              AV_OPT_SEARCH_CHILDREN);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot set output channel layout\n");
        goto end;
    }

    ret = av_opt_set_int_list(buffersink_ctx, "sample_rates", out_sample_rates, -1,
                              AV_OPT_SEARCH_CHILDREN);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot set output sample rate\n");
        goto end;
    }

    /*
     * Set the endpoints for the filter graph. The filter_graph will
     * be linked to the graph described by filters_descr.
     */

    /*
     * The buffer source output must be connected to the input pad of
     * the first filter described by filters_descr; since the first
     * filter input label is not specified, it is set to "in" by
     * default.
     */
    outputs->name       = av_strdup("in");
    outputs->filter_ctx = buffersrc_ctx;
    outputs->pad_idx    = 0;
    outputs->next       = NULL;

    /*
     * The buffer sink input must be connected to the output pad of
     * the last filter described by filters_descr; since the last
     * filter output label is not specified, it is set to "out" by
     * default.
     */
    inputs->name       = av_strdup("out");
    inputs->filter_ctx = buffersink_ctx;
    inputs->pad_idx    = 0;
    inputs->next       = NULL;

    if ((ret = avfilter_graph_parse_ptr(filter_graph, filters_descr,
                                        &inputs, &outputs, NULL)) < 0)
        goto end;

    if ((ret = avfilter_graph_config(filter_graph, NULL)) < 0)
        goto end;

end:
    avfilter_inout_free(&inputs);
    avfilter_inout_free(&outputs);
    [_filterlock unlock];
    return ret;
}
// Debug helper: writes the frame's samples to stdout as raw little-endian
// 16-bit PCM (low byte first), interleaved across all channels.
// NOTE(review): assumes data[0] holds packed/interleaved S16 samples —
// incorrect for planar formats; confirm before using on arbitrary frames.
static void print_frame(const AVFrame *frame)
{
    // Total sample count = samples-per-channel * channel count.
    const int n = frame->nb_samples * av_get_channel_layout_nb_channels(av_frame_get_channel_layout(frame));
    const uint16_t *p     = (uint16_t*)frame->data[0];
    const uint16_t *p_end = p + n;
    
    while (p < p_end) {
        fputc(*p    & 0xff, stdout);
        fputc(*p>>8 & 0xff, stdout);
        p++;
    }
    fflush(stdout);
}
- (MoboAudioFrame *) handleAudioFrame
{
    if (!_audioFrame->data[0])
        return nil;
<<<<<<< HEAD

=======
//    if(!_filt_frame)
//        _filt_frame =av_frame_alloc();
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
    id<MoboAudioManager> audioManager = [MoboAudioManager audioManager];
    const NSUInteger numChannels = audioManager.numOutputChannels;
    NSInteger numFrames;
    int ret= 0;
    void * audioData;
    
    if(_playerspeed != 1 ){
<<<<<<< HEAD
        [_filterlock lock];
//        NSLog(@" the  _playerspeed:%.4f",_playerspeed);
//         if (av_buffersrc_add_frame_flags(buffersrc_ctx, _audioFrame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
        if (av_buffersrc_write_frame(buffersrc_ctx, _audioFrame) < 0) {
            av_log(NULL, AV_LOG_ERROR, "Error while feeding the audio filtergraph\n");
            [_filterlock unlock];
            return NULL;
        }
//        sleep(10);
//        NSLog(@" this  is  a befor  recive  frame");
        /* pull filtered audio from the filtergraph */
        while (1) {
            ret = av_buffersink_get_frame(buffersink_ctx, _filt_frame);
            if (ret >= 0)
            {
//                 NSLog(@" this  is  a good frame");
                
             //   print_frame(_audioFrame);
                break ;
            }
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            {
//                NSLog(@" this  is  a bad frame");
                [_filterlock unlock];
                return NULL ;
            }
            
            if(ret <0)
                
                continue;
            
        }
        [_filterlock unlock];
     //    NSLog(@" the  frame duration :%lld",_audioFrame->pkt_duration);
    }else {
        _filt_frame = _audioFrame;
=======
        //        		CMP_LockMutex(coredata->atempo_filter_mutex);
        if (av_buffersrc_add_frame_flags(buffersrc_ctx, _audioFrame, 0) < 0) {
            av_log(NULL, AV_LOG_ERROR, "Error while feeding the audio filtergraph\n");
            return NULL;
        }
        
        /* pull filtered audio from the filtergraph */
        while (1) {
            ret = av_buffersink_get_frame(buffersink_ctx, _audioFrame);
            if (ret >= 0)
            {
             //   print_frame(_audioFrame);
                break ;
            }
//            NSLog(@" the ret: %d",ret);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                return NULL ;
            if(ret <0)
                continue;
            
        }
     //    NSLog(@" the  frame duration :%lld",_audioFrame->pkt_duration);
    }else {
//        _filt_frame = _audioFrame;
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
    }
    
    if (_swrContext) {
        
        const NSUInteger ratio = MAX(1, audioManager.samplingRate / _audioCodecCtx->sample_rate) *
                                 MAX(1, audioManager.numOutputChannels / _audioCodecCtx->channels) * 2;
<<<<<<< HEAD
//        NSLog(@" integer ratio %d",ratio);
=======
        NSLog(@" integer ratio %d",ratio);
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
        const int bufSize = av_samples_get_buffer_size(NULL,
                                                       audioManager.numOutputChannels,
                                                       (int)(_filt_frame->nb_samples * ratio),
                                                       AV_SAMPLE_FMT_S16,
                                                       1);
<<<<<<< HEAD
//        const int bufSize = av_samples_get_buffer_size(NULL,
//                                                       audioManager.numOutputChannels,
//                                                       (int)(_filt_frame->nb_samples),
//                                                       AV_SAMPLE_FMT_S16,
//                                                       1);
        
//      ≠≠   NSLog(@" buff size audioManager.numOutputChannels  _audioFrame->nb_samples ,%d,%d,%d  ",bufSize,audioManager.numOutputChannels,_filt_frame->nb_samples);
=======
        
        
         NSLog(@" buff size audioManager.numOutputChannels  _audioFrame->nb_samples ,%d,%d,%d  ",bufSize,audioManager.numOutputChannels,_audioFrame->nb_samples);
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
     //   NSLog(@" this  is in handle audio frame");
        if (!_swrBuffer || _swrBufferSize < bufSize) {
            _swrBufferSize = bufSize;
            _swrBuffer = realloc(_swrBuffer, _swrBufferSize);
//            NSLog(@" thi  is   in  relloc" );
        }
        
        
//        NSLog(@" the_swrBuffer buffsize % ",_swrBuffer->length);
        Byte *outbuf[2] = { _swrBuffer, 0 };
        
<<<<<<< HEAD
        numFrames = swr_convert(_swrContext,outbuf,(int)(_filt_frame->nb_samples* ratio),
                                ( const uint8_t **)_filt_frame->data,
                                _filt_frame->nb_samples);
=======
        if ((uint8_t **)_audioFrame->data==nil) {
//            NSLog(@"      ")
            return NULL;
        }
        numFrames = swr_convert(_swrContext,outbuf,(int)(_audioFrame->nb_samples * ratio),
                                (const uint8_t **)_audioFrame->data,
                                _audioFrame->nb_samples);
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
        
        if (numFrames < 0) {
            LoggerAudio(0, @"fail resample audio");
            return nil;
        }
        audioData = _swrBuffer;
        
    } else {
    
        if (_audioCodecCtx->sample_fmt != AV_SAMPLE_FMT_S16) {
            NSAssert(false, @"bucheck, audio format is invalid");
            return nil;
        }
        
        audioData = _filt_frame->data[0];
        numFrames = _filt_frame->nb_samples;
    }
    
    const NSUInteger numElements = numFrames * numChannels;
    NSMutableData *data = [NSMutableData dataWithLength:numElements * sizeof(float)];
    
    float scale = 1.0 / (float)INT16_MAX ;
    vDSP_vflt16((SInt16 *)audioData, 1, data.mutableBytes, 1, numElements);
    vDSP_vsmul(data.mutableBytes, 1, &scale, data.mutableBytes, 1, numElements);
    
    MoboAudioFrame *frame = [[MoboAudioFrame alloc] init];
<<<<<<< HEAD
//    NSLog(@"handle _playerspeed %.4f",_playerspeed);
    frame.position = av_frame_get_best_effort_timestamp(_filt_frame) * _audioTimeBase/_playerspeed;
    frame.duration = av_frame_get_pkt_duration(_filt_frame) * _audioTimeBase/_playerspeed;
=======
    frame.position = av_frame_get_best_effort_timestamp(_audioFrame) * _audioTimeBase/_playerspeed;
    frame.duration = av_frame_get_pkt_duration(_audioFrame) * _audioTimeBase/_playerspeed;
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
    frame.samples = data;
    
    if (frame.duration == 0) {
        // sometimes ffmpeg can't determine the duration of audio frame
        // especially of wma/wmv format
        // so in this case must compute duration
<<<<<<< HEAD
        frame.duration = frame.samples.length / (sizeof(float) * numChannels * audioManager.samplingRate/_playerspeed);
//        frame.duration =frame.duration;
    }
//     av_frame_unref(_filt_frame);
//    _filt_frame =nil;
#ifdef DEBUG
=======
        frame.duration = frame.samples.length / (sizeof(float) * numChannels * audioManager.samplingRate);
        frame.duration =frame.duration/_playerspeed;
    }
//     av_frame_unref(_filt_frame);
//    _audioFrame =nil;
#ifdef DEBUG1
>>>>>>> 3938e94d2c955167dabece781fdd852e5081ffa0
    LoggerAudio(2, @"AFD: %.4f %.4f | %.4f  | %f | %.4f",
                frame.position,
                frame.duration,
                frame.samples.length / (8.0 * 44100.0),
                audioManager.samplingRate,
                _audioTimeBase
                );
    
    NSLog(@"===========================");
#endif
    
    return frame;
}

// Converts a decoded AVSubtitle into a MoboSubtitleFrame.
// Concatenates the text of all rects: plain-text rects are appended
// verbatim; ASS rects are run through the dialogue parser and stripped
// of inline styling commands. Returns nil when no displayable text
// remains. Position/duration are scaled by the current playback speed
// to match the audio/video frame timelines.
- (MoboSubtitleFrame *) handleSubtitle: (AVSubtitle *)pSubtitle
{
    NSMutableString *ms = [NSMutableString string];
    
    for (NSUInteger i = 0; i < pSubtitle->num_rects; ++i) {
       
        AVSubtitleRect *rect = pSubtitle->rects[i];
        if (rect) {
            
            if (rect->text) { // rect->type == SUBTITLE_TEXT
                
                NSString *s = [NSString stringWithUTF8String:rect->text];
                if (s.length) [ms appendString:s];
                
            } else if (rect->ass) {
                NSString *s = [NSString stringWithUTF8String:rect->ass];
                
                if (s.length) {
                    // The event text is the last field of the Dialogue line.
                    NSArray *fields = [MoboPlayerSubtitleASSParser parseDialogue:s numFields:_subtitleASSEvents];
                    if (fields.count && [fields.lastObject length]) {
                       
                        s = [MoboPlayerSubtitleASSParser removeCommandsFromEventText: fields.lastObject];
                        if (s.length) [ms appendString:s];
                    }                    
                }
            }
        }
    }
    
    if (!ms.length)
        return nil;
    
    MoboSubtitleFrame *frame = [[MoboSubtitleFrame alloc] init];
    frame.text = [ms copy];
    // AVSubtitle.pts is expressed in AV_TIME_BASE (microsecond) units and
    // start_display_time is in MILLISECONDS relative to pts (see FFmpeg's
    // AVSubtitle documentation). The previous code performed integer
    // division (truncating the position to whole seconds) and added
    // start_display_time without converting ms -> s.
    frame.position = (CGFloat)pSubtitle->pts / AV_TIME_BASE
                   + (CGFloat)pSubtitle->start_display_time / 1000.f;
    frame.position = frame.position / _playerspeed;
    frame.duration = (CGFloat)(pSubtitle->end_display_time - pSubtitle->start_display_time) / 1000.f;
    frame.duration = frame.duration / _playerspeed;
    
#if 0
    LoggerStream(2, @"SUB: %.4f %.4f | %@",
          frame.position,
          frame.duration,
          frame.text);
#endif
    
    return frame;    
}

// Polled by FFmpeg's AVIOInterruptCallback (see interrupt_callback below).
// Returns YES to abort any blocking demuxer I/O; NO when no callback is set.
- (BOOL) interruptDecoder
{
    return _interruptCallback ? _interruptCallback() : NO;
}

#pragma mark - public

// Selects the output pixel format for decoded video frames.
// YUV is honored only when the open codec actually produces 4:2:0 planar
// output; otherwise the decoder falls back to RGB. Returns YES when the
// requested format was accepted.
- (BOOL) setupVideoFrameFormat: (MoboVideoFrameFormat) format
{
    const BOOL codecDeliversYUV420 =
        _videoCodecCtx &&
        (_videoCodecCtx->pix_fmt == AV_PIX_FMT_YUV420P ||
         _videoCodecCtx->pix_fmt == AV_PIX_FMT_YUVJ420P);

    if (format == MoboVideoFrameFormatYUV && codecDeliversYUV420) {
        _videoFrameFormat = MoboVideoFrameFormatYUV;
        return YES;
    }

    _videoFrameFormat = MoboVideoFrameFormatRGB;
    return _videoFrameFormat == format;
}
// Updates the playback speed and rebuilds the audio filtergraph with a
// matching atempo filter.
// NOTE(review): FFmpeg's atempo filter only accepts a limited tempo range
// (historically [0.5, 2.0]); out-of-range speeds will make the filtergraph
// initialization fail — confirm callers clamp the value.
-(void) setspeedfilter:(Float64) speed
{
    _playerspeed = speed;
    // snprintf guards against overflow: the old sprintf into a 20-byte
    // buffer could overrun, since "atempo=" plus "%f" of a large double
    // easily exceeds 20 characters.
    char buf[64];
    snprintf(buf, sizeof(buf), "atempo=%f", speed);
    [self init_filters:buf];
}
// Reads packets from the demuxer and decodes them until at least
// `minDuration` seconds of frames have accumulated (measured on the
// video stream, or on audio when no video stream is open), or until EOF.
//
// Returns a mutable array of Mobo*Frame objects (video, audio, artwork,
// subtitle), or nil when neither a video nor an audio stream is open.
// Sets _isEOF when av_read_frame fails. Serialized via _decodelock.
- (NSArray *) decodeFrames: (CGFloat) minDuration
{
    
    [_decodelock lock];
    
    // Nothing to decode without at least one open A/V stream.
    if (_videoStream == -1 &&
        _audioStream == -1)
    {
        [_decodelock unlock];
        return nil;
    }
    

    NSMutableArray *resultframes = [NSMutableArray array];
    AVPacket packet;
    
    CGFloat decodedDuration = 0;
    
    BOOL finished = NO;
    
    while (!finished) {
        
        // av_read_frame < 0 covers both EOF and read errors; both end decoding.
        if (av_read_frame(_formatCtx, &packet) < 0) {
            _isEOF = YES;
            break;
        }
        if (packet.stream_index ==_videoStream) {
            // FFmpeg 3.1+ send/receive API. The send result is intentionally
            // not acted on: a failed send simply produces no frames below.
            int len = avcodec_send_packet(_videoCodecCtx, &packet);
            
            do {
                // 0 == got a frame; AVERROR(EAGAIN)/AVERROR_EOF terminate the drain.
                len  = avcodec_receive_frame(_videoCodecCtx, _videoFrame);
                if(len == 0)
                {
                    MoboVideoFrame *frame = [self handleVideoFrame];
                    if (frame) {
                        [resultframes addObject:frame];
                        // The video stream drives playback position and
                        // the minDuration accounting.
                        _position = frame.position;
                        decodedDuration += frame.duration;
                    }
                }
                
            } while (len == 0);
            if (decodedDuration > minDuration)
                finished = YES;
            
        } else if (packet.stream_index == _audioStream) {
                        
            int len = avcodec_send_packet(_audioCodecCtx, &packet);
            
            do {
                len  = avcodec_receive_frame(_audioCodecCtx, _audioFrame);
                if(len == 0)
                {
                    MoboAudioFrame * frame = [self handleAudioFrame];
                    if (frame) {
                        
                        [resultframes addObject:frame];
                        
                        // Audio drives position/duration only for
                        // audio-only media (no video stream).
                        if (_videoStream == -1) {
                            
                            _position = frame.position;
                            decodedDuration += frame.duration;
                        }
                    }
                }
                
            } while (len == 0);
            if (decodedDuration > minDuration)
                finished = YES;
        } else if (packet.stream_index == _artworkStream) {
            
            // Attached picture (cover art): the packet payload is the
            // complete encoded image; copy it out as-is.
            if (packet.size) {

                MoboArtworkFrame *frame = [[MoboArtworkFrame alloc] init];
                frame.picture = [NSData dataWithBytes:packet.data length:packet.size];
                [resultframes addObject:frame];
            }
            
        } else
        {
            
            // Any other stream index may belong to one of the managed
            // subtitle streams; only streams currently marked as decoding
            // (and not in seek/search mode) are processed.
            for (SubtitleDataClass * msubtitledata in _SubtitleStreamCodecMangeArray)
            {
                
                if (msubtitledata.is_decoding == true )
                {
                    
                    if (!msubtitledata.is_seaching) {
                        if (packet.stream_index  != msubtitledata.insubtitle_index) {
                            continue;
                        }
                        int pktSize = packet.size;
                        
                        // NOTE(review): packet.data is never advanced inside
                        // this loop, so a partial consume (0 < len < pktSize)
                        // would re-feed the same bytes — confirm the subtitle
                        // codecs in use always consume whole packets.
                        while (pktSize > 0)
                        {
                            
                            AVSubtitle subtitle;
                            int gotsubtitle = 0;
                            int len = avcodec_decode_subtitle2(msubtitledata.subtitlestream->codec,
                                                               &subtitle,
                                                               &gotsubtitle,
                                                               &packet);
                            
                            if (len < 0) {
                                LoggerStream(0, @"decode subtitle error, skip packet");
                                break;
                            }
                            
                            if (gotsubtitle) {
                                
                                MoboSubtitleFrame *frame = [self handleSubtitle: &subtitle];
                                if (frame) {
                                    [resultframes addObject:frame];
                                }
                                // AVSubtitle owns heap-allocated rects;
                                // release them once converted.
                                avsubtitle_free(&subtitle);
                            }
                            if (0 == len)
                                break;
                            pktSize -= len;
                        }
                    }
//                    else{
//                        
//                        for (MoboSubtitleFrame *frame in msubtitledata.avsubtitlearry) {
//                            
//                            if () {
//                                <#statements#>
//                            }
//                        }
//                        
//                    }
                    
                }
            }
        }

        // Packets are refcounted; release after every consumer is done.
        av_packet_unref(&packet);
	}
    
    [_decodelock unlock];
    return resultframes;
}

@end

//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////

// FFmpeg AVIOInterruptCallback trampoline: the decoder instance is stashed
// in the callback's opaque context pointer, and a nonzero return aborts
// the blocking I/O operation in progress.
static int interrupt_callback(void *ctx)
{
    if (!ctx)
        return 0;

    __unsafe_unretained MoboPlayerDecoder *decoder = (__bridge MoboPlayerDecoder *)ctx;
    const BOOL interrupted = [decoder interruptDecoder];
    if (interrupted)
        LoggerStream(1, @"DEBUG: INTERRUPT_CALLBACK!");
    return interrupted;
}

//////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////

@implementation MoboPlayerSubtitleASSParser

// Extracts the comma-separated field names from the "Format:" line of the
// [Events] section of an ASS script, trimmed of surrounding whitespace.
// Returns nil when the section, the Format line, or its terminating
// newline cannot be found.
+ (NSArray *) parseEvents: (NSString *) events
{
    NSRange section = [events rangeOfString:@"[Events]"];
    if (section.location == NSNotFound)
        return nil;

    NSUInteger cursor = NSMaxRange(section);
    NSRange formatTag = [events rangeOfString:@"Format:"
                                      options:0
                                        range:NSMakeRange(cursor, events.length - cursor)];
    if (formatTag.location == NSNotFound)
        return nil;

    cursor = NSMaxRange(formatTag);
    NSRange lineEnd = [events rangeOfCharacterFromSet:[NSCharacterSet newlineCharacterSet]
                                              options:0
                                                range:NSMakeRange(cursor, events.length - cursor)];
    if (lineEnd.location == NSNotFound)
        return nil;

    NSString *formatLine = [events substringWithRange:NSMakeRange(cursor, lineEnd.location - cursor)];
    NSArray *rawFields = [formatLine componentsSeparatedByString:@","];
    if (rawFields.count == 0)
        return nil;

    NSCharacterSet *whitespace = [NSCharacterSet whitespaceCharacterSet];
    NSMutableArray *trimmed = [NSMutableArray arrayWithCapacity:rawFields.count];
    for (NSString *field in rawFields)
        [trimmed addObject:[field stringByTrimmingCharactersInSet:whitespace]];
    return trimmed;
}

// Splits an ASS "Dialogue:" line into at most numFields comma-separated
// fields (fields are NOT whitespace-trimmed), translating the ASS "\N"
// line-break escape into a real newline. Returns nil for non-Dialogue
// input. The event text is conventionally the last field.
+ (NSArray *) parseDialogue: (NSString *) dialogue
                  numFields: (NSUInteger) numFields
{
    if (![dialogue hasPrefix:@"Dialogue:"])
        return nil;

    NSMutableArray *fields = [NSMutableArray array];
    NSRange comma = {@"Dialogue:".length, 0};
    NSUInteger parsed = 0;

    while (comma.location != NSNotFound && parsed++ < numFields) {

        const NSUInteger start = NSMaxRange(comma);
        comma = [dialogue rangeOfString:@","
                                options:0
                                  range:NSMakeRange(start, dialogue.length - start)];

        // The final field runs to the end of the line.
        const NSUInteger length = (comma.location == NSNotFound)
                                ? dialogue.length - start
                                : comma.location - start;

        NSString *field = [dialogue substringWithRange:NSMakeRange(start, length)];
        field = [field stringByReplacingOccurrencesOfString:@"\\N" withString:@"\n"];
        [fields addObject:field];
    }

    return fields;
}

// Strips inline ASS override blocks ("{\...}") from event text,
// returning only the displayable characters.
+ (NSString *) removeCommandsFromEventText: (NSString *) text
{
    NSMutableString *plain = [NSMutableString string];
    NSScanner *scanner = [NSScanner scannerWithString:text];

    while (![scanner isAtEnd]) {

        NSString *chunk = nil;
        if ([scanner scanUpToString:@"{\\" intoString:&chunk])
            [plain appendString:chunk];

        // Consume one complete "{\...}" command block; anything else
        // (including end of string) terminates the scan.
        const BOOL skippedCommand = [scanner scanString:@"{\\" intoString:nil] &&
                                    [scanner scanUpToString:@"}" intoString:nil] &&
                                    [scanner scanString:@"}" intoString:nil];
        if (!skippedCommand)
            break;
    }

    return plain;
}

@end

// FFmpeg log callback (registered via av_log_set_callback): renders the
// message and routes it into LoggerStream at a verbosity matching the
// FFmpeg severity.
//
// FFmpeg AV_LOG_* levels are multiples of 8 (PANIC=0, FATAL=8, ERROR=16,
// WARNING=24, INFO=32, VERBOSE=40, DEBUG=48, TRACE=56). The previous
// switch compared `level` against 0..4, which matched nothing but PANIC,
// so errors and warnings all fell through to the lowest-priority stream.
static void FFLog(void* context, int level, const char* format, va_list args) {
    @autoreleasepool {
        // Render the message, then trim the trailing newline FFmpeg appends.
        NSString *message = [[NSString alloc] initWithFormat:[NSString stringWithUTF8String:format]
                                                   arguments:args];
        NSString *trimmed = [message stringByTrimmingCharactersInSet:[NSCharacterSet newlineCharacterSet]];

        if (level <= AV_LOG_ERROR) {            // PANIC, FATAL, ERROR
            LoggerStream(0, @"%@", trimmed);
        } else if (level <= AV_LOG_WARNING) {
            LoggerStream(1, @"%@", trimmed);
        } else if (level <= AV_LOG_INFO) {
            LoggerStream(2, @"%@", trimmed);
        } else {                                // VERBOSE, DEBUG, TRACE
            LoggerStream(3, @"%@", trimmed);
        }
    }
}

