#import "RTSPPlayer.h"
#import "Utilities.h"
#import "AudioStreamer.h"

#ifndef AVCODEC_MAX_AUDIO_FRAME_SIZE
# define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio
#endif

// Class extension: private properties hidden from the public header.
@interface RTSPPlayer ()
// Plays back decoded audio; owned (retained) by this player — MRC file.
@property (nonatomic, retain) AudioStreamer *audioController;
@end

// Private category: internal helpers for frame conversion and image export.
@interface RTSPPlayer (private)
// Converts the most recently decoded YUV frame (pFrame) into the RGB `picture` buffer.
-(void)convertFrameToRGB;
// Wraps an RGB AVPicture's pixels in a UIImage without copying them.
-(UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height;
// NOTE(review): declared here but the implementation below is named
// savePPMPicture:width:height:index: — confirm this selector is actually used.
-(void)savePicture:(AVPicture)pFrame width:(int)width height:(int)height index:(int)iFrame;
// (Re)creates the swscale context and RGB output picture for the current output size.
-(void)setupScaler;
@end

@implementation RTSPPlayer

// Explicit property synthesis (file predates auto-synthesis / compiled without ARC).
@synthesize audioController = _audioController;
@synthesize audioPacketQueue,audioPacketQueueSize;
@synthesize _audioStream,_audioCodecContext;
@synthesize emptyAudioBuffer;

@synthesize outputWidth, outputHeight;

// Custom setter: a change in output width invalidates and rebuilds the scaler.
- (void)setOutputWidth:(int)newValue
{
    if (outputWidth == newValue) return;
    outputWidth = newValue;
    [self setupScaler];
}

// Custom setter: a change in output height invalidates and rebuilds the scaler.
- (void)setOutputHeight:(int)newValue
{
    if (outputHeight == newValue) return;
    outputHeight = newValue;
    [self setupScaler];
}

// Returns the latest decoded frame as a UIImage, or nil when nothing has been
// decoded yet (pFrame->data[0] is still NULL).
- (UIImage *)currentImage
{
    if (pFrame->data[0] == NULL) {
        return nil;
    }
    [self convertFrameToRGB];
    return [self imageFromAVPicture:picture width:outputWidth height:outputHeight];
}

// Container duration in seconds (pFormatCtx->duration is in AV_TIME_BASE units).
- (double)duration
{
    double seconds = (double)pFormatCtx->duration / AV_TIME_BASE;
    return seconds;
}

// Presentation time, in seconds, of the most recently read packet, converted
// from the video stream's time base.
- (double)currentTime
{
    AVRational tb = pFormatCtx->streams[videoStream]->time_base;
    double seconds = packet.pts * (double)tb.num / tb.den;
    return seconds;
}

// Native width of the source video, as reported by the codec context.
- (int)sourceWidth
{
    int width = pCodecCtx->width;
    return width;
}

// Native height of the source video, as reported by the codec context.
- (int)sourceHeight
{
    int height = pCodecCtx->height;
    return height;
}

// Designated initializer: opens the stream at moviePath (local file or RTSP
// URL), locates the first video/audio streams, and opens the video decoder.
// usesTcp forces RTSP over TCP instead of UDP. Returns nil on any failure.
- (id)initWithVideo:(NSString *)moviePath usesTcp:(BOOL)usesTcp
{
    if (!(self=[super init])) return nil;
 
    AVCodec         *pCodec;
        
    // One-time global FFmpeg registration (legacy API; harmless if repeated).
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    
    // Optional RTSP transport override: TCP avoids UDP packet loss.
    AVDictionary *opts = 0;
    if (usesTcp)
        av_dict_set(&opts, "rtsp_transport", "tcp", 0);

    int openResult = avformat_open_input(&pFormatCtx, [moviePath UTF8String], NULL, &opts);
    // fix: the options dictionary was previously leaked — avformat_open_input
    // consumes the entries it recognizes but the caller must still free it.
    av_dict_free(&opts);
    if (openResult != 0) {
        av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
        goto initError;
    }
    
    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx,NULL) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Couldn't find stream information\n");
        goto initError;
    }
    
    // Find the video and audio streams (no break: the last match of each
    // type wins, preserving the original selection behavior).
    videoStream=-1;
    audioStream=-1;

    for (int i=0; i<pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
            NSLog(@"found video stream");
            videoStream=i;
        }
        
        if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO) {
            audioStream=i;
            NSLog(@"found audio stream");
        }
    }
    
    // fix: the code below unconditionally dereferences streams[videoStream],
    // so a missing video stream must fail here. Previously an audio-only
    // input passed the check and indexed streams[-1] (undefined behavior).
    if (videoStream==-1) {
        goto initError;
    }

    // Get a pointer to the codec context for the video stream
    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    
    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        av_log(NULL, AV_LOG_ERROR, "Unsupported codec!\n");
        goto initError;
    }
    
    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot open video decoder\n");
        goto initError;
    }
    
    if (audioStream > -1 ) {
        NSLog(@"set up audiodecoder");
        [self setupAudioDecoder];
    }
    
    // Reusable frame that decoded video is written into.
    pFrame = avcodec_alloc_frame();
    NSLog(@"initWithVideo=========================");
            
    // Default output size to the source size; assigning through the property
    // triggers -setupScaler once both dimensions are set.
    outputWidth = pCodecCtx->width;
    self.outputHeight = pCodecCtx->height;
    
    // Recording state; flag_end_record serializes -saveRtspVideo/-closeRecord.
    pthread_mutex_init(&flag_end_record, NULL);
    
    outputAVFormatContext = NULL;
    output_audio_stream = NULL;
    output_video_stream = NULL;
    recordVideoDTS=0;
    recordAudioDTS=0;
    bsfc = NULL;


    return self;
    
initError:
    [self release];
    return nil;
}


// (Re)builds the RGB destination buffer and swscale context for the current
// outputWidth/outputHeight. Safe to call repeatedly: the previous picture and
// context are released first (both free functions accept NULL/zeroed input).
- (void)setupScaler
{
    // Drop any previous output picture and conversion context.
    avpicture_free(&picture);
    sws_freeContext(img_convert_ctx);

    // RGB24 destination buffer at the requested output size.
    avpicture_alloc(&picture, PIX_FMT_RGB24, outputWidth, outputHeight);

    // Scaler: source dimensions/pixel format -> RGB24 at the output size.
    static int sws_flags = SWS_FAST_BILINEAR;
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                     outputWidth, outputHeight, PIX_FMT_RGB24,
                                     sws_flags, NULL, NULL, NULL);
}

// Seeks to approximately `seconds` from the start of the video stream, then
// flushes the decoder so stale reference frames are not emitted.
- (void)seekTime:(double)seconds
{
    AVRational timeBase = pFormatCtx->streams[videoStream]->time_base;
    // Convert seconds into the stream's time-base units.
    // NOTE(review): the value is passed with AVSEEK_FLAG_FRAME, which tells
    // FFmpeg to treat it as a frame number, not a timestamp — confirm this
    // combination seeks as intended for these streams.
    int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
    avformat_seek_file(pFormatCtx, videoStream, targetFrame, targetFrame, targetFrame, AVSEEK_FLAG_FRAME);
    avcodec_flush_buffers(pCodecCtx);
}

// MRC teardown: releases FFmpeg resources, audio playback objects, and the
// recording mutex. This file is compiled without ARC (explicit release and
// [super dealloc]).
- (void)dealloc
{
    // Free scaler
    sws_freeContext(img_convert_ctx);

    // Free RGB picture
    avpicture_free(&picture);
    
    // Free the packet that was allocated by av_read_frame
    av_free_packet(&packet);
    
    // Free the YUV frame
    av_free(pFrame);
    
    // fix: the audio decode buffer allocated in -setupAudioDecoder was leaked.
    if (_audioBuffer) {
        av_free(_audioBuffer);
        _audioBuffer = NULL;
    }
    
    // Close the codec
    if (pCodecCtx) avcodec_close(pCodecCtx);
    
    // Close the video file
    if (pFormatCtx) avformat_close_input(&pFormatCtx);

    [_audioController _stopAudio];
    [_audioController release];
    _audioController = nil;
    
    [audioPacketQueue release];
    audioPacketQueue = nil;
    
    [audioPacketQueueLock release];
    audioPacketQueueLock = nil;
    pthread_mutex_destroy(&flag_end_record);
    
    [super dealloc];
}

// Reads packets from the demuxer until one complete video frame has been
// decoded into pFrame (or until EOF/read error). Each video packet is also
// forwarded to -saveRtspVideo so it can be muxed while recording is active.
// Returns YES when a complete frame is available.
- (BOOL)stepFrame
{
    // AVPacket packet;
    int frameFinished=0;

    while (!frameFinished && av_read_frame(pFormatCtx, &packet) >=0 ) {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream) {
            //....
//            avco
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            // Mux this packet into the MP4 file when recording is active
            // (must happen before av_free_packet below releases the data).
            [self saveRtspVideo];
            
            
        }
        else
        {
            
        }
        av_free_packet(&packet);
        // NOTE(review): audio packets are currently discarded; the queueing
        // path below is disabled.
        //        if (packet.stream_index==audioStream) {
//            // NSLog(@"audio stream");
//            [audioPacketQueueLock lock];
//
//            audioPacketQueueSize += packet.size;
//            [audioPacketQueue addObject:[NSMutableData dataWithBytes:&packet length:sizeof(packet)]];
//
//            [audioPacketQueueLock unlock];
//
//            if (!primed) {
//                primed=YES;
//                [_audioController _startAudio];
//            }
//
//            if (emptyAudioBuffer) {
//                [_audioController enqueueBuffer:emptyAudioBuffer];
//            }
//        }
    }
//    NSLog(@"while end");
    
    return frameFinished!=0;
}

#pragma mark - Recording: finalize and close the MP4 file in the sandbox

// Finalizes the current recording: writes the MP4 trailer, closes the output
// file, and releases the bitstream filter and muxer context. Serialized by
// flag_end_record; a no-op when no recording is active.
- (void)closeRecord {
    pthread_mutex_lock(&flag_end_record);
    if (NULL != outputAVFormatContext)
    {
        av_write_trailer(outputAVFormatContext);

        if (!(outputAVFormatContext->oformat->flags & AVFMT_NOFILE))
        {
            avio_close(outputAVFormatContext->pb);
            outputAVFormatContext->pb = NULL;
        }

        if (NULL != bsfc)
        {
            av_bitstream_filter_close(bsfc);
            bsfc = NULL;
        }

        // fix: this context was created by avformat_alloc_output_context2, so
        // it is a muxing context; avformat_close_input — which is intended for
        // demuxing contexts — must not be called on it. avformat_free_context
        // alone is the correct teardown.
        avformat_free_context(outputAVFormatContext);
        outputAVFormatContext = NULL;
    }
    pthread_mutex_unlock(&flag_end_record);
}



// Adds a new stream to the output (muxing) context, copying codec parameters
// from the source stream's (deprecated) AVCodecContext. On success returns 0
// and stores the new stream in *ppAVStream; returns a negative code on failure.
//int add_stream(AVFormatContext *pAVFormatCtx, AVStream** ppAVStream, AVMediaType avMediaType, AVCodecParameters *pInCodecParameters, int64_t duration)
- (int)add_stream:(AVFormatContext*)pAVFormatCtx
       ppAVStream:(AVStream**)ppAVStream
      avMediaType:(AVMediaType)avMediaType
pInCodecParameters:(AVCodecContext *)pInCodecParameters
         duration:(int64_t) duration
{
    
    if (NULL == pAVFormatCtx)        return -1;

    AVStream* pAVStream = avformat_new_stream(pAVFormatCtx, NULL);
    if (NULL == pAVStream)                return -2;

    // The new stream is appended, so its index is nb_streams - 1.
    int streamId = pAVFormatCtx->nb_streams - 1;
    //AVCodec AVCodecContext
    AVCodecContext * pAVCodec = pAVStream->codec;

    pAVStream->id = streamId;
    pAVCodec->codec_id = pInCodecParameters->codec_id;
    pAVCodec->codec_type = avMediaType;
    
    // Deep-copy the codec extradata (e.g. H.264 SPS/PPS). The extra 64 zeroed
    // bytes are presumably padding for readers that overrun the buffer —
    // TODO confirm against AV_INPUT_BUFFER_PADDING_SIZE.
    if (pInCodecParameters->extradata_size > 0)
    {
        pAVCodec->extradata = (uint8_t*)av_mallocz(pInCodecParameters->extradata_size + 64);
        if (NULL != pAVCodec->extradata)
        {
            memcpy(pAVCodec->extradata, pInCodecParameters->extradata, pInCodecParameters->extradata_size);
            pAVCodec->extradata_size = pInCodecParameters->extradata_size;
        }
    }
    if (avMediaType == AVMEDIA_TYPE_VIDEO)
    {
        // 90 kHz — the conventional MPEG video clock.
        pAVStream->time_base.den = 90000;
        pAVStream->time_base.num = 1;

        pAVFormatCtx->video_codec_id = pInCodecParameters->codec_id;
        pAVCodec->bit_rate = pInCodecParameters->bit_rate;

        int video_width = pInCodecParameters->width;
        int video_height = pInCodecParameters->height;

        pAVCodec->width = video_width;
        pAVCodec->height = video_height;
        pAVCodec->framerate.den = pInCodecParameters->framerate.den; //1;   // pInCodecParameters->framerate.den;
        pAVCodec->framerate.num = pInCodecParameters->framerate.num; //25;    //pInCodecParameters->framerate.num;
        // Source did not report a frame rate: fall back to 25 fps, or derive
        // it from the packet duration (90 kHz ticks) when available.
        if (pAVCodec->framerate.num < 1)
        {
            pAVCodec->framerate.num = 25;
            if (duration > 0)
            {
                pAVCodec->framerate.num = 90000 / duration;
            }
        }

        //if (pInCodecParameters->codec_id == AV_CODEC_ID_HEVC)
        //{
        //    //pAVCodec->codec_tag = MKTAG('H', '2', '6', '5');
        //    pAVCodec->codec_tag = MKTAG('H', 'E', 'V', 'C');
        //}


        //pAVCodec->sample_aspect_ratio

        //pAVCodec->time_base.num = 1;
        //pAVCodec->time_base.den = (pFFPusherCore->mediaInfo.videoFps > 0 ? pFFPusherCore->mediaInfo.videoFps : 25);
        //pAVCodec->gop_size = 12;
        //pAVCodec->pix_fmt = AV_PIX_FMT_NV12;
    }
    else if (avMediaType == AVMEDIA_TYPE_AUDIO)
    {
        pAVFormatCtx->audio_codec_id = pInCodecParameters->codec_id;

        //pAVCodec->sample_fmt = pCodec->sample_fmts ?  pCodec->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
        //pAVCodec->sample_fmt = AV_SAMPLE_FMT_FLTP;
        pAVCodec->bit_rate = pInCodecParameters->bit_rate;
        pAVCodec->sample_rate = pInCodecParameters->sample_rate;
        pAVCodec->channels = pInCodecParameters->channels;
        pAVCodec->channel_layout = pInCodecParameters->channel_layout;
        /*
        pAVCodec->time_base.num = 1;
        pAVCodec->time_base.den = 8;
        pAVCodec->frame_size = 1024;
        pAVCodec->profile = 2;    //1(Main)   2(LW)   3(SSR)
        */
        //pAVCodec->pkt_timebase.den = 90000;//125;
        //pAVCodec->pkt_timebase.num = 1;
    }

    //pAVCodec->codec_tag = 0;
    // Some formats want stream headers to be separate.
    // NOTE(review): the GLOBAL_HEADER flag is intentionally left unset here —
    // confirm the mp4 muxer produces valid output without it for these inputs.
    if (pAVFormatCtx->oformat->flags & AVFMT_GLOBALHEADER) {
        //pAVCodec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }

    *ppAVStream = pAVStream;
    return 0;
}


// Muxes the current video `packet` into the MP4 file being recorded.
// Lazily creates the output context on the first keyframe after recording
// starts; converts H.264 to Annex B via the bitstream filter; rewrites
// timestamps with a fixed 3600-tick (90kHz / 25fps) increment.
// Returns 0 on success or when skipped; a negative code on error (latched in
// recordErrCode so subsequent calls stop retrying).
- (int)saveRtspVideo {
    if (recordMP4 == 0) // set to 0 when stopped; 1 while recording
    {
        [self closeRecord];
        return 0;
    }
    if (recordErrCode < 0) return 0;     // a prior internal error occurred; do not retry

    if (packet.stream_index != videoStream) return 0;
    
//    NSLog(@"saveRtspVideo---------------");
    int ret = 0;
    //AVPacket	  newPkt = packet;
    AVPacket avPacket;
    av_init_packet(&avPacket);
//    //avPacket = packet;
    // Take an independent reference so the bitstream filter / muxer can
    // modify the packet without touching the demuxer's copy.
    av_packet_ref(&avPacket, &packet);

//    av_packet_unref(&avPacket);
//    return 0;
    
    
    //avPacket.data = NULL;
    //avPacket.size = 0;
    //AVPacket *avPacket = av_packet_clone(packet);
    do
    {
        // First pass with no output context yet: create the muxer, but only
        // once a keyframe arrives so the file starts decodable.
        if (NULL == outputAVFormatContext)
        {
            if (0 == strcmp(filename, "\0"))  break;  // no output file name set yet
            
            if(!(packet.flags&AV_PKT_FLAG_KEY)) { // ensure the first recorded frame is a keyframe
                break;
            }
            NSLog(@"开始录制------------");
            avformat_alloc_output_context2(&outputAVFormatContext, NULL, "mp4", filename);
            if (!outputAVFormatContext) {
                printf("Could not deduce output format from file extension: using MPEG.\n");
                avformat_alloc_output_context2(&outputAVFormatContext, NULL, "mpeg", filename);
            }
            if (!outputAVFormatContext) {
                ret = -2;
                NSLog(@"error------ret= -2------");
                break;
            }
            //mMfvpSourceCore.recorder.fmt = (AVOutputFormat*)outputAVFormatContext->oformat;

            ret = avio_open(&outputAVFormatContext->pb, filename, AVIO_FLAG_WRITE);
            if (ret < 0)
            {
                avformat_free_context(outputAVFormatContext);
                outputAVFormatContext = NULL;
                ret = -200;
                NSLog(@"error------ret= -200------");
                break;
            }

            //if (mMfvpSourceCore.recorder.fmt->video_codec != AV_CODEC_ID_NONE) {
            if (outputAVFormatContext->oformat->video_codec != AV_CODEC_ID_NONE && NULL != pFormatCtx->streams[videoStream]->codec)
            {
//                TODO: this was a C function call originally — verify the ObjC message send behaves identically
//                if (add_stream(outputAVFormatContext, &output_video_stream, AVMEDIA_TYPE_VIDEO, pFormatCtx->streams[videoStream]->codec, packet->duration) < 0)        //AV_CODEC_ID_H264
               if([self add_stream:outputAVFormatContext ppAVStream:&output_video_stream avMediaType:AVMEDIA_TYPE_VIDEO pInCodecParameters:pFormatCtx->streams[videoStream]->codec
                          duration:packet.duration] < 0)
                {
                    avio_close(outputAVFormatContext->pb);
                    avformat_free_context(outputAVFormatContext);
                    outputAVFormatContext = NULL;
                    ret = -201;
                    NSLog(@"error------ret= -201------");
                    break;
                }


		        // NOTE(review): the codec-id guard is commented out, so the
		        // h264_mp4toannexb filter is created unconditionally — confirm
		        // this is intended for non-H.264 streams.
		        //if (pFormatCtx->streams[videoStream]->codec->codec_id==AV_CODEC_ID_H264)
			        bsfc = av_bitstream_filter_init("h264_mp4toannexb");

                if (!(outputAVFormatContext->flags & AVFMT_NOFILE))
                {
                    av_dump_format(outputAVFormatContext, 0, filename, 1);
                }
                recordVideoDTS = 0;

                if (ret == 0)
                {
                    ret = avformat_write_header(outputAVFormatContext, NULL);
                    NSLog(@"开始录制------------avformat_write_header");
                    if (ret != 0)
                    {
                        ret = -203;
                        NSLog(@"error------ret= -203------");
                        break;
                    }
                }
            }
        }
        // end outputavc null
        
        
        
        if (NULL != outputAVFormatContext)
        {

            //printf("pts:%lld      dts:%lld\n", avPacket->pts, avPacket->dts);

            //int64_t dts = avPacket->dts;
            //int64_t pts = avPacket->pts;

            //avPacket->pts = pts / 90;
            //avPacket->dts = dts / 90;

            // Convert from MP4 (AVCC) to Annex B so the mp4 muxer receives
            // start-code delimited NALs; the filter may allocate a new
            // data buffer into avPacket.data/size.
				if (NULL != bsfc)
				{
#if 1
                    AVCodecContext *pCodec =pFormatCtx->streams[videoStream]->codec;
                    
                    
					ret = av_bitstream_filter_filter(bsfc, pCodec,
												NULL, &avPacket.data, &avPacket.size, packet.data, packet.size, packet.flags&AV_PKT_FLAG_KEY);
#else
					ret = av_bitstream_filter_filter(bsfc, ffsource->pAVFormatCtx->streams[avPacket.stream_index]->codec,
												NULL, &newPkt->data, &newPkt->size, avPacket.data, avPacket.size, avPacket.flags&AV_PKT_FLAG_KEY);
#endif
				}


//            if (avMediaType == AVMEDIA_TYPE_VIDEO)
            {
                if(output_video_stream == nil) break;
//                newPkt.stream_index = output_video_stream->index; // bug
                avPacket.stream_index = 0;
                
//                if (packet.duration > 0)
//                {
//                    recordVideoDTS += packet.duration;
//                }
//                else
//                {
////                    recordVideoDTS += (output_video_stream->r_frame_rate.num * 90);
//                    recordVideoDTS += 3600;
//                }
                // Fixed timestamp step: 3600 ticks at 90kHz == 25 fps.
                recordVideoDTS += 3600;
                
                avPacket.dts = recordVideoDTS;
                avPacket.pts = recordVideoDTS;
                //newPkt->duration = 40;

//                printf("avPacket->flags: %d\n", avPacket->flags);

                //avPacket->pos = 0;
                //avPacket->flags = 1;
                //avPacket->time_base.den = 25;
                //avPacket->time_base.num = 1;
            }
//            else if (avMediaType == AVMEDIA_TYPE_AUDIO)
//            {
////                avPacket->stream_index = output_audio_stream->index;
//            }
            ret = av_interleaved_write_frame(outputAVFormatContext, &avPacket);
//            NSLog(@"开始录制------------av_interleaved_write_frame ret=%d, dts=%lld", ret, recordVideoDTS);
            
            //avPacket->pts = pts;
            //avPacket->dts = dts;

            if (ret != 0)
            {
                NSLog(@"av_interleaved_write_frame error...");

                ret = -1000;
                break;
            }

            ret = 0;
        }

    }while (0);
    // NOTE(review): av_packet_unref clears avPacket.data, so the av_free call
    // after it frees NULL; a buffer malloc'd by av_bitstream_filter_filter may
    // leak here — verify the intended free order.
    av_packet_unref(&avPacket);
    av_free(avPacket.data);

    
    recordErrCode = ret;
//    NSLog(@"开始录制------------avformat_write_header end");
//    av_free_packet(&newPkt);
    return ret;
}


#pragma mark - Recording: start — raise the recording flag and set the file name

// Begins MP4 recording: builds a timestamped file name under
// NSTemporaryDirectory() and raises recordMP4; -saveRtspVideo performs the
// actual muxing as packets arrive.
- (void)startRecordRtspVideo {
    NSString *docPath = NSTemporaryDirectory();
    // Timestamped file name, e.g. "2024-01-31_12:00:00.mp4".
    NSDateFormatter *dateFormater = [[NSDateFormatter alloc]init];
    [dateFormater setDateFormat:@"yyyy-MM-dd_HH:mm:ss"];
    NSString *currentDateStr = [dateFormater stringFromDate:[NSDate date]];
    [dateFormater release]; // fix: formatter was leaked under MRC
    NSString *fStr = [NSString stringWithFormat:@"%@.mp4", currentDateStr];
    NSLog(@"fstr= %@", fStr);
    // Full path of the output video file.
    NSString *filePath = [docPath stringByAppendingPathComponent:fStr];
    NSLog(@"filePath= %@", filePath); // NSString 转 char filename[]

    // fix: -fileSystemRepresentation is the correct encoding for POSIX paths;
    // -cStringUsingEncoding:NSASCIIStringEncoding returns NULL for any
    // non-ASCII path, which would make strcpy crash.
    strcpy(filename, [filePath fileSystemRepresentation]);
    NSLog(@"css====%s ",filename);
    recordMP4 = 1;
    NSLog(@"开始视频录像，设置录像中标记，设置文件名称");
    return;
}

#pragma mark  录像结束，修改标记
// Stops recording: lowers the flag first (so -saveRtspVideo stops muxing),
// then finalizes and closes the MP4 file.
// NOTE(review): selector is misspelled ("Rrcord") — kept for caller compatibility.
-(void)stopRrcordRtspVideo {
    recordMP4 = 0;
    [self closeRecord];
    NSLog(@"视频录制，保存完成");
}

// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum: logs the outcome.
-(void)video:(NSString *)videoPath
didFinishSavingWithError:(NSError *)error
 contextInfo:(void *)contextInfo {
    if (error == nil) {
        NSLog(@"保存视频成功");
    } else {
        NSLog(@"保存视频失败：%@", error);
    }
}


// Scales/converts the decoded YUV frame (pFrame) into the RGB `picture`
// buffer using the context built by -setupScaler.
- (void)convertFrameToRGB
{
    sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
              0, pCodecCtx->height,
              picture.data, picture.linesize);
}

// Wraps the RGB24 pixel buffer of `pict` in a UIImage. The bytes are NOT
// copied (kCFAllocatorNull deallocator), so the image is only valid while
// the underlying picture remains allocated.
- (UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height
{
    CFDataRef pixelData = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pict.data[0], pict.linesize[0]*height, kCFAllocatorNull);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(pixelData);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef cgImage = CGImageCreate(width, height,
                                       8,                   // bits per component
                                       24,                  // bits per pixel (RGB24)
                                       pict.linesize[0],    // bytes per row
                                       colorSpace,
                                       kCGBitmapByteOrderDefault,
                                       provider,
                                       NULL,
                                       NO,
                                       kCGRenderingIntentDefault);
    CGColorSpaceRelease(colorSpace);
    UIImage *image = [UIImage imageWithCGImage:cgImage];

    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CFRelease(pixelData);

    return image;
}

// Prepares the audio pipeline for the stream at index audioStream: decode
// buffer, codec context, packet queue + lock, and the AudioStreamer that
// plays the decoded data. Re-entrant: previous state is released first.
- (void)setupAudioDecoder
{
    if (audioStream >= 0) {
        // fix: release any previously allocated buffer instead of leaking it
        // when the decoder is set up more than once.
        if (_audioBuffer) {
            av_free(_audioBuffer);
        }
        _audioBufferSize = AVCODEC_MAX_AUDIO_FRAME_SIZE;
        _audioBuffer = av_malloc(_audioBufferSize);
        _inBuffer = NO;
        
        _audioCodecContext = pFormatCtx->streams[audioStream]->codec;
        _audioStream = pFormatCtx->streams[audioStream];
        
        AVCodec *codec = avcodec_find_decoder(_audioCodecContext->codec_id);
        if (codec == NULL) {
            NSLog(@"Not found audio codec.");
            return;
        }
        
        if (avcodec_open2(_audioCodecContext, codec, NULL) < 0) {
            NSLog(@"Could not open audio codec.");
            return;
        }
        
        // Replace the packet queue and its lock (MRC: release old instances).
        if (audioPacketQueue) {
            [audioPacketQueue release];
            audioPacketQueue = nil;
        }
        audioPacketQueue = [[NSMutableArray alloc] init];
        
        if (audioPacketQueueLock) {
            [audioPacketQueueLock release];
            audioPacketQueueLock = nil;
        }
        audioPacketQueueLock = [[NSLock alloc] init];
        
        if (_audioController) {
            [_audioController _stopAudio];
            [_audioController release];
            _audioController = nil;
        }
        _audioController = [[AudioStreamer alloc] initWithStreamer:self];
    } else {
        // fix: audioStream is negative in this branch, so the old code indexed
        // pFormatCtx->streams[audioStream] out of bounds (undefined behavior);
        // there is no stream to discard — just record that audio is unavailable.
        audioStream = -1;
    }
}

// Called by the audio consumer when the buffered packet has been fully
// consumed; allows -readPacket to dequeue the next one.
- (void)nextPacket
{
    _inBuffer = NO;
}

// Returns the next audio packet for playback. While the previous packet is
// still being consumed (_inBuffer set, or bytes remaining) the same packet is
// returned again; otherwise the oldest queued packet is copied into
// _currentPacket and dequeued.
- (AVPacket*)readPacket
{
    if (_currentPacket.size > 0 || _inBuffer) return &_currentPacket;
    
    [audioPacketQueueLock lock];
    // fix: guard against an empty queue — objectAtIndex:0 would throw
    // NSRangeException — and perform the whole read/dequeue under the lock
    // instead of reading the front object unlocked.
    if ([audioPacketQueue count] == 0) {
        [audioPacketQueueLock unlock];
        return &_currentPacket;
    }
    NSMutableData *packetData = [audioPacketQueue objectAtIndex:0];
    _packet = [packetData mutableBytes];
    
    if (_packet) {
        if (_packet->dts != AV_NOPTS_VALUE) {
            _packet->dts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
        }
        
        if (_packet->pts != AV_NOPTS_VALUE) {
            _packet->pts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
        }
        
        audioPacketQueueSize -= _packet->size;
        // fix: copy the packet BEFORE removing its backing NSMutableData from
        // the queue — the removal releases packetData under MRC, after which
        // _packet could point at freed memory.
        _currentPacket = *(_packet);
    }
    [audioPacketQueue removeObjectAtIndex:0];
    [audioPacketQueueLock unlock];
    
    return &_currentPacket;
}

// Stops audio playback and clears the primed flag so playback can be
// re-started later.
- (void)closeAudio
{
    [_audioController _stopAudio];
    primed=NO;
}

// Writes `pict` (assumed RGB24, per the P6 header below) to the Documents
// directory as a binary PPM file named image%04d.ppm. Silently returns if the
// file cannot be opened.
- (void)savePPMPicture:(AVPicture)pict width:(int)width height:(int)height index:(int)iFrame
{
    FILE *pFile;
    NSString *fileName;
    int  y;
    
    fileName = [Utilities documentsPath:[NSString stringWithFormat:@"image%04d.ppm",iFrame]];
    // Open file
    NSLog(@"write image file: %@",fileName);
    // fix: -fileSystemRepresentation is the correct encoding for POSIX paths;
    // -cStringUsingEncoding:NSASCIIStringEncoding returns NULL for non-ASCII
    // paths, and fopen(NULL, ...) is undefined behavior.
    pFile=fopen([fileName fileSystemRepresentation], "wb");
    if (pFile == NULL) {
        return;
    }
    
    // PPM header: magic number, dimensions, max channel value.
    fprintf(pFile, "P6\n%d %d\n255\n", width, height);
    
    // One row of RGB24 pixels (3 bytes each) per iteration; linesize may
    // include padding, so rows are written individually.
    for (y=0; y<height; y++) {
        fwrite(pict.data[0]+y*pict.linesize[0], 1, width*3, pFile);
    }
    
    // Close file
    fclose(pFile);
}

@end

