//
//  AGDChatViewController.m
//  AgoraDemo
//
//  Created by apple on 15/9/9.
//  Copyright (c) 2015年 Agora. All rights reserved.
//

#import <CoreVideo/CoreVideo.h>
#import <Accelerate/Accelerate.h>


#import "NSBundle+LLPackages.h"

#define  __ENABLE_GPUIMAGE__

#import "AGDChatViewController.h"

#include "AgoraRtcEngineKit/IAgoraRtcEngine.h"
#include "AgoraRtcEngineKit/IAgoraMediaEngine.h"

#import "AGDCAudioFrameObserver.h"

#import "AGDCVideoFrameObserver.hpp"

#import <IJKMediaFramework/VCSimpleSession.h>

#import <FaceFilterFramework/FaceFilter.h>

#ifdef __ENABLE_GPUIMAGE__

//#import <GPUImage/GPUImage.h>

#endif


void subscriberVideoDataReceived(uint8_t* data, size_t size, int w, int h, int rawType, bool isMain, void *context);

void subscriberAudioDataReceived(uint8_t* data, size_t size, int inNumberFrames, bool isLocal, void *context);


#define __TEST__

//#define __GUESTING__  1
//#define __PUSH_MEDIA_CDN__

@interface AGDChatViewController () <VCSessionDelegate>
{
    // Stats snapshot from the previous reportRtcStats: callback, used to
    // compute traffic deltas.
    __block AgoraRtcStats *lastStat_;

}


//add by tzx
#ifdef __ENABLE_GPUIMAGE__

//@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;

#endif


// Capture / publish configuration.
@property (nonatomic, assign) CGSize  videoSize;
@property (nonatomic, assign) NSUInteger expectedSourceVideoFrameRate;
@property (nonatomic, assign) NSUInteger averageBitRate;
// Fix: NSString properties are declared copy (not strong) so a mutable string
// handed in by a caller cannot mutate behind our back.
@property (nonatomic, copy) NSString *publishUrl;
@property (atomic, assign) BOOL canInputFrame;

//end by

@property (strong, nonatomic) IBOutletCollection(UIButton) NSArray *speakerControlButtons;
@property (strong, nonatomic) IBOutletCollection(UIButton) NSArray *audioMuteControlButtons;
@property (weak, nonatomic) IBOutlet UIButton *cameraControlButton;

@property (weak, nonatomic) IBOutlet UIView *audioControlView;
@property (weak, nonatomic) IBOutlet UIView *videoControlView;

@property (weak, nonatomic) IBOutlet UIView *videoMainView;

@property (weak, nonatomic) IBOutlet UICollectionView *collectionView;

@property (weak, nonatomic) IBOutlet UILabel *talkTimeLabel;
@property (weak, nonatomic) IBOutlet UILabel *dataTrafficLabel;
@property (weak, nonatomic) IBOutlet UILabel *alertLabel;

@property (weak, nonatomic) IBOutlet UIButton *videoButton;
@property (weak, nonatomic) IBOutlet UIButton *audioButton;

@property (strong, nonatomic) AgoraRtcEngineKit *agoraKit;

// Remote participant uids (NSNumber), in join order; backs the collection view.
@property (strong, nonatomic) NSMutableArray *uids;
// uid (NSNumber) -> video-muted flag (NSNumber wrapping BOOL).
@property (strong, nonatomic) NSMutableDictionary *videoMuteForUids;

// Session state.
@property (assign, nonatomic) AGDChatType type;
@property (copy, nonatomic) NSString *channel;
@property (copy, nonatomic) NSString *vendorKey;
@property (assign, nonatomic) BOOL agoraVideoEnabled;
@property (strong, nonatomic) NSTimer *durationTimer;  // 1 Hz talk-time timer
@property (nonatomic) NSUInteger duration;             // call duration in seconds

@property (strong, nonatomic) UIAlertView *errorKeyAlert;

// Face-filter resource bundles loaded from face_stock_packages.bundle.
@property (nonatomic, strong) NSBundle *certBundle;

//@property (nonatomic, strong) NSBundle *resourceBundle;

@property (nonatomic, copy) NSString *resourceName;


@property (nonatomic, strong) NSBundle *face_dahuziBundle;

@property (nonatomic, strong) NSBundle *face_pp_babyBundle;
@property (nonatomic, strong) NSBundle *face_pp_beautyBundle;
@property (nonatomic, strong) NSBundle *mousterBundle;
@property (nonatomic, strong) NSBundle *ranbowpukeBundle;

@end

@implementation AGDChatViewController{
    //CSouceAudioFrame          m_audio;
    // C++ observer receiving raw video frames from the Agora media engine.
    CVideoFrameObserver         *m_videoObserver;
    
    // C++ observer receiving raw audio frames from the Agora media engine.
    CSouceAudioFrameObserver    *m_audioObserver;
    // RTMP push session (active only under __PUSH_MEDIA_CDN__ / __GUESTING__).
    VCSimpleSession             *_vcSimpleSession;
    
    // Face filter applied to captured frames; lazily created on the first frame.
    FaceFilter                  *_filter;
    CGFloat                     _previewImageWidth;   // capture buffer width, pixels
    CGFloat                     _previewImageHeight;  // capture buffer height, pixels
    
    NSString                    *_licensePathFile;    // NOTE(review): never assigned in this file
    NSBundle                    *resourceBundle;      // NOTE(review): never assigned in this file
    bool                        bFilter;              // face-filter toggle (flipped in selectAudioMuteButtons:)
}


// Sets up debug PCM dump files, face-filter bundles, capture configuration,
// the (optional) RTMP push session, and the Agora engine, then joins setup.
- (void)viewDidLoad {
    [super viewDidLoad];
    
    bFilter = true;
    
    // Debug PCM dump paths inside the app's Documents directory.
    NSString *docPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    const char * docPathStr = [docPath UTF8String];
    char outfile[256];
    char inFile[256];
    
    // Fix: snprintf instead of sprintf so the fixed buffers cannot overflow.
    snprintf(outfile, sizeof(outfile), "%s/out.pcm", docPathStr);
    snprintf(inFile, sizeof(inFile), "%s/file_in.pcm", docPathStr);
    
    // Face-filter resources shipped inside face_stock_packages.bundle.
    self.resourceName = [[NSBundle mainBundle]
                               pathForResource:@"face_stock_packages" ofType:@"bundle"];
    self.certBundle = [NSBundle ll_bundleWithPackageName:@"face_cert" andStockRootPath:self.resourceName];
    
    self.face_dahuziBundle = [NSBundle ll_bundleWithPackageName:@"face_dahuzi" andStockRootPath:self.resourceName];
    
    self.face_pp_babyBundle = [NSBundle ll_bundleWithPackageName:@"face_pp_baby" andStockRootPath:self.resourceName];
 
    g_pcmFile = fopen(outfile, "wb");  // raw captured PCM sink (debug)
    g_pInFile = fopen(inFile, "rb");   // optional PCM input source (debug)
    
    // Capture / publish configuration.
    self.videoSize = CGSizeMake(480, 640);
    self.expectedSourceVideoFrameRate = 20;
    self.averageBitRate = 600;
    self.publishUrl = @"rtmp://push1.arenazb.hupu.com/test/tzx";
    
    self.canInputFrame = NO;
    
#ifdef __TEST__
    // Wire the C++ frame observers back into this controller via C trampolines.
    // NOTE(review): these observers are never delete'd — they leak when the
    // controller is destroyed.
    m_videoObserver = new CVideoFrameObserver();
    m_videoObserver->setCallback((VideoOutputCallback)subscriberVideoDataReceived, (__bridge void *)(self));
    
    m_audioObserver = new CSouceAudioFrameObserver();
    m_audioObserver->setCallback((AudioOutputCallback)subscriberAudioDataReceived, (__bridge void *)(self));
    
    AVAudioFormat *format = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16 sampleRate:kSampleRate channels:1 interleaved:YES];
    
    _vcSimpleSession = [[VCSimpleSession alloc] initWithVideoSize:CGSizeMake(368, 640) frameRate:20 bitrate:600000 andAVAudioFormat:format];
    _vcSimpleSession.delegate = self;
#endif   //end by
    
    self.uids = [NSMutableArray array];
    self.videoMuteForUids = [NSMutableDictionary dictionary];
    
    self.channel = [self.dictionary objectForKey:AGDKeyChannel];
    self.vendorKey = [self.dictionary objectForKey:AGDKeyVendorKey];
    self.type = self.chatType;
    
    self.title = [NSString stringWithFormat:@"%@ %@",NSLocalizedString(@"room", nil), self.channel];
    [self initAgoraKit];
    
#ifdef __PUSH_MEDIA_CDN__
    //rtmp://push1.arenazb.hupu.com/test/tzx  push
    //rtmp://pull1.arenazb.hupu.com/test/tzx  pull
    [_vcSimpleSession startRtmpSessionWithURL:@"rtmp://push1.arenazb.hupu.com/test/tzx" andStreamKey:nil];
#endif
    
#if  __GUESTING__
    [_vcSimpleSession startMediaGuesting];
#endif
    
}

// AVCaptureVideoDataOutput callback: lazily creates the face filter, applies
// it to the BGRA frame when enabled, and pushes the frame into the Agora
// media engine. Runs on the serial _videoQueue.
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (connection != _videoConnection) {
        // Audio (or any other) connections are not processed here.
        return;
    }
    
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        // Fix: the old code locked and used a NULL image buffer for samples
        // that carried no video (lock was unconditional, unlock was not).
        return;
    }
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    
    // Lazily create the face filter sized to the actual capture buffer.
    if (!_filter) {
        _previewImageWidth = CVPixelBufferGetWidth(pixelBuffer);
        _previewImageHeight = CVPixelBufferGetHeight(pixelBuffer);
        
        _filter = [[FaceFilter alloc] init];
        [_filter Init:({
            FaceFilterCert *cert = [FaceFilterCert new];
            cert.certPath = [self.certBundle pathForResource:@"license" ofType:@"lic"];
            cert;
        }) withSize:CGSizeMake(_previewImageWidth, _previewImageHeight)];
    }
    
    if (bFilter) {
        bool bRet = [_filter doFilter:self.face_dahuziBundle withData:pixelBuffer withTime:CMTimeGetSeconds(pts) withDictionary:nil];
        if (bRet != true) {
            NSLog(@"doFilter error");
        }
    }
    
    uint8_t *data = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    int frameSize = _previewImageWidth * _previewImageHeight * 4;  // BGRA = 4 bytes/pixel
    if (m_videoObserver) {
        // 0x05 is the raw-format tag expected by the observer — TODO confirm.
        m_videoObserver->inputVideoFrame(data, frameSize, _previewImageWidth, _previewImageHeight, 0x05);
    }
    
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    
#ifdef __PUSH_MEDIA_CDN__
    // Fix: this branch previously referenced undeclared `width`/`height`
    // and would not have compiled with __PUSH_MEDIA_CDN__ defined.
    [_vcSimpleSession pushCameraBuffer:pixelBuffer posXTag:0 posYTag:0 widthTag:_previewImageWidth heightTag:_previewImageHeight main:YES];
#endif
}

// Returns the first capture device at the requested position (front/back),
// or nil if no such camera exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return nil;
}


// Configures the capture session: 640x480 front camera, 32BGRA output
// delivered to self on a private serial queue, portrait orientation.
- (void) setupVideoCapture {
    NSError *error = nil;
    
    [_session beginConfiguration];
    
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    
    AVCaptureDevice *videoDevice = [self cameraWithPosition:AVCaptureDevicePositionFront];
    
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (!videoInput) {
        // Fix: check the returned input rather than the error pointer (the
        // error out-param is only meaningful on failure). Also removed the
        // unused default-device lookup.
        NSLog(@"Error getting video input device: %@", error.description);
    }
    if ([_session canAddInput:videoInput]) {
        [_session addInput:videoInput];
    }
    
    // BGRA output with self as sample-buffer delegate on a serial queue.
    _videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_videoOutput setSampleBufferDelegate:self queue:_videoQueue];
    _videoOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
    _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    if ([_session canAddOutput:_videoOutput]) {
        [_session addOutput:_videoOutput];
    }
    
    // Fix: fetch the video connection once and reuse it (the old code
    // queried it twice).
    _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([_videoConnection isVideoOrientationSupported]) {
        [_videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    }
    
    [_session commitConfiguration];
}


// Creates the AVCaptureSession plus a preview layer. Audio-unit setup is
// compiled in only under __ENABLE__UNIT__. The session is NOT started here —
// joinChannel calls -startRunning.
- (void) setupSession {
    _session = [[AVCaptureSession alloc] init];
    [self setupVideoCapture];
    
#ifdef __ENABLE__UNIT__
    [self setupAudio];
#else
    
#endif
    // start capture and a preview layer
//    [_session startRunning];
    
#ifdef __ENABLE__UNIT__
    AudioOutputUnitStart(m_audioUnit);
#endif
    // NOTE(review): _previewLayer is created but never attached to a view in
    // this file — confirm it is installed elsewhere or remove.
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}

#ifdef __ENABLE_GPUIMAGE__

// NOTE(review): disabled (#if 0) alternative GPUImage capture path, kept for
// reference. It mirrored the AVCapture path: camera -> RGB filter -> raw BGRA
// output pushed into m_videoObserver and, optionally, the RTMP session.
#if 0
- (void)setupGPUImage
{
    GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];//AVCaptureDevicePositionFront
    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    videoCamera.frameRate = [self expectedSourceVideoFrameRate];
    
    GPUImageRGBFilter *filter = [[GPUImageRGBFilter alloc] init];
    
    CGRect bounds = [UIScreen mainScreen].bounds;
    GPUImageView *filteredVideoView = [[GPUImageView alloc] initWithFrame:bounds];
    
    // Add the view somewhere so it's visible
    //    [self.view addSubview:filteredVideoView];
    [self.view insertSubview:filteredVideoView atIndex:0];
    
    [videoCamera addTarget:filter];
    [filter addTarget:filteredVideoView];
    
    GPUImageRawDataOutput *rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:[self videoSize] resultsInBGRAFormat:YES];
    [filter addTarget:rawDataOutput];
    __weak GPUImageRawDataOutput *weakOutput = rawDataOutput;
    __weak typeof(self) wself = self;
    [rawDataOutput setNewFrameAvailableBlock:^{
        //        __strong GPUImageRawDataOutput *strongOutput = weakOutput;
        //        __strong typeof(wself) strongSelf = wself;
        [weakOutput lockFramebufferForReading];
        GLubyte *outputBytes = [weakOutput rawBytesForImage];
        NSInteger bytesPerRow = [weakOutput bytesPerRowInOutput];
        CVPixelBufferRef pixelBuffer = NULL;
        CVPixelBufferCreateWithBytes(kCFAllocatorDefault, [self videoSize].width, [self videoSize].height, kCVPixelFormatType_32BGRA, outputBytes, bytesPerRow, nil, nil, nil, &pixelBuffer);
        [weakOutput unlockFramebufferAfterReading];
        if(pixelBuffer == NULL) {
            return ;
        }
        //if(wself.canInputFrame)
        {
            //[wself.session inputPixelBuffer:pixelBuffer];
            
            CVPixelBufferLockBaseAddress(pixelBuffer,kCVPixelBufferLock_ReadOnly);
            
            uint8_t* data = (uint8_t*)CVPixelBufferGetBaseAddress(pixelBuffer);
            //        int frameSize = (int)CVPixelBufferGetDataSize(pixelBuffer);
            int width = (int)CVPixelBufferGetWidth(pixelBuffer);
            int height = (int)CVPixelBufferGetHeight(pixelBuffer);
            
            int frameSize = width*height*4;
            
            //m_pixelBufferSource->pushPixelBuffer(data, frameSize);
            m_videoObserver->inputVideoFrame(data, frameSize, width, height, 0x05);
            
#ifdef __PUSH_MEDIA_CDN__
            
            [_vcSimpleSession pushCameraBuffer:pixelBuffer posXTag:0 posYTag:0 widthTag:width heightTag:height main:YES];
#endif
            
            CVPixelBufferUnlockBaseAddress(pixelBuffer,kCVPixelBufferLock_ReadOnly);
            
            
        }
        CVPixelBufferRelease(pixelBuffer);
    }];
    [videoCamera startCameraCapture];
    
    self.videoCamera = videoCamera;
}
#endif
#endif


// After layout: re-pin the preview frame, configure the engine's raw-audio
// taps, and join the channel. The che.* strings are private Agora engine
// parameters.
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    self.videoMainView.frame = self.videoMainView.superview.bounds; // video view's autolayout cause crash
    
    [self.agoraKit setParameters:@"{\"che.audio.alternative.mode\":true}"];
    
    [self joinChannel];
    
    // Deliver raw audio in 1024-sample chunks for both render and capture.
    [self.agoraKit setParameters:@"{\"che.audio.set_render_raw_audio_interval\":1024}"];
    
    [self.agoraKit setParameters:@"{\"che.audio.set_capture_raw_audio_interval\":1024}"];

    
#ifdef    __TEST__
    //capture audio
    int sampleRate = 44100; // capture-side sample rate
    int channelCnt = 1;     // capture-side channel count
    int mode = 1;           // original note: keep = 1
    NSString *parameters = [NSString stringWithFormat:@"{\"che.audio.set_capture_raw_audio_format\":{\"sampleRate\":%d, \"channelCnt\":%d, \"mode\":%d}}", sampleRate, channelCnt, mode];
    //[self.agoraRtcEngine setParameters:parameters];
    [self.agoraKit setParameters:parameters];
    
    //render audio
    //    int sampleRate = 44100; // playback-side sample rate
    //    int channelCnt = 1;     // playback-side channel count
    mode = 0;           // NOTE(review): original comment said "keep = 1" but 0 is set — confirm
    NSString *RenderParameters = [NSString stringWithFormat:@"{\"che.audio.set_render_raw_audio_format\":{\"sampleRate\":%d, \"channelCnt\":%d, \"mode\":%d}}", sampleRate, channelCnt, mode];
    [self.agoraKit setParameters:RenderParameters];
    
    
    [self.agoraKit setParameters:@"{\"che.video.local.camera_index\": 1024}"]; //iOS
    
    [self.agoraKit setParameters:@"{\"che.audio.external_device\":true}"];

#endif
    
}

// Standard memory-warning hook; nothing extra to purge here.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

#pragma mark -


// Creates the Agora engine, registers the C++ raw audio/video observers via
// the native IMediaEngine interface, then configures local video, delegate
// blocks, and (under __TEST__) the AVCapture session and audio unit.
- (void)initAgoraKit
{
    // use test key
    //self.agoraKit = [AgoraRtcEngineKit sharedEngineWithVendorKey:self.vendorKey delegate:self];
    self.agoraKit = [AgoraRtcEngineKit sharedEngineWithAppId:self.vendorKey extensionDelegate:self];
    
#ifdef __TEST__
    // Drop to the native C++ engine handle to register the frame observers
    // created in viewDidLoad.
    agora::rtc::IRtcEngine* rtc_engine = (agora::rtc::IRtcEngine*)self.agoraKit.getNativeHandle;
    agora::util::AutoPtr<agora::media::IMediaEngine> mediaEngine;
    mediaEngine.queryInterface(*rtc_engine, agora::rtc::AGORA_IID_MEDIA_ENGINE);
    if (mediaEngine){
        mediaEngine->registerVideoFrameObserver(m_videoObserver);
        mediaEngine->registerAudioFrameObserver(m_audioObserver);
    }
    
    m_audioObserver->setupAudioUnitEX();

#endif
    
    [self setUpVideo];
    [self setUpBlocks];
    
#ifdef __TEST__
#ifdef __ENABLE_GPUIMAGE__
    //[self setupGPUImage];
    [self setupSession];
#endif
    
    m_audioObserver->startAudio();
#endif
}

// Starts camera capture and joins the Agora channel. On success: route audio
// to the speakerphone, drop video for audio-only chats, and keep the screen
// awake for the duration of the call.
- (void)joinChannel
{
    [_session startRunning];
    
    [self showAlertLabelWithString:NSLocalizedString(@"wait_attendees", nil)];
    __weak typeof(self) weakSelf = self;
    
    
    // uid 0 asks the server to assign one; a nil key uses the engine's App ID.
    [self.agoraKit joinChannelByKey:nil channelName:self.channel info:nil uid:0 joinSuccess:^(NSString *channel, NSUInteger uid, NSInteger elapsed) {
        
        [weakSelf.agoraKit setEnableSpeakerphone:YES];
        if (weakSelf.type == AGDChatTypeAudio) {
            [weakSelf.agoraKit disableVideo];
        }
        
        [UIApplication sharedApplication].idleTimerDisabled = YES;
        
        // Persist the last-used key — presumably for prefilling the join
        // screen; confirm against the caller.
        NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults];
        [userDefaults setObject:weakSelf.vendorKey forKey:AGDKeyVendorKey];
    }];
}

// Enables video, binds the local preview canvas, and sets the send profile.
- (void)setUpVideo
{
    [self.agoraKit enableVideo];
    
    AgoraRtcVideoCanvas *videoCanvas = [[AgoraRtcVideoCanvas alloc] init];
    videoCanvas.uid = 0;  // 0 = local user
    // Fix: both arms of the old #ifdef __TEST__ assigned the same view, so
    // the conditional was removed.
    videoCanvas.view = self.videoMainView;
    videoCanvas.renderMode = AgoraRtc_Render_Fit;
    [self.agoraKit setupLocalVideo:videoCanvas];
    
    [self.agoraKit setVideoProfile:AgoraRtc_VideoProfile_Invalid swapWidthAndHeight:NO];
    
    // Per the original author's note, only this Ex call actually takes effect
    // (it overrides the profile set above): 480x640 @ 20 fps, 600 kbps.
    [self.agoraKit setVideoProfileEx:480 andHeight:640 andFrameRate:20 andBitrate:600000];
}

// First local video frame rendered: re-pin the preview frame (its autolayout
// can crash otherwise, per the original author's note).
- (void)rtcEngine:(AgoraRtcEngineKit *)engine firstLocalVideoFrameWithSize:(CGSize)size elapsed:(NSInteger)elapsed
{
    NSLog(@"local video display");
    // Fix: no block is captured here, so the __weak self indirection in the
    // old code served no purpose.
    self.videoMainView.frame = self.videoMainView.superview.bounds; // video view's autolayout cause crash
}

// A remote user joined: add them to the roster and insert their grid cell.
- (void)rtcEngine:(AgoraRtcEngineKit *)engine didJoinedOfUid:(NSUInteger)uid elapsed:(NSInteger)elapsed
{
    NSLog(@"engine: %@", engine);
    // Fix: removed the pointless __weak self dance — this is a plain
    // synchronous delegate method with no captured block.
    [self hideAlertLabel];
    [self.uids addObject:@(uid)];
    
    [self.collectionView insertItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:self.uids.count - 1 inSection:0]]];
}
// A remote user left or dropped: remove them from the roster and the grid.
- (void)rtcEngine:(AgoraRtcEngineKit *)engine didOfflineOfUid:(NSUInteger)uid reason:(AgoraRtcUserOfflineReason)reason
{
    // Fix: indexOfObject: returns NSUInteger — store it as such for a clean
    // NSNotFound comparison; also dropped the pointless __weak self dance
    // (no block is captured here).
    NSUInteger index = [self.uids indexOfObject:@(uid)];
    if (index != NSNotFound) {
        NSIndexPath *indexPath = [NSIndexPath indexPathForRow:index inSection:0];
        [self.uids removeObjectAtIndex:index];
        [self.collectionView deleteItemsAtIndexPaths:@[indexPath]];
    }
}

// A remote user muted/unmuted their video: record the state and refresh
// that user's cell so it switches between video and audio presentation.
- (void)rtcEngine:(AgoraRtcEngineKit *)engine didVideoMuted:(BOOL)muted byUid:(NSUInteger)uid
{
    NSLog(@"user %@ mute video: %@", @(uid), muted ? @"YES" : @"NO");
    
    [self.videoMuteForUids setObject:@(muted) forKey:@(uid)];
    
    // Fix: guard against a uid we have not seen — the old code would build
    // an index path with row == NSNotFound and crash the reload. Also
    // dropped the pointless __weak self dance (no block is captured here).
    NSUInteger index = [self.uids indexOfObject:@(uid)];
    if (index == NSNotFound) {
        return;
    }
    [self.collectionView reloadItemsAtIndexPaths:@[[NSIndexPath indexPathForRow:index inSection:0]]];
}

// Connection lost: show the "no network" banner, hide the preview, zero the
// traffic readout, and stop local capture.
- (void)rtcEngineConnectionDidLost:(AgoraRtcEngineKit *)engine
{
    // Fix: removed the pointless __weak self dance — no block is captured.
    [self showAlertLabelWithString:NSLocalizedString(@"no_network", nil)];
    self.videoMainView.hidden = YES;
    self.dataTrafficLabel.text = @"0KB/s";
    
    [_session stopRunning];
    
#if  __GUESTING__
    //[_vcSimpleSession stopMediaGuesting];
#endif
}

// Periodic stats callback: starts the talk-time timer on the first report
// and updates the KB/s label from the delta since the previous report.
// (lastStat_ is nil on the first call; messaging nil yields zeros, which is
// the intended baseline.)
- (void)rtcEngine:(AgoraRtcEngineKit *)engine reportRtcStats:(AgoraRtcStats*)stats
{
    if (self.duration == 0 && !self.durationTimer) {
        self.talkTimeLabel.text = @"00:00";
        // NOTE(review): a repeating NSTimer retains its target; invalidate is
        // only called on explicit hang-up — confirm teardown elsewhere.
        self.durationTimer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(updateTalkTimeLabel) userInfo:nil repeats:YES];
    }
    
    // Fix: guard the interval — the old code divided by
    // (stats.duration - lastStat_.duration) unconditionally, crashing with a
    // division-by-zero when two reports carried the same duration. Also
    // dropped the pointless __weak self dance (no block is captured here).
    NSUInteger interval = stats.duration - lastStat_.duration;
    if (interval > 0) {
        NSUInteger traffic = (stats.txBytes + stats.rxBytes - lastStat_.txBytes - lastStat_.rxBytes) / 1024;
        NSUInteger speed = traffic / interval;
        self.dataTrafficLabel.text = [NSString stringWithFormat:@"%@KB/s", @(speed)];
    }
    
    lastStat_ = stats;
}

// Engine error callback. An invalid App ID is fatal for the session: leave
// the channel and show the "wrong key" alert.
- (void)rtcEngine:(AgoraRtcEngineKit *)engine didOccurError:(AgoraRtcErrorCode)errorCode
{
    // Fix: removed the pointless __weak self dance (no block is captured)
    // and the do-nothing debug assignments in the dead __GUESTING__ branch.
    if (errorCode == AgoraRtc_Error_InvalidAppId) {
        [self.agoraKit leaveChannel:nil];
        [self.errorKeyAlert show];
    }
}

// Legacy block-based Agora callbacks used to live here; every one of them has
// an equivalent AgoraRtcEngineDelegate method implemented above, so this is
// now a deliberate no-op. It is kept only because initAgoraKit still calls it.
// Fix: removed ~50 lines of commented-out dead code (version control has it).
- (void)setUpBlocks
{
}

#pragma mark - 

// Shows the status banner with the given text.
// Fix: dropped the stray semicolon after the method signature.
- (void)showAlertLabelWithString:(NSString *)text
{
    self.alertLabel.hidden = NO;
    self.alertLabel.text = text;
}

// Hides the status banner shown by showAlertLabelWithString:.
- (void)hideAlertLabel
{
    self.alertLabel.hidden = YES;
}

// 1 Hz timer tick: advance the call duration and render it as mm:ss.
- (void)updateTalkTimeLabel
{
    self.duration++;
    NSUInteger seconds = self.duration % 60;
    NSUInteger minutes = (self.duration - seconds) / 60;
    // Fix: %02lu matches the unsigned long casts — the old %02ld was a
    // signed/unsigned format-specifier mismatch.
    self.talkTimeLabel.text = [NSString stringWithFormat:@"%02lu:%02lu", (unsigned long)minutes, (unsigned long)seconds];
}

#pragma mark - 

// Back tap: leave the Agora channel, then tear down the screen-awake flag,
// stop the talk timer, and pop this controller.
- (IBAction)didClickBackView:(id)sender
{
    [self showAlertLabelWithString:NSLocalizedString(@"exiting", nil)];
    __weak typeof(self) wself = self;
    [self.agoraKit leaveChannel:^(AgoraRtcStats *stat) {
        // Cleanup after we have actually left the channel.
        [wself.durationTimer invalidate];
        [wself.navigationController popViewControllerAnimated:YES];
        [UIApplication sharedApplication].idleTimerDisabled = NO;
    }];
    
#if  __GUESTING__
    int i;
    i = 0;
    //[_vcSimpleSession stopMediaGuesting];
#endif
}

// Audio-mute button tap: toggles the local audio stream.
// NOTE(review): selectAudioMuteButtons: has been repurposed to flip the face
// filter and never updates button state (see that method), so btn.selected
// here is always its pre-toggle value — confirm the intended behavior.
- (IBAction)didClickAudioMuteButton:(UIButton *)btn
{
    [self selectAudioMuteButtons:!btn.selected];
    [self.agoraKit muteLocalAudioStream:btn.selected];
}

// Toggles speakerphone routing and mirrors the state on the speaker buttons.
// NOTE(review): isSpeakerphoneEnabled is read again after the toggle; if the
// SDK applies the change synchronously the buttons end up showing the
// previous state — verify against the Agora SDK's behavior.
- (IBAction)didClickSpeakerButton:(UIButton *)btn
{
    [self.agoraKit setEnableSpeakerphone:!self.agoraKit.isSpeakerphoneEnabled];
    [self selectSpeakerButtons:!self.agoraKit.isSpeakerphoneEnabled];
}

// Video-mute tap: toggle the local video stream and hide the preview while
// muted.
- (IBAction)didClickVideoMuteButton:(UIButton *)btn
{
    BOOL nowMuted = !btn.selected;
    btn.selected = nowMuted;
    [self.agoraKit muteLocalVideoStream:nowMuted];
    self.videoMainView.hidden = nowMuted;
}

// Flips between front and back cameras; the button mirrors the state.
- (IBAction)didClickSwitchButton:(UIButton *)btn
{
    btn.selected = !btn.selected;
    [self.agoraKit switchCamera];
}

// Hang-up tap: leave the channel, then stop the timer, re-enable the idle
// timer, and pop this controller. (Same flow as didClickBackView:.)
- (IBAction)didClickHungUpButton:(UIButton *)btn
{
    [self showAlertLabelWithString:NSLocalizedString(@"exiting", nil)];
    __weak typeof(self) wself = self;
    [self.agoraKit leaveChannel:^(AgoraRtcStats *stat) {
        // Cleanup after we have actually left the channel.
        [wself.durationTimer invalidate];
        [wself.navigationController popViewControllerAnimated:YES];
        [UIApplication sharedApplication].idleTimerDisabled = NO;
    }];
    
#if  __GUESTING__
    
    //[_vcSimpleSession stopMediaGuesting];
    int i = 0;
    i = 2;
#endif
}

// Switches the session to audio-only chat (setType: updates the UI).
- (IBAction)didClickAudioButton:(UIButton *)btn
{
    // Start audio chat
    [self.agoraKit disableVideo];
    self.type = AGDChatTypeAudio;
}

// Switches the session to video chat (setType: updates the UI).
- (IBAction)didClickVideoButton:(UIButton *)btn
{
    [self.agoraKit enableVideo];
    self.type = AGDChatTypeVideo;
    // Fix: setting NO unconditionally is equivalent to the old
    // `if (selected) selected = NO` guard.
    self.cameraControlButton.selected = NO;
}

#pragma mark - 

// One grid cell per remote participant.
- (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section
{
    return self.uids.count;
}

// Builds the cell for one remote participant: a live video canvas when they
// are publishing video, otherwise an audio-only presentation.
- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath
{
    AGDChatCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:@"CollectionViewCell" forIndexPath:indexPath];
    cell.type = self.type;
    
    NSNumber *uid = [self.uids objectAtIndex:indexPath.row];
    NSNumber *videoMute = [self.videoMuteForUids objectForKey:uid];  // nil => not muted
    
    if (self.type == AGDChatTypeVideo && !videoMute.boolValue) {
        cell.type = AGDChatTypeVideo;
        AgoraRtcVideoCanvas *videoCanvas = [[AgoraRtcVideoCanvas alloc] init];
        videoCanvas.uid = uid.unsignedIntegerValue;
        // Fix: both arms of the old #ifdef __TEST__ assigned the same view,
        // so the conditional was removed.
        videoCanvas.view = cell.videoView;
        videoCanvas.renderMode = AgoraRtc_Render_Fit;
        [self.agoraKit setupRemoteVideo:videoCanvas];
    } else {
        // Audio chat, or a participant with video muted.
        cell.type = AGDChatTypeAudio;
    }
    
    cell.nameLabel.text = uid.stringValue;
    return cell;
}

#pragma mark -

// Custom setter: flips the whole UI between video-chat and audio-chat modes
// and reloads the participant grid.
- (void)setType:(AGDChatType)type
{
    _type = type;
    BOOL isVideo = (type == AGDChatTypeVideo);
    
    // Control panels.
    self.videoControlView.hidden = !isVideo;
    self.audioControlView.hidden = isVideo;
    
    // Video/Audio mode switch buttons.
    self.videoButton.selected = isVideo;
    self.audioButton.selected = !isVideo;
    
    // Local preview.
    self.videoMainView.hidden = !isVideo;
    
    [self.collectionView reloadData];
}

// Applies the same selected state to every speaker-control button.
- (void)selectSpeakerButtons:(BOOL)selected
{
    [self.speakerControlButtons enumerateObjectsUsingBlock:^(UIButton *button, NSUInteger idx, BOOL *stop) {
        button.selected = selected;
    }];
}

// NOTE(review): this handler has been repurposed as a face-filter toggle —
// it flips bFilter and returns immediately, making the button-selection loop
// below unreachable dead code. Confirm intent before cleaning up; callers
// (didClickAudioMuteButton:) still expect button-state semantics.
- (void)selectAudioMuteButtons:(BOOL)selected
{
    bFilter = !bFilter;
    return;
    
    for (UIButton *btn in self.audioMuteControlButtons) {
        btn.selected = selected;
    }
}

// Lazily-created "wrong key" alert shown when the Agora App ID is rejected.
// NOTE(review): UIAlertView is deprecated since iOS 9 — migrate to
// UIAlertController when convenient.
- (UIAlertView *)errorKeyAlert
{
    if (!_errorKeyAlert) {
        _errorKeyAlert = [[UIAlertView alloc] initWithTitle:@""
                                                    message:NSLocalizedString(@"wrong_key", nil)
                                                   delegate:self
                                          cancelButtonTitle:NSLocalizedString(@"done", nil)
                                          otherButtonTitles:nil];
    }
    return _errorKeyAlert;
}


# pragma mark - VCSimpleSession delegate callbacks

// VCSessionDelegate: log every RTMP session state transition.
- (void) connectionStatusChanged:(VCSessionState) state
{
    switch (state) {
        case VCSessionStateStarting: NSLog(@"VCSessionStateStarting");  break;
        case VCSessionStateStarted:  NSLog(@"VCSessionStateStarted");   break;
        case VCSessionStateEnded:    NSLog(@"tzx VCSessionStateEnded"); break;
        case VCSessionStateError:    NSLog(@"tzx VCSessionStateError"); break;
        default:
            // Other states are intentionally ignored.
            break;
    }
}


// Wraps a contiguous I420 (planar YUV 4:2:0) buffer in a newly created
// NV12 (420YpCbCr8BiPlanarVideoRange) CVPixelBuffer, interleaving the U/V
// planes with vImage. The caller owns the returned buffer and must call
// CVPixelBufferRelease. Returns NULL if the buffer cannot be created.
- (CVPixelBufferRef) createI420BiPlanarPixelBuffer:(uint8_t *)data andLength:(int)length andWidth:(int)width andHeight:(int)height
{
    CVPixelBufferRef pixelBuffer = NULL;
    NSDictionary *options = @{(id)kCVPixelBufferIOSurfacePropertiesKey: @{}};
    
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                          (__bridge CFDictionaryRef)options,
                                          &pixelBuffer);
    if (status != kCVReturnSuccess || pixelBuffer == NULL) {
        // Fix: the old code left pixelBuffer uninitialized and used it
        // without checking that creation succeeded.
        NSLog(@"CVPixelBufferCreate failed: %d", (int)status);
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    
    // Copy the Y plane row by row. The destination plane may be row-padded
    // (bytesPerRow >= width); the old single memcpy of bytesPerRow * height
    // over-read the source and misaligned rows whenever padding was present.
    uint8_t *lumaSrc = data;
    uint8_t *lumaDest = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t lumaBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    size_t lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    for (size_t row = 0; row < lumaHeight; row++) {
        memcpy(lumaDest + row * lumaBytesPerRow, lumaSrc + row * (size_t)width, (size_t)width);
    }
    
    // Interleave the planar U and V planes into the single CbCr plane.
    void *chromaBlueSrc = data + (size_t)width * height;           // U plane
    void *chromaRedSrc = data + (size_t)width * height * 5 / 4;    // V plane
    size_t chromaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
    size_t chromaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
    size_t chromaBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    void *chromaBlueDest = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    void *chromaRedDest = (char *)chromaBlueDest + 1;
    
    // Source planes are contiguous, so rowBytes == chromaWidth.
    vImage_Buffer chromaBlue = {chromaBlueSrc, chromaHeight, chromaWidth, chromaWidth};
    vImage_Buffer chromaRed = {chromaRedSrc, chromaHeight, chromaWidth, chromaWidth};
    
    const vImage_Buffer *srcBuffers[2] = {&chromaBlue, &chromaRed};
    void *destChannels[2] = {chromaBlueDest, chromaRedDest};
    
    vImage_Error error = vImageConvert_PlanarToChunky8(srcBuffers, destChannels, 2, 2, chromaWidth, chromaHeight, chromaBytesPerRow, kvImageNoFlags);
    if (error != kvImageNoError) {
        NSLog(@"error during vImageConvert %ld", error);
    }
    
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    
    return pixelBuffer;
}


// Forwards a remote video frame to the RTMP push session as the secondary
// (picture-in-picture) stream. Compiles to a no-op unless
// __PUSH_MEDIA_CDN__ is defined.
-(void)subVideoDataReceived:(CVPixelBufferRef) pixelBufferRef
                   widthTag:(int) width
                  heightTag:(int) height
{
#ifdef __PUSH_MEDIA_CDN__
    if (_vcSimpleSession) {
        [_vcSimpleSession pushCameraBuffer:pixelBufferRef posXTag:10 posYTag:20 widthTag:width heightTag:height main:NO];
    }
#endif
}

// Forwards a raw PCM frame to the RTMP push session. Compiles to a no-op
// unless __PUSH_MEDIA_CDN__ is defined.
- (void) gotAudioFrameWithData:(uint8_t*)data Size:(size_t)size InNumberFrames:(int)inNumberFrames local:(bool)isLocal
{
#ifdef __PUSH_MEDIA_CDN__
    if (_vcSimpleSession) {
        [_vcSimpleSession pushPCMBuffer:data PCMLenTag:size InNumberFrames:inNumberFrames local:isLocal];
    }
#endif
}


// C trampoline registered on the native video observer: wraps a raw I420
// frame in an NV12 CVPixelBuffer and hands it to the controller passed as
// `context`.
void subscriberVideoDataReceived(uint8_t* data, size_t size, int w, int h, int rawType, bool isMain, void *context)
{
    AGDChatViewController *pThis = (__bridge AGDChatViewController *)context;
    if (pThis == NULL) {
        return;
    }
    
    CVPixelBufferRef pb = [pThis createI420BiPlanarPixelBuffer:data andLength:(int)size andWidth:w andHeight:h];
    if (pb == NULL) {
        // Fix: the old code passed a possibly-NULL buffer to the CoreVideo
        // accessors below.
        return;
    }
    uint32_t width = (uint32_t)CVPixelBufferGetWidth(pb);
    uint32_t height = (uint32_t)CVPixelBufferGetHeight(pb);
    
    [pThis subVideoDataReceived:pb widthTag:width heightTag:height];
    
    CVPixelBufferRelease(pb);
}


// C trampoline registered on the native audio observer: forwards raw PCM
// frames back into the controller passed as `context`.
void subscriberAudioDataReceived(uint8_t* data, size_t size, int inNumberFrames, bool isLocal, void *context)
{
    AGDChatViewController *controller = (__bridge AGDChatViewController *)context;
    if (!controller) {
        return;
    }
    [controller gotAudioFrameWithData:data Size:size InNumberFrames:inNumberFrames local:isLocal];
}

@end

