//
//  ViewController.m
//  UpgradeClassWkWebview
//
//  Created by ZCL_GUOKAI on 2022/4/20.
//

#import "ViewController.h"
#import <WebKit/WebKit.h>
#import "AppDelegate.h"
#import "AFNetworking.h"

#define SCREEN_WIDTH [UIScreen mainScreen].bounds.size.width
#define SCREEN_HEIGHT [UIScreen mainScreen].bounds.size.height

// Private class extension: conference state plus WKWebView delegate conformance.
@interface ViewController ()<WKUIDelegate,WKNavigationDelegate>
// Fetches an auth token from the basic sample server; exactly one callback runs.
// NOTE(review): `void (^)()` (empty parens) is a deprecated C-style block
// signature — prefer `void (^)(void)` (must be changed together with the
// definition below).
-(void)getTokenFromBasicSample:(NSString *)basicServer onSuccess:(void (^)(NSString *))onSuccess onFailure:(void (^)())onFailure;

// Conference client shared via the AppDelegate (assigned in -viewDidLoad).
@property (nonatomic) IRTCConferenceClient* conferenceClient;
// Publication handle for the local camera stream (set in -publish:).
@property(strong, nonatomic) IRTCConferencePublication* cameraPublication;
// Publication handle for the screen-share stream (set in -share:).
@property(strong, nonatomic) IRTCConferencePublication* sharePublication;
// Embedded web view occupying the left 4/5 of the screen (see -loadWeb).
@property (nonatomic,strong)WKWebView *webView;

@end

@implementation ViewController

// Presents a one-button alert showing `msg` on top of this view controller.
- (void)showMsg: (NSString *)msg
{
  UIAlertController *alert =
      [UIAlertController alertControllerWithTitle:@""
                                          message:msg
                                   preferredStyle:UIAlertControllerStyleAlert];
  // "好的" == "OK" (runtime string preserved verbatim).
  [alert addAction:[UIAlertAction actionWithTitle:@"好的"
                                            style:UIAlertActionStyleCancel
                                          handler:nil]];
  [self presentViewController:alert animated:YES completion:nil];
}

// Builds the UI (web view + native control panel) and caches the shared
// conference client from the app delegate.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor whiteColor];

    // Left 4/5: embedded web page. Right 1/5: native control buttons.
    [self loadWeb];
    [self streamView];

    // The conference client lives on the app delegate so it outlives this VC.
    appDelegate = (AppDelegate *)[UIApplication sharedApplication].delegate;
    _conferenceClient = [appDelegate conferenceClient];
}

// Requests a conference token from the basic sample server.
// Exactly one of the two callbacks is invoked:
//   onSuccess — with the UTF-8 decoded token body on HTTP success;
//   onFailure — on any request failure (bug fix: this block was previously
//               never called, leaving callers such as -join: hanging).
// basicServer: base URL of the token server, expected to end with '/'.
-(void)getTokenFromBasicSample:(NSString *)basicServer onSuccess:(void (^)(NSString *))onSuccess onFailure:(void (^)())onFailure{
    AFHTTPRequestOperationManager *manager = [AFHTTPRequestOperationManager manager];
    manager.requestSerializer = [AFJSONRequestSerializer serializer];
    [manager.requestSerializer setValue:@"*/*" forHTTPHeaderField:@"Accept"];
    [manager.requestSerializer setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
    // Raw response serializer: the token arrives as a plain string body.
    manager.responseSerializer = [AFHTTPResponseSerializer serializer];
    manager.securityPolicy.allowInvalidCertificates = NO;
    manager.securityPolicy.validatesDomainName = YES;
    // Server expects {room, username, role}; an empty room joins the default room.
    NSDictionary *params = @{@"room": @"", @"username": @"ios", @"role": @"presenter"};
    [manager POST:[basicServer stringByAppendingString:@"createToken/"]
       parameters:params
          success:^(AFHTTPRequestOperation *operation, id responseObject) {
              // responseObject is NSData under AFHTTPResponseSerializer.
              NSString *token = [[NSString alloc] initWithData:responseObject
                                                      encoding:NSUTF8StringEncoding];
              if (onSuccess) onSuccess(token);
          }
          failure:^(AFHTTPRequestOperation *operation, NSError *error) {
              NSLog(@"Error: %@", error);
              // Bug fix: propagate the failure to the caller.
              if (onFailure) onFailure();
          }];
}

// Asks the conference server (via a JSON-Patch PATCH request) to mix the
// given publication into the room's "common" composed view.
-(void)mixToCommonView:(IRTCConferencePublication* )publication{
    AFHTTPRequestOperationManager *http = [AFHTTPRequestOperationManager manager];
    http.requestSerializer = [AFJSONRequestSerializer serializer];
    [http.requestSerializer setValue:@"*/*" forHTTPHeaderField:@"Accept"];
    [http.requestSerializer setValue:@"application/json" forHTTPHeaderField:@"Content-Type"];
    http.responseSerializer = [AFHTTPResponseSerializer serializer];
    http.securityPolicy.allowInvalidCertificates = NO;
    http.securityPolicy.validatesDomainName = YES;

    // Single JSON-Patch operation: add this stream to /info/inViews.
    NSArray *patchBody = @[@{@"op": @"add",
                             @"path": @"/info/inViews",
                             @"value": @"common"}];
    NSString *url = [NSString stringWithFormat:@"%@rooms/%@/streams/%@",
                     appDelegate.server, appDelegate.conferenceId,
                     publication.publicationId];
    [http PATCH:url
     parameters:patchBody
        success:nil
        failure:^(AFHTTPRequestOperation *operation, NSError *error) {
            NSLog(@"Error: %@", error);
        }];
}

// Joins the conference: fetches a token from the sample server, connects,
// then records the conference info and remote streams on the app delegate.
// Mixed (server-composed) streams are tracked separately from forward
// streams, which are keyed by stream id for later -subscribe:.
-(void)join:(id)sender{
    NSLog(@"join btn pressed!");
    // NOTE(review): hard-coded LAN address — extract to configuration.
    appDelegate.server = @"http://192.168.31.60:3001/";
    __block id safeBlockSelf = self;
    [self getTokenFromBasicSample:appDelegate.server onSuccess:^(NSString *token) {
        [_conferenceClient joinWithToken:token onSuccess:^(IRTCConferenceInfo* info) {
            NSLog(@"Connection success!");
            appDelegate.conferenceId = info.conferenceId;
            appDelegate.myself = info.myself;
            for (IRTCRemoteStream* stream in info.remoteStreams) {
                if (stream.source.video == IRTCVideoSourceInfoMixed) {
                    appDelegate.mixedStream = stream;
                } else {
                    // Keyed by stream id so -subscribe: can iterate them later.
                    appDelegate.forwardStreams[stream.streamId] = stream;
                }
                stream.delegate = appDelegate;
            }
            // Alerts must be presented on the main queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                [safeBlockSelf showMsg:@"Connect success!"];
            });
        } onFailure:^(NSError *msg) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [safeBlockSelf showMsg:[msg localizedDescription]];
            });
            NSLog(@"Connection failed! %@", msg);
        }];
    } onFailure:^{
        NSLog(@"Failed to get token from basic server.");
    }];
}

// Publishes the local camera stream (H.264, 1080p@24fps, back camera) to the
// conference and mixes it into the common view. Lazily creates both the
// local preview view and the camera stream on first use.
//
// Bug fix: a stray `return;` after the preview-view setup made everything
// below it unreachable, so pressing "publish" never actually published.
// Also removed a dead `parameters` local that was configured but never used.
-(void)publish:(id)sender{
    NSLog(@"publish btn pressed!");
    // Lazily build a small bordered preview view on the native panel.
    if (_localVideoView == nil) {
        _localVideoView = [[IRTCMTLVideoView alloc] init];
        CGRect previewFrame = CGRectZero;
        previewFrame.size.width = 160;
        previewFrame.size.height = 120;
        _localVideoView.frame = previewFrame;
        _localVideoView.layer.borderColor = [UIColor blackColor].CGColor;
        _localVideoView.layer.borderWidth = 2.0;
        _localVideoView.layer.position = CGPointMake(previewFrame.size.width / 2, 400);
        [_localVideoView.layer setMasksToBounds:YES];
        [_stream addSubview:_localVideoView];
    }
    if (appDelegate.localStream == nil) {
        IRTCStreamConstraints* constraints = [[IRTCStreamConstraints alloc] init];
        constraints.audio = YES;
        constraints.video = [[IRTCVideoTrackConstraints alloc] init];
        constraints.video.frameRate = 24;
        constraints.video.resolution = CGSizeMake(1920, 1080);
        constraints.video.devicePosition = AVCaptureDevicePositionBack;
        appDelegate.localStream = [[IRTCLocalCameraStream alloc] initWithConstratins:constraints error:nil];
        dispatch_async(dispatch_get_main_queue(), ^{
            // IRTCCameraPreviewView renders the raw AVCaptureSession and would
            // not reflect filters; attach the stream to a renderer instead.
            [appDelegate.localStream attach:_localVideoView];
        });
    }
    // Publish with a single H.264 encoding.
    IRTCPublishOptions* options = [[IRTCPublishOptions alloc] init];
    IRTCVideoCodecParameters* codec = [[IRTCVideoCodecParameters alloc] init];
    codec.name = IRTCVideoCodecH264;
    IRTCVideoEncodingParameters* video = [[IRTCVideoEncodingParameters alloc] init];
    video.codec = codec;
    NSMutableArray<IRTCVideoEncodingParameters*>* videos = [[NSMutableArray alloc] init];
    [videos addObject:video];
    options.video = videos;
    [_conferenceClient publish:appDelegate.localStream
        withOptions:options
        onSuccess:^(IRTCConferencePublication* p) {
            _cameraPublication = p;
            _cameraPublication.delegate = appDelegate;
            [self mixToCommonView:p];
            dispatch_async(dispatch_get_main_queue(), ^{
                [self showMsg:@"publish success!"];
                NSLog(@"publish success!");
            });
        }
        onFailure:^(NSError* err) {
            dispatch_async(dispatch_get_main_queue(), ^{
                NSLog(@"publish failure!");
                [self showMsg:@"publish failed!"];
            });
        }
    ];
}

// Publishes a screen-share stream (video only, H.264) to the conference and
// mixes it into the common view. Lazily creates the share stream on first use.
// Cleanup: removed a dead `parameters` local that was configured but never
// attached to the publish options.
-(void)share:(id)sender{
    NSLog(@"share btn pressed!");
    if (appDelegate.shareStream == nil) {
        IRTCShareScreenConstraints* constraints = [[IRTCShareScreenConstraints alloc] init];
        constraints.audio = NO;   // screen share carries no audio track
        constraints.video = YES;
        appDelegate.shareStream = [[IRTCShareScreenStream alloc] initWithConstratins:constraints error:nil];
    }
    // Publish with a single H.264 encoding (mirrors -publish:).
    IRTCPublishOptions* options = [[IRTCPublishOptions alloc] init];
    IRTCVideoCodecParameters* codec = [[IRTCVideoCodecParameters alloc] init];
    codec.name = IRTCVideoCodecH264;
    IRTCVideoEncodingParameters* video = [[IRTCVideoEncodingParameters alloc] init];
    video.codec = codec;
    NSMutableArray<IRTCVideoEncodingParameters*>* videos = [[NSMutableArray alloc] init];
    [videos addObject:video];
    options.video = videos;
    [_conferenceClient publish:appDelegate.shareStream
                   withOptions:options
        onSuccess:^(IRTCConferencePublication* p) {
            _sharePublication = p;
            _sharePublication.delegate = appDelegate;
            [self mixToCommonView:p];
            dispatch_async(dispatch_get_main_queue(), ^{
                [self showMsg:@"publish success!"];
                NSLog(@"publish success!");
            });
        }
        onFailure:^(NSError* err) {
            dispatch_async(dispatch_get_main_queue(), ^{
                NSLog(@"publish failure!");
                [self showMsg:@"publish failed!"];
            });
        }
    ];
}

// Subscribes to every remote forward stream not originated by ourselves and
// attaches each (last one wins) to a lazily-created remote preview view.
//
// Bug fix: the self-origin check compared NSString pointers with `!=`
// (stream.origin is an NSString — see -join:), which is almost always true
// even for equal strings; use -isEqualToString: value equality instead.
// Note: a nil origin still subscribes, matching the original behavior.
-(void)subscribe:(id)sender{
    NSLog(@"subscribe btn pressed!");
    // Lazily build a small bordered preview view on the native panel.
    if (_remoteVideoView == nil) {
        _remoteVideoView = [[IRTCMTLVideoView alloc] init];
        CGRect previewFrame = CGRectZero;
        previewFrame.size.width = 160;
        previewFrame.size.height = 120;
        _remoteVideoView.frame = previewFrame;
        _remoteVideoView.layer.borderColor = [UIColor blackColor].CGColor;
        _remoteVideoView.layer.borderWidth = 2.0;
        _remoteVideoView.layer.position = CGPointMake(previewFrame.size.width / 2, 600);
        [_remoteVideoView.layer setMasksToBounds:YES];
        [_stream addSubview:_remoteVideoView];
    }

    for (NSString *key in appDelegate.forwardStreams) {
        IRTCRemoteStream* stream = appDelegate.forwardStreams[key];
        NSLog(@"stream:%@", stream.streamId);
        if (![stream.origin isEqualToString:appDelegate.myself.userId]) {
            [_conferenceClient subscribe:stream
                withOptions:nil
                onSuccess:^(IRTCConferenceSubscription* subscription) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self showMsg:@"Subscribe stream success!"];
                        NSLog(@"Subscribe stream success.");
                        appDelegate.subscriptions[key] = subscription;
                        [stream attach:_remoteVideoView];
                        subscription.delegate = appDelegate;
                        appDelegate.subscription = subscription;
                        NSLog(@"stream:%@", stream.streamId);
                    });
                }
                onFailure:^(NSError *error) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        NSLog(@"Subscribe stream failed.%@", error);
                        [self showMsg:@"Subscribe stream failed."];
                    });
                }
            ];
        }
    }
}
// Mutes the audio track of the camera publication; no-op before publishing.
-(void)muteAudio:(id)sender{
    if (_cameraPublication == nil) {
        return;
    }
    [_cameraPublication mute:IRTCTrackKindAudio
                   onSuccess:^{
                       NSLog(@"mute audio success.");
                   }
                   onFailure:^(NSError * _Nonnull error) {
                       NSLog(@"mute audio failed.");
                   }];
}

// Unmutes the audio track of the camera publication; no-op before publishing.
-(void)unmuteAudio:(id)sender{
    if (_cameraPublication == nil) {
        return;
    }
    [_cameraPublication unmute:IRTCTrackKindAudio
                     onSuccess:^{
                         NSLog(@"unmute audio success.");
                     }
                     onFailure:^(NSError * _Nonnull error) {
                         NSLog(@"unmute audio failed.");
                     }];
}

// Mutes the video track of the camera publication; no-op before publishing.
-(void)muteVideo:(id)sender{
    if (_cameraPublication == nil) {
        return;
    }
    [_cameraPublication mute:IRTCTrackKindVideo
                   onSuccess:^{
                       NSLog(@"mute video success.");
                   }
                   onFailure:^(NSError * _Nonnull error) {
                       NSLog(@"mute video failed.");
                   }];
}

// Unmutes the video track of the camera publication; no-op before publishing.
-(void)unmuteVideo:(id)sender{
    if (_cameraPublication == nil) {
        return;
    }
    [_cameraPublication unmute:IRTCTrackKindVideo
                     onSuccess:^{
                         NSLog(@"unmute video success.");
                     }
                     onFailure:^(NSError * _Nonnull error) {
                         NSLog(@"unmute video failed.");
                     }];
}

// Adds one control button to the native side panel.
// title:  button label (mirrors the action name).
// action: selector invoked on touch-up-inside; target is self.
// y:      vertical origin inside the panel.
- (void)addControlButtonWithTitle:(NSString *)title
                           action:(SEL)action
                                y:(CGFloat)y {
    UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(0, y, 80, 30)];
    [button setTitle:title forState:UIControlStateNormal];
    [button addTarget:self action:action forControlEvents:UIControlEventTouchUpInside];
    [_stream addSubview:button];
}

// Builds the native control panel occupying the right 1/5 of the screen:
// a header label plus one button per conference action.
//
// Bug fix: the unmuteVideo: action was previously wired to muteVideoBtn
// (copy-paste error at the last button), leaving the unmuteVideo button
// inert and the muteVideo button firing both selectors.
-(void)streamView{
    _stream = [[UIView alloc] initWithFrame:CGRectMake(SCREEN_WIDTH * 4 / 5, 0,
                                                       SCREEN_WIDTH / 5, SCREEN_HEIGHT)];
    _stream.backgroundColor = [UIColor lightGrayColor];
    [self.view addSubview:_stream];

    UILabel *topLab = [[UILabel alloc] initWithFrame:CGRectMake(10, 10, 80, 30)];
    topLab.text = @"原生控件";   // "native controls"
    topLab.textColor = [UIColor redColor];
    topLab.font = [UIFont boldSystemFontOfSize:12];
    [_stream addSubview:topLab];

    [self addControlButtonWithTitle:@"join" action:@selector(join:) y:40];
    [self addControlButtonWithTitle:@"publish" action:@selector(publish:) y:70];
    [self addControlButtonWithTitle:@"share" action:@selector(share:) y:100];
    [self addControlButtonWithTitle:@"subscribe" action:@selector(subscribe:) y:130];
    [self addControlButtonWithTitle:@"muteAudio" action:@selector(muteAudio:) y:160];
    [self addControlButtonWithTitle:@"unmuteAudio" action:@selector(unmuteAudio:) y:190];
    [self addControlButtonWithTitle:@"muteVideo" action:@selector(muteVideo:) y:220];
    [self addControlButtonWithTitle:@"unmuteVideo" action:@selector(unmuteVideo:) y:250];
}
#pragma mark - WebView setup (this section does not need changes for now)
// Creates the embedded WKWebView occupying the left 4/5 of the screen and
// loads the demo URL.
//
// Bug fixes:
// - mediaTypesRequiringUserActionForPlayback is a WKAudiovisualMediaTypes
//   bit mask, not a BOOL; assigning YES (== 1) silently selected
//   WKAudiovisualMediaTypeAudio only. Use .All to require a user gesture for
//   all media types, which matches the apparent intent — TODO confirm.
// - Removed goBack/goForward/reload calls issued before any page was loaded;
//   they were no-ops on an empty back-forward list.
-(void)loadWeb{
    WKWebViewConfiguration *config = [[WKWebViewConfiguration alloc] init];
    WKPreferences *preference = [[WKPreferences alloc] init];
    preference.minimumFontSize = 0;
    preference.javaScriptCanOpenWindowsAutomatically = YES;
    config.preferences = preference;
    config.allowsInlineMediaPlayback = YES;
    config.mediaTypesRequiringUserActionForPlayback = WKAudiovisualMediaTypeAll;
    config.allowsPictureInPictureMediaPlayback = YES;
    config.applicationNameForUserAgent = @"ChinaDailyForiPad";

    config.userContentController = [[WKUserContentController alloc] init];

    _webView = [[WKWebView alloc] initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH * 4 / 5, SCREEN_HEIGHT)
                                  configuration:config];
    _webView.UIDelegate = self;
    _webView.navigationDelegate = self;
    _webView.allowsBackForwardNavigationGestures = YES;

    NSURL *url = [NSURL URLWithString:@"https://www.baidu.com/"];
    [_webView loadRequest:[NSURLRequest requestWithURL:url]];
    [self.view addSubview:_webView];
}

@end
