//
//  JSQVoiceView.m
//  Conversation
//
//  Created by C_HAO on 15/9/25.
//  Copyright © 2015年 CHAOO. All rights reserved.
//

#import "JSQVoiceView.h"
#import "UIImage+JSQMessages.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>


@interface JSQVoiceView ()

// Timestamp captured when the long press begins; compared against "now" on
// gesture end to reject recordings shorter than 0.5 s.
// NOTE(review): "begenDate" looks like a typo for "beginDate" — it is private
// to this file, so renaming would be a local-only change.
@property(nonatomic, strong) NSDate *begenDate;

@end

@implementation JSQVoiceView

- (void)awakeFromNib {
    [super awakeFromNib];

    // Initial "hold to talk" appearance: highlight artwork plus the
    // instruction label. (String is user-facing; kept verbatim.)
    _imageView.image = [UIImage jsq_VoiceHighlight];
    _label.text = @"按住说话";
}

/// Handles the long-press "hold to talk" gesture. Requests microphone
/// permission, then maps the recognizer's state (and whether the touch is
/// still over the record image) to a JSQVoiceState delegate callback.
/// @param sender The UILongPressGestureRecognizer wired up in the nib.
- (IBAction)longPress:(id)sender {
    // Presses shorter than this are reported as JSQVoiceStateShortTime.
    static const NSTimeInterval kJSQMinimumRecordDuration = 0.5;

    __weak typeof(self) weakSelf = self;

    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    if (![audioSession respondsToSelector:@selector(requestRecordPermission:)]) {
        return;
    }

    [audioSession requestRecordPermission:^(BOOL granted) {
        // The permission handler is not guaranteed to run on the main
        // thread; hop over before touching UIKit or notifying the delegate
        // (which drives UI updates).
        dispatch_async(dispatch_get_main_queue(), ^{
            __strong typeof(weakSelf) strongSelf = weakSelf;
            if (!strongSelf) {
                return;
            }

            if (!granted) {
                // NOTE(review): UIAlertView is deprecated; migrating to
                // UIAlertController needs a presenting view controller,
                // which this view does not have access to.
                UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"无法访问麦克风" message:@"当前未开启麦克风服务，请至设定->隐私->麦克风，开启本程序的麦克风功能" delegate:nil cancelButtonTitle:@"确定" otherButtonTitles:nil];
                [alertView show];
                return;
            }

            UILongPressGestureRecognizer *recognizer = (UILongPressGestureRecognizer *)sender;
            // Fix: the original called -locationInView:self here, strongly
            // capturing self inside the block and defeating the weak/strong
            // dance above. Use the strongified reference instead.
            CGPoint point = [recognizer locationInView:strongSelf];
            BOOL onButton = CGRectContainsPoint(strongSelf.imageView.frame, point);

            switch (recognizer.state) {
            case UIGestureRecognizerStateBegan: {
                strongSelf.begenDate = [NSDate date];
                [strongSelf.delegate voiceState:JSQVoiceStateBegin];
                break;
            }
            case UIGestureRecognizerStateChanged: {
                // Dragging off the record image previews cancellation.
                [strongSelf.delegate voiceState:onButton ? JSQVoiceStateMoveIn
                                                         : JSQVoiceStateMoveOuter];
                break;
            }
            case UIGestureRecognizerStateEnded: {
                if (!onButton) {
                    // Released outside the image: cancel the recording.
                    [strongSelf.delegate voiceState:JSQVoiceStateCancel];
                } else {
                    NSTimeInterval duration =
                        [[NSDate date] timeIntervalSinceDate:strongSelf.begenDate];
                    [strongSelf.delegate voiceState:(duration < kJSQMinimumRecordDuration)
                                                        ? JSQVoiceStateShortTime
                                                        : JSQVoiceStateSend];
                }
                break;
            }
            case UIGestureRecognizerStateCancelled: {
                [strongSelf.delegate voiceState:JSQVoiceStateCancel];
                break;
            }
            case UIGestureRecognizerStateFailed: {
                [strongSelf.delegate voiceState:JSQVoiceStateFailed];
                break;
            }
            default:
                break;
            }
        });
    }];
}

@end
