//
//  VoiceManage.m
//  RainbowFM
//
//  Created by Kratos on 14/12/30.
//  Copyright (c) 2014年 RB. All rights reserved.
//

#import "VoiceManage.h"

#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVFoundation.h>

#import "SpeexCodeUtility.h"


#define TAG_MAX_VOICE_RECORDER_TIME 48.5

// States of the on-screen recording indicator (see adjustVoiceRecorderImageByState:).
typedef NS_ENUM(NSUInteger, VoiceRecorderImageState)
{
    kVoiceStateReady = 0,    // idle — indicator overlay hidden
    kVoiceStateTransition,   // declared but not referenced in this file
    kVoiceStateRecording,    // actively recording — overlay shown and animating
};

// Private state for the push-to-talk recording flow.
@interface VoiceManage() <AVAudioRecorderDelegate>
{
    UILongPressGestureRecognizer *longPressGesture_;  // press-and-hold gesture installed via addlongPressGestureWith:
    AVAudioRecorder * recorder_;                      // active recorder; replaced on every new press
    NSMutableArray * avAudioSessionCategory_;         // saved [category, options] pair for session restore
    NSMutableDictionary* setting_;                    // AVAudioRecorder settings built once in init
    
    UIImageView *voiceRecordAnimationView_;           // lazily-built recording-indicator overlay
}

@end

@implementation VoiceManage
// Under ARC the ivar is released automatically; nil-ing it here only makes the
// teardown explicit. NOTE(review): the recorder is not stopped first, so a
// recording still in flight is abandoned when the manager goes away — confirm
// that is intended.
-(void)dealloc
{
    recorder_ = nil;
}

/// Designated initializer: resets the indicator, prepares the session-restore
/// buffer, and builds the AVAudioRecorder settings used for every recording.
- (instancetype)init
{
    self = [super init];
    if (self) {
        [self adjustVoiceRecorderImageByState:kVoiceStateReady];
        
        avAudioSessionCategory_ = [[NSMutableArray alloc] init];
        
        setting_ = [[NSMutableDictionary alloc] initWithCapacity:6];
        // Recording format: uncompressed linear PCM.
        setting_[AVFormatIDKey] = @(kAudioFormatLinearPCM);
        // Sample rate: deliberately low (2 kHz vs. the usual 44.1 kHz),
        // presumably to keep voice-message files small — TODO confirm.
        setting_[AVSampleRateKey] = @2000.0f;
        // Single (mono) channel.
        setting_[AVNumberOfChannelsKey] = @1;
        // Linear PCM bit depth (intentionally left at the default):
        //[recordSettings setValue :[NSNumber numberWithInt:16] forKey: AVLinearPCMBitDepthKey];
        // Minimum encoder quality, matching the low sample rate.
        setting_[AVEncoderAudioQualityKey] = @(AVAudioQualityMin);
    }
    return self;
}

// Installs the press-and-hold gesture that drives push-to-talk recording on
// the given view. The recognizer is retained so the max-duration timeout and
// the delegate callbacks can disable/re-enable it later.
-(void)addlongPressGestureWith:(UIView *)view{
    UILongPressGestureRecognizer *recognizer =
        [[UILongPressGestureRecognizer alloc] initWithTarget:self
                                                      action:@selector(manageNormalVoice:)];
    recognizer.minimumPressDuration = 0.8;
    longPressGesture_ = recognizer;
    [view addGestureRecognizer:recognizer];
}


#pragma mark - 录音类型分类处理
// Long-press handler driving the whole push-to-talk flow:
//   Began     -> purge stale files, take over the audio session, start recording
//   Ended     -> stop the recorder (data is delivered via the delegate callback)
//   Failed    -> reset the indicator
//   Cancelled -> stop the recorder if the max-duration timeout disabled the gesture
-(void)manageNormalVoice:(UILongPressGestureRecognizer*)longPressGes
{
    switch (longPressGes.state) {
        case UIGestureRecognizerStateBegan:
        {
            [self voiceSourceFileClear];
            
            // Remember the current session configuration so it can be put back
            // after recording (see reStoreAVAudioSessionState).
            [self saveAVAudioSessionState];
            AVAudioSession * audioSession = [AVAudioSession sharedInstance];
            [audioSession setCategory:AVAudioSessionCategoryRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:nil];
            [audioSession setActive:YES withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
            
            // File is named "<unix-timestamp>.wav"; voiceSourceFileClear relies
            // on this naming when purging old recordings.
            NSString *fileString = [NSTemporaryDirectory() stringByAppendingPathComponent:
                                    [NSString stringWithFormat:@"%ld.wav", (long)time(NULL)]];
            
            // BUG FIX: a filesystem path must go through fileURLWithPath:.
            // URLWithString: expects a full URL string, so it produced a
            // schemeless URL (and fails outright on paths containing spaces).
            NSURL * fileUrl = [NSURL fileURLWithPath:fileString];
            
            recorder_ = [[AVAudioRecorder alloc] initWithURL:fileUrl settings:setting_ error:nil];
            recorder_.delegate = self;
            
            if ([recorder_ prepareToRecord] && [recorder_ record]) {
                // Hard stop at the maximum duration, with a small grace period.
                [self performSelector:@selector(maxVoiceRecordTimeWith:) withObject:recorder_ afterDelay:TAG_MAX_VOICE_RECORDER_TIME + 0.1];
                // Defer the indicator swap so it doesn't run inside the gesture callback.
                [self performSelector:@selector(adjustVoiceRecorder) withObject:nil afterDelay:0.02];
                NSLog(@"voice start:");
            }
        }
            break;
            
        case UIGestureRecognizerStateEnded:
        {
            // Drop the pending max-duration / indicator selectors before stopping.
            [NSObject cancelPreviousPerformRequestsWithTarget:self];
            [recorder_ stop];
            NSLog(@"press end to stopRecorder");
            
            [self adjustVoiceRecorderImageByState:kVoiceStateReady];
        }
            break;
            
        case UIGestureRecognizerStateFailed:
        {
            NSLog(@"voice failed");
            
            [self adjustVoiceRecorderImageByState:kVoiceStateReady];
        }
            break;
            
        case UIGestureRecognizerStateCancelled:
        {
            // Cancelled is reached when maxVoiceRecordTimeWith: (or the encode-
            // error callback) disabled the gesture mid-press: stop the recorder
            // and re-arm the gesture for the next press.
            if (![longPressGesture_ isEnabled] && [recorder_ isRecording]) {
                [recorder_ stop];
                NSLog(@"press cancell to stopRecorder");
            }
            longPressGesture_.enabled = YES;
            
            [self adjustVoiceRecorderImageByState:kVoiceStateReady];
        }
            break;
            
        default:
            break;
    }
    
}


#pragma mark - Voice Source File Management
// Purges recordings older than 30 minutes from the temp directory, off the
// main thread. Recordings are named "<10-digit-unix-timestamp>.wav" (see
// manageNormalVoice:), so the timestamp sits in the last 14 characters.
-(void)voiceSourceFileClear
{
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
        NSString * tmpDir = NSTemporaryDirectory();
        NSFileManager * fileManage = [NSFileManager defaultManager];
        NSArray * fileArray = [fileManage contentsOfDirectoryAtPath:tmpDir error:nil];
        
        for (NSString *file in fileArray) {
            // Guard the length too: substringWithRange: throws on short names.
            if ([file hasSuffix:@".wav"] && file.length >= 14) {
                NSString * tString = [file substringWithRange:NSMakeRange(file.length - 14, 10)];
                time_t t = (time_t)[tString longLongValue];
                if (time(0) - t > 30 * 60) {
                    // BUG FIX: contentsOfDirectoryAtPath: returns bare file
                    // names, so the full path must be rebuilt before deleting —
                    // the original removeItemAtPath:file never deleted anything.
                    [fileManage removeItemAtPath:[tmpDir stringByAppendingPathComponent:file] error:nil];
                }
            }
        }
        
    });
}

#pragma mark - View state Image Control
// Deferred hop into the "recording" indicator state; scheduled via
// performSelector:withObject:afterDelay: from manageNormalVoice:.
-(void)adjustVoiceRecorder
{
    VoiceRecorderImageState targetState = kVoiceStateRecording;
    [self adjustVoiceRecorderImageByState:targetState];
}

// Funnel for all indicator-state changes; currently just forwards to the
// animation routine so the overlay's show/hide logic lives in one place.
-(void)adjustVoiceRecorderImageByState:(VoiceRecorderImageState)state
{
    [self voiceRecordAnimationWithState:state];
}

// Shows/hides the recording-indicator overlay. The overlay is built lazily:
// a background image view with an animating two-frame image view (tag 1000)
// layered on top.
-(void)voiceRecordAnimationWithState:(VoiceRecorderImageState)state
{
    if (voiceRecordAnimationView_ == nil) {
        UIImageView *backdrop = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"voiceAnimation_backGroundView"]];
        backdrop.backgroundColor = [UIColor clearColor];
        backdrop.center = CGPointMake(160.0, 250.0);  // hard-coded for a 320pt-wide layout
        
        UIImageView *pulseView = [[UIImageView alloc] initWithFrame:CGRectMake(100.0, 57.0, 27.0, 58.0)];
        pulseView.tag = 1000;
        pulseView.backgroundColor = [UIColor clearColor];
        pulseView.animationDuration = 1.0;
        pulseView.animationRepeatCount = HUGE_VALF;  // loop until stopped
        pulseView.animationImages = @[[UIImage imageNamed:@"voiceAnimation_view_0"],
                                      [UIImage imageNamed:@"voiceAnimation_view_1"]];
        [backdrop addSubview:pulseView];
        
        voiceRecordAnimationView_ = backdrop;
    }
    
    UIImageView *pulseView = (UIImageView *)[voiceRecordAnimationView_ viewWithTag:1000];
    
    if (state != kVoiceStateRecording) {
        // Any non-recording state: halt the animation and hide the overlay.
        if ([pulseView isAnimating]) {
            [pulseView stopAnimating];
        }
        [voiceRecordAnimationView_ removeFromSuperview];
        return;
    }
    
    // Recording: attach the overlay to the app's first window and animate.
    [[[UIApplication sharedApplication].windows firstObject] addSubview:voiceRecordAnimationView_];
    if (![pulseView isAnimating]) {
        [pulseView startAnimating];
    }
}

#pragma mark - Voice Recorder Handle Method
// Fires TAG_MAX_VOICE_RECORDER_TIME (+0.1s) after recording starts. If the
// same recorder is still the active one and the press is still down, disable
// the gesture — that drives it into StateCancelled, where manageNormalVoice:
// stops the recorder and re-enables the gesture.
-(void)maxVoiceRecordTimeWith:(AVAudioRecorder*)recorder
{
    BOOL sameRecorder = [recorder_ isEqual:recorder];
    UIGestureRecognizerState pressState = longPressGesture_.state;
    BOOL pressStillDown = (pressState == UIGestureRecognizerStateChanged) ||
                          (pressState == UIGestureRecognizerStateBegan);
    if (sameRecorder && pressStillDown) {
        longPressGesture_.enabled = NO;
    }
}

// Snapshots the current audio-session category and options (as a two-element
// [category, options] pair) so they can be re-applied after recording by
// reStoreAVAudioSessionState.
-(void)saveAVAudioSessionState
{
    AVAudioSession * session = [AVAudioSession sharedInstance];
    [avAudioSessionCategory_ addObject:session.category];
    [avAudioSessionCategory_ addObject:@(session.categoryOptions)];
}

// Re-applies the category/options pair captured by saveAVAudioSessionState
// and empties the snapshot buffer. No-op if nothing was saved.
-(void)reStoreAVAudioSessionState
{
    if ([avAudioSessionCategory_ count] < 2) {
        return;
    }
    
    NSString * savedCategory = avAudioSessionCategory_[0];
    AVAudioSessionCategoryOptions savedOptions = [avAudioSessionCategory_[1] unsignedIntegerValue];
    [[AVAudioSession sharedInstance] setCategory:savedCategory withOptions:savedOptions error:nil];
    
    [avAudioSessionCategory_ removeAllObjects];
}

#pragma mark - AVAudioRecorderDelegate Method
// AVAudioRecorderDelegate: hands the finished recording's raw bytes to the
// delegate, then restores the audio session captured at record start.
-(void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag
{
    if (flag) {
        NSURL * fileURL = [recorder url];
        NSData * fileData = fileURL ? [NSData dataWithContentsOfURL:fileURL] : nil;
        if (fileData) {
            if (_voiceRecorderDelegate && [_voiceRecorderDelegate respondsToSelector:@selector(didFinishRecord:encodeVoiceData:)]) {
                [_voiceRecorderDelegate didFinishRecord:self encodeVoiceData:fileData];
            }
            //[self handleAVAudioByCafFilePath:fileURL];
        } else {
            // ROBUSTNESS: previously a missing/unreadable file was silently
            // ignored (or nil data was handed to the delegate); report it
            // through the same error path as a failed recording.
            [self voiceRecordeErrorWithState:0];
            NSLog(@"recorder finished but file could not be read");
        }
    } else {
        [self voiceRecordeErrorWithState:0];
        NSLog(@"recorder finish record fail");
    }
    
    [self reStoreAVAudioSessionState];
}

// AVAudioRecorderDelegate: an encoding error aborts the session. Disabling
// the gesture while the finger is still down forces StateCancelled, where
// manageNormalVoice: stops the recorder and re-arms the gesture.
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError *)error
{
    BOOL isActiveRecorder = [recorder_ isEqual:recorder];
    if (isActiveRecorder && longPressGesture_.state == UIGestureRecognizerStateChanged) {
        longPressGesture_.enabled = NO;
    }
    
    [self voiceRecordeErrorWithState:0];
    [self reStoreAVAudioSessionState];
}

#pragma mark - AVAudio Data Handle Method
// Reads the recorded PCM file at `path`, encodes it to raw Speex off the main
// thread, and delivers the result (or an error) to the delegate on the main
// queue. Currently unreferenced (the call site in
// audioRecorderDidFinishRecording: is commented out).
-(void)handleAVAudioByCafFilePath:(NSURL*)path
{
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
        
        // BUG FIX: audioFile was uninitialized and every OSStatus was ignored,
        // so a failed AudioFileOpenURL led to AudioFileGetProperty/
        // AudioFileClose being called on a garbage AudioFileID.
        AudioFileID audioFile = NULL;
        UInt64 fileDataSize = 0;
        UInt32 thePropertySize = sizeof(fileDataSize);
        
        OSStatus theErr = AudioFileOpenURL((__bridge CFURLRef)path, kAudioFileReadPermission, 0, &audioFile);
        if (theErr != noErr) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self voiceRecordeErrorWithState:0];
            });
            return;
        }
        
        theErr = AudioFileGetProperty(audioFile, kAudioFilePropertyAudioDataByteCount, &thePropertySize, &fileDataSize);
        
        UInt32 dataSize = (UInt32)fileDataSize;
        // Only attempt the read/encode when the size query succeeded.
        void* theData = (theErr == noErr) ? malloc(dataSize) : NULL;
        if (theData) {
            AudioFileReadBytes(audioFile, false, 0, &dataSize, theData);
            
            // Mono, 16-bit PCM in; raw Speex frames out.
            NSData *speexData = encodePCMToRawSpeex(theData, dataSize, 1, 16);
            
            dispatch_async(dispatch_get_main_queue(), ^{
                // Anything at or below 80 bytes is treated as an empty/failed
                // recording rather than deliverable voice data.
                if ([speexData length] > 80) {
                    if (_voiceRecorderDelegate && [_voiceRecorderDelegate respondsToSelector:@selector(didFinishRecord:encodeVoiceData:)]) {
                        [_voiceRecorderDelegate didFinishRecord:self encodeVoiceData:speexData];
                    }
                } else {
                    [self voiceRecordeErrorWithState:0];
                }
            });
            free(theData);
        } else {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self voiceRecordeErrorWithState:0];
            });
        }
        
        AudioFileClose(audioFile);
        
    });
}

// Notifies the delegate, if it implements the optional callback, that
// recording failed with the given state code. (Method name's "Recorde"
// spelling is kept — it is part of the existing interface.)
-(void)voiceRecordeErrorWithState:(NSUInteger)state
{
    BOOL canNotify = (_voiceRecorderDelegate != nil) &&
        [_voiceRecorderDelegate respondsToSelector:@selector(voiceRecorder:errorWithState:)];
    if (canNotify) {
        [_voiceRecorderDelegate voiceRecorder:self errorWithState:state];
    }
}

@end
