//
//  RecordViewController.m
//  EZAudioRecordExample
//
//  Created by RainSets on 12/15/13.
//  Copyright (c) 2013 RainSets. All rights reserved.
//

#import "RecordViewController.h"
#import "TTCounterLabel.h"
#import "MediaImproveController.h"

// Counter states for the TTCounterLabel timer. Currently unreferenced within
// this file; presumably consumed by TTCounterLabelDelegate callbacks — verify.
// NOTE(review): naming deviates from NS_ENUM convention (type should be
// UpperCamelCase, e.g. TTCounterState); renaming would break outside users.
typedef NS_ENUM(NSInteger, kTTCounter){
    kTTCounterRunning = 0,
    kTTCounterStopped,
    kTTCounterReset,
    kTTCounterEnded
};

// Private interface: nib-wired outlets/actions plus TTCounterLabelDelegate
// conformance for the elapsed-time label. (Previous comment mentioned
// AVPlayer, which is not used anywhere in this controller.)
@interface RecordViewController ()<TTCounterLabelDelegate>
// Dismisses this controller, discarding the recording UI state.
- (IBAction)cancelRecordAction:(id)sender;
// Toggle button: "Record" <-> "Recording".
@property (weak, nonatomic) IBOutlet UIButton *btnStartStopRecord;
- (IBAction)startStopRecordAction:(id)sender;
// Revealed after a recording stops; pushes MediaImproveController.
@property (weak, nonatomic) IBOutlet UIButton *btnDoneRecord;
- (IBAction)doneRecordAction:(id)sender;

// Displays elapsed recording time.
@property (weak, nonatomic) IBOutlet TTCounterLabel *counterLabel;
@end

@implementation RecordViewController
// NOTE(review): explicit @synthesize is redundant under auto-synthesis, but
// these two synthesize ivars WITHOUT the leading underscore (audioPlot,
// microphone) — removing them would rename the ivars, so they stay.
@synthesize audioPlot;
@synthesize microphone;

@synthesize stRecord = _stRecord;
#pragma mark - Initialization

// Nib-based initializer; funnels shared setup through -initializeViewController.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    if ((self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil])) {
        [self initializeViewController];
    }
    return self;
}

// Plain-code initializer path; mirrors the nib and coder initializers.
- (id)init {
    if ((self = [super init])) {
        [self initializeViewController];
    }
    return self;
}

// Storyboard/archive initializer path; same shared setup as the other inits.
- (id)initWithCoder:(NSCoder *)aDecoder {
    if ((self = [super initWithCoder:aDecoder])) {
        [self initializeViewController];
    }
    return self;
}

#pragma mark - Initialize View Controller Here
// Shared one-time setup used by every init path (and re-invoked when a new
// recording session starts): creates the EZMicrophone with this controller
// as its delegate so the audio callbacks below fire.
-(void)initializeViewController {
    // Create an instance of the microphone and tell it to use this view controller instance as the delegate
    self.microphone = [EZMicrophone microphoneWithDelegate:self];
}

#pragma mark - Customize the Audio Plot
// Configures the waveform plot, initial button state, and the counter
// label's appearance. Behavior unchanged; dead commented-out code removed
// and the mixed 2/4-space indentation normalized.
- (void)viewDidLoad {
    [super viewDidLoad];

    // Counter label fonts/colors (optional cosmetics).
    [self customiseAppearance];

    // Resize the plot: keep its origin and height, stretch its width so it
    // runs from its x-origin to the view's horizontal midpoint.
    CGRect plotFrame = self.audioPlot.frame;
    plotFrame.size.width = self.view.frame.size.width / 2.f - plotFrame.origin.x;
    [self.audioPlot setFrame:plotFrame];

    // Waveform appearance: teal rolling plot, filled and mirrored, on black.
    self.audioPlot.backgroundColor = [UIColor blackColor];
    self.audioPlot.color           = [UIColor colorWithRed:76.f/255.f green:214.f/255.f blue:193.f/255.f alpha:1.0];
    self.audioPlot.plotType        = EZPlotTypeRolling;
    self.audioPlot.shouldFill      = YES;
    self.audioPlot.shouldMirror    = YES;

    // Initial control state: ready to record; "Done" stays hidden until a
    // recording has been stopped.
    [self.btnStartStopRecord setTitle:@"Record" forState:UIControlStateNormal];
    self.btnDoneRecord.hidden = YES;

    // Log where the recording will be written inside the app sandbox.
    NSLog(@"File written to application sandbox's documents directory: %@",[self testFilePathURL]);
}

#pragma mark - EZMicrophoneDelegate
#warning Thread Safety
// Note that any callback that provides streamed audio data (like streaming microphone input) happens on a separate audio thread that should not be blocked. When we feed audio data into any of the UI components we need to explicitly create a GCD block on the main thread to properly get the UI to work.
- (void)microphone:(EZMicrophone *)microphone
  hasAudioReceived:(float **)buffer
    withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels {
    // Stereo input arrives as one float* per channel: buffer[0] is the left
    // channel, buffer[1] the right. This callback runs on the audio thread,
    // so hop to the main queue before touching any UI component.
    float *leftChannel = buffer[0];
    dispatch_async(dispatch_get_main_queue(), ^{
        // The plot only needs the raw samples and their count; it handles
        // its own drawing, history management, and resource cleanup.
        [self.audioPlot updateBuffer:leftChannel withBufferSize:bufferSize];
    });
}

- (void)microphone:(EZMicrophone *)microphone
     hasBufferList:(AudioBufferList *)bufferList
    withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels {
    // AudioBufferList form of the mic callback, suitable for feeding the
    // EZRecorder directly. Runs on the audio thread; no UI work happens
    // here, so no main-queue hop is needed.
    if (!self.isRecording) {
        return;
    }
    // Appends to the tail of the destination audio file.
    [self.recorder appendDataFromBufferList:bufferList
                             withBufferSize:bufferSize];
}

#pragma mark - Utility
// Returns the sandbox's Documents-directory search paths (usually one entry).
- (NSArray *)applicationDocuments {
    NSArray *documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    return documentPaths;
}

// Convenience for the first Documents-directory path, or nil when none exist.
- (NSString *)applicationDocumentsDirectory
{
    // -firstObject is nil-safe, matching the original count-guarded lookup.
    return [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
}

// File URL for the recording inside the app's Documents directory.
// Uses -stringByAppendingPathComponent: instead of a "%@/%@" format so path
// separators are handled correctly whether or not kAudioFilePath carries a
// leading slash.
- (NSURL *)testFilePathURL {
    NSString *path = [[self applicationDocumentsDirectory] stringByAppendingPathComponent:kAudioFilePath];
    return [NSURL fileURLWithPath:path];
}

// Cancel: dismiss the recorder UI. Any partially written file is left as-is.
- (IBAction)cancelRecordAction:(id)sender {
    [self dismissViewControllerAnimated:YES completion:nil];
}
// Toggles recording.
// Start: reset/start the timer, (re)build the capture pipeline when a prior
// session exists, create the recorder, THEN raise the recording flag and
// start the mic — the original set isRecording and started the mic before
// the recorder existed, so the earliest audio callbacks saw a nil recorder
// and dropped buffers.
// Stop: halt the timer and mic, clear the recording flag BEFORE closing the
// file so the audio-thread callback cannot append to a closed recorder (the
// original never reset isRecording), then reveal the Done button.
- (IBAction)startStopRecordAction:(id)sender {
    if (_stRecord == stopRecord) {
        // --- Start a new recording session ---
        _stRecord = startRecord;
        [_counterLabel reset]; // reset running time
        [_counterLabel start]; // start running time
        [_btnStartStopRecord setTitle:@"Recording" forState:UIControlStateNormal];
        // A non-nil recorder means a previous session ran; rebuild the
        // microphone for a clean pipeline (preserves the original re-init
        // behavior on second and later recordings, which was previously
        // triggered by the stale isRecording flag).
        if (self.recorder) {
            [self initializeViewController];
        }
        /*
         Create the recorder before enabling the flag or starting the mic
         */
        self.recorder = [EZRecorder recorderWithDestinationURL:[self testFilePathURL]
                                                  sourceFormat:self.microphone.audioStreamBasicDescription
                                           destinationFileType:EZRecorderFileTypeM4A];
        self.isRecording = YES;
        _btnDoneRecord.hidden = YES;
        /*
         Start the microphone
         */
        [self.microphone startFetchingAudio];
    } else {
        // --- Stop the current recording session ---
        _stRecord = stopRecord;
        [_counterLabel stop]; // stop running time
        [_btnStartStopRecord setTitle:@"Record" forState:UIControlStateNormal];
        /*
         Stop the microphone
         */
        [self.microphone stopFetchingAudio];
        // Clear the flag first: the audio callback checks it before
        // appending, and the file is about to be closed.
        self.isRecording = NO;
        [self.recorder closeAudioFile];
        _btnDoneRecord.hidden = NO;
        [self.audioPlot setNeedsDisplay];
        [self.audioPlot setNeedsLayout];
    }
}
// Done: hand the finished recording's URL to the media-improve screen.
- (IBAction)doneRecordAction:(id)sender {
    NSURL *recordedFileURL = [self testFilePathURL];
    DLog(@"recordFileUrl=%@",recordedFileURL);
    MediaImproveController *mediaImpVC =
        [[MediaImproveController alloc] initWithNibName:@"MediaImproveController" bundle:nil];
    [mediaImpVC setBtnDetailAudio:recordedFileURL];
    [self.navigationController pushViewController:mediaImpVC animated:YES];
}


#pragma mark - Private

// Applies the counter label's fonts and color, then refreshes its layout.
- (void)customiseAppearance {
    UIFont *counterFont = [UIFont systemFontOfSize:25.f];

    // One 25pt system font everywhere: bold, regular, and the base font
    // (the base font property styles the H, M, S and MS segments).
    [_counterLabel setBoldFont:counterFont];
    [_counterLabel setRegularFont:counterFont];
    [_counterLabel setFont:counterFont];

    _counterLabel.textColor = [UIColor darkGrayColor];

    // TTCounterLabel requires this after any appearance change.
    // (Selector spelling "updateApperance" is the library's own.)
    [_counterLabel updateApperance];
}
@end
