//
//  ViewController.m
//  Alarm3DExperiments2
//
//  Created by 우성 김 on 12. 3. 29..
//  Copyright (c) 2012년 yy01ar@gmail.com. All rights reserved.
//

#import "ViewController.h"
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
#import "DDLog.h"

#ifdef DEBUG
static const int ddLogLevel = LOG_LEVEL_VERBOSE;
#else
static const int ddLogLevel = LOG_LEVEL_WARN;
#endif

// Private class extension (no private members declared yet).
@interface ViewController ()

@end

@implementation ViewController
// NOTE(review): explicit @synthesize without an ivar spec creates ivars named
// exactly "uislider"/"uislider2" (not "_uislider"), which is how the methods
// below reference them — keep these directives if removing, rename the ivars.
@synthesize uislider;
@synthesize uislider2;


- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    // (Dead commented-out playback/number-generation experiments removed;
    // see source control history if needed.)

    // Style both volume sliders with custom artwork.
    // uislider uses the retina (@2X) assets, uislider2 the non-retina ones.
    [uislider setThumbImage:[UIImage imageNamed:@"sound-volume@2X.png"] forState:UIControlStateNormal];
    [uislider setThumbImage:[UIImage imageNamed:@"sound-volume@2X.png"] forState:UIControlStateHighlighted];

    [uislider2 setThumbImage:[UIImage imageNamed:@"sound-volume.png"] forState:UIControlStateNormal];
    [uislider2 setThumbImage:[UIImage imageNamed:@"sound-volume.png"] forState:UIControlStateHighlighted];

    // The colored bar is the filled (minimum) track; the BG image is the
    // unfilled (maximum) track.
    [uislider setMinimumTrackImage:[UIImage imageNamed:@"sound-volume_color_bar@2X.png"] forState:UIControlStateNormal];
    [uislider setMaximumTrackImage:[UIImage imageNamed:@"sound-volume_color_bar_BG@2X.png"] forState:UIControlStateNormal];

    // BUGFIX: these two calls were swapped relative to uislider above, giving
    // uislider2 an inverted track (BG art on the filled side, colored bar on
    // the empty side).
    [uislider2 setMinimumTrackImage:[UIImage imageNamed:@"sound-volume_color_bar.png"] forState:UIControlStateNormal];
    [uislider2 setMaximumTrackImage:[UIImage imageNamed:@"sound-volume_color_bar_BG.png"] forState:UIControlStateNormal];
}

- (void)viewDidUnload
{
    // Drop the slider outlets; the view hierarchy is being torn down.
    [self setUislider:nil];
    [self setUislider2:nil];
    [super viewDidUnload];
    // Release any retained subviews of the main view.
}

// Debug action: logs the main-bundle path of the "alarm-clock-1.wav" resource.
- (IBAction)findBundle:(id)sender {
    NSString *resourcePath = [[NSBundle mainBundle] pathForResource:@"alarm-clock-1"
                                                             ofType:@"wav"];
    NSLog(@"%@\n", resourcePath);
}


// Allow every interface orientation except upside-down portrait.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if (interfaceOrientation == UIInterfaceOrientationPortraitUpsideDown) {
        return NO;
    }
    return YES;
}

// Presents the system media picker so the user can choose a single song.
// Results arrive via the MPMediaPickerController delegate callbacks below.
- (IBAction)pickItem:(id)sender {
    MPMediaPickerController *picker =
        [[MPMediaPickerController alloc] initWithMediaTypes:MPMediaTypeMusic];
    [picker setDelegate:self];
    [picker setAllowsPickingMultipleItems:NO];
    [self presentModalViewController:picker animated:YES];
}

// Media picker delegate methods

// Media picker delegate: the user picked one song. Extracts its audio into
// the app's Documents directory — AAC/Apple Lossless via AVAssetExportSession,
// everything else as a raw sample-data copy — and then starts playback of the
// exported file in the background.
- (void)mediaPicker: (MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection {
	// We need to dismiss the picker
	[self dismissModalViewControllerAnimated:YES];

    MPMediaItem *item = [[mediaItemCollection items] objectAtIndex:0];
    pid = [item valueForProperty:MPMediaItemPropertyPersistentID];

    NSURL *itemUrl = [item valueForProperty:MPMediaItemPropertyAssetURL];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:itemUrl options:nil];

    // Determine the source codec from the first audio track's stream description.
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] == 0) {
        // DRM-protected or otherwise unreadable items expose no audio track;
        // the original code crashed here with an out-of-bounds access.
        NSLog(@"No readable audio track for the picked item");
        return;
    }
    AVAssetTrack *track = [tracks objectAtIndex:0];
    id desc = [track.formatDescriptions objectAtIndex:0];
    const AudioStreamBasicDescription *audioDesc =
        CMAudioFormatDescriptionGetStreamBasicDescription((__bridge CMAudioFormatDescriptionRef)desc);
    FourCharCode formatID = audioDesc->mFormatID;

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    if (!documentsDirectory) {
        NSLog(@"Documents directory not found!");
    }

    // Map the codec to a container UTI (the exporter's outputFileType) and a
    // file extension for the exported file's name.
    NSString *fileType = nil;
    NSString *ex = nil;
    switch (formatID) {
        case kAudioFormatLinearPCM: {
            UInt32 flags = audioDesc->mFormatFlags;
            if (flags & kAudioFormatFlagIsBigEndian) {
                fileType = @"public.aiff-audio";
                ex = @"aif";
            } else {
                fileType = @"com.microsoft.waveform-audio";
                ex = @"wav";
            }
            break;
        }
        case kAudioFormatMPEGLayer3:
            // BUGFIX: was @"com.apple.quicktime-movie", which is not the MP3
            // UTI. (MP3 takes the raw-copy path below, so fileType is only
            // logged — but log the correct value.)
            fileType = @"public.mp3";
            ex = @"mp3";
            break;
        case kAudioFormatMPEG4AAC:
            fileType = @"com.apple.m4a-audio";
            ex = @"m4a";
            break;
        case kAudioFormatAppleLossless:
            fileType = @"com.apple.m4a-audio";
            ex = @"m4a";
            break;
        default:
            // BUGFIX: unknown codecs previously produced "exported.(null)".
            ex = @"dat";
            break;
    }
    NSLog(@"File type : %@ (%@)", fileType, ex);

    NSString *exportFile = [documentsDirectory stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"exported.%@", ex]];
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];

    if ([ex isEqualToString:@"m4a"]) {
        // AAC / Apple Lossless: re-wrap into an .m4a container.
        AVAssetExportSession *exporter =
            [[AVAssetExportSession alloc] initWithAsset:asset
                                             presetName:AVAssetExportPresetAppleM4A];
        AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
        AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
            [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:[asset.tracks objectAtIndex:0]];
        exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];

        exporter.outputFileType = fileType;
        exporter.audioMix = exportAudioMix;

        // The exporter refuses to overwrite an existing file; remove any
        // previous export first.
        [[NSFileManager defaultManager] removeItemAtURL:exportURL error:nil];
        exporter.outputURL = exportURL;

        // Debug aid: list which container types this exporter can produce.
        for (NSUInteger i = 0; i < exporter.supportedFileTypes.count; i++) {
            NSLog(@"%@", [exporter.supportedFileTypes objectAtIndex:i]);
        }

        // do the export
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            if (AVAssetExportSessionStatusCompleted == exporter.status) {
                NSLog(@"AVAssetExportSessionStatusCompleted");
                [self startPlaybackOfExportedFileAtURL:exportURL];
            } else if (AVAssetExportSessionStatusFailed == exporter.status) {
                // a failure may happen because of an event out of your control
                // for example, an interruption like a phone call comming in
                // make sure and handle this case appropriately
                NSLog(@"AVAssetExportSessionStatusFailed");
            } else {
                // BUGFIX: status is NSInteger; %d is wrong on 64-bit.
                NSLog(@"Export Session Status: %ld", (long)exporter.status);
            }
        }];
    }
    else {
        // Everything else (MP3, WAV, AIFF): copy the raw sample data out
        // ourselves — AVAssetExportSession cannot produce these containers.
        NSData *data = [self extractDataForAsset:asset];

        dispatch_async(dispatch_get_global_queue(0, 0), ^{
            [data writeToFile:exportFile
                   atomically:YES];
            // BUGFIX: was [NSURL URLWithString:exportFile] — a filesystem
            // path is not a URL string, so AVAudioPlayer could never open
            // the exported file. Use the proper file URL.
            [self startPlaybackOfExportedFileAtURL:exportURL];
        });
    }
}

// Private helper: (re)creates the shared audioPlayer for the given file URL,
// configures the audio session for background playback, and starts playing,
// registering a background task while audio runs.
// (Factored out of the two branches above, which duplicated this block.)
- (void)startPlaybackOfExportedFileAtURL:(NSURL *)fileURL {
    [audioPlayer stop];
    audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:nil];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
    [[AVAudioSession sharedInstance] setActive:YES error:nil];
    [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
    audioPlayer.volume = 1.0;

    [audioPlayer prepareToPlay];
    UIBackgroundTaskIdentifier newTaskId = UIBackgroundTaskInvalid;
    if ([audioPlayer play]) {
        newTaskId = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
    }
}

// Disabled experiment: look up a song by a hard-coded persistent ID, extract
// its raw sample data, and play it. Every implementation attempt below is
// commented out, so this action is currently a no-op.
- (IBAction)pickAndSaveAndLoad:(id)sender {
    // Only Load Module
//    NSString *srcPath = [[NSBundle mainBundle] pathForResource:@"monni" ofType:@"mp3"];
//    NSLog(@"%@\n", srcPath);
    
    /*
    NSNumber *songPid = [[NSNumber alloc] initWithLongLong:2482208736009185950];
    MPMediaPropertyPredicate *predicate = [MPMediaPropertyPredicate predicateWithValue:songPid forProperty:MPMediaItemPropertyPersistentID];
    MPMediaQuery *mySongQuery = [[MPMediaQuery alloc]init];
    [mySongQuery addFilterPredicate:predicate];
    
    MPMediaItem *item = [[mySongQuery items] objectAtIndex:0];
    NSURL *itemUrl = [item valueForProperty:MPMediaItemPropertyAssetURL]; 
    
    AVURLAsset *asset = [[AVURLAsset alloc]initWithURL:itemUrl options:nil];
    NSData *data = [self extractDataForAsset:asset];
    NSError *averror = nil;
    audioPlayer = [[AVAudioPlayer alloc]initWithData:data error:&averror];
    if(averror) {
        NSLog(@"Error occurred: %@", averror);
    }
    audioPlayer.numberOfLoops = -1;
    audioPlayer.volume = 1.0;
    [audioPlayer play];
//    self.audioPlayer = audioPlayer;
//    [audioPlayer release];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
    */
//    [musicPlayer setQueueWithQuery:mySongQuery];
//    [musicPlayer play];
}

// Copies the bundled "monni.mp3" over "Ellen+McLain+-+Still+Alive.mp3" inside
// the app bundle's resource directory, replacing any existing copy.
// NOTE(review): on a device the app bundle is read-only and modifying it
// breaks the code signature — this can only succeed in the simulator.
- (IBAction)modifyBundle:(id)sender {

    // we need to get the plist data...
    NSString *srcPath = [[NSBundle mainBundle] pathForResource:@"monni" ofType:@"mp3"];
    NSString *destPath = [NSString stringWithFormat:@"%@/Ellen+McLain+-+Still+Alive.mp3", [[NSBundle mainBundle] resourcePath]];

    NSError *error = nil;
    NSFileManager *manager = [NSFileManager defaultManager];
    if ([manager fileExistsAtPath:destPath]) {
        // BUGFIX: check the method's BOOL return, not the error pointer —
        // Cocoa may leave *error untouched on success, so `if (error)` can
        // both miss failures and report stale errors.
        if (![manager removeItemAtPath:destPath error:&error]) {
            NSLog (@"REMOVE ERROR : %@\n", [error description]);
        }
        error = nil;  // reset so a stale error is never attributed to the copy
    }
    if (![manager copyItemAtPath:srcPath toPath:destPath error:&error]) {
        NSLog(@"COPY ERROR : %@\n", [error description]);
    }
}

// Reads every audio sample buffer from the asset's first track (in its stored,
// possibly compressed format — outputSettings:nil) and returns the raw
// concatenated bytes. Returns nil when the asset cannot be read.
- (NSData *)extractDataForAsset:(AVURLAsset *)songAsset {

    NSError *error = nil;
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];
    // BUGFIX: the original ignored a failed reader creation; a nil reader
    // caused a crash (or an empty result) further down.
    if (!reader) {
        NSLog(@"Could not create asset reader: %@", error);
        return nil;
    }
    if ([songAsset.tracks count] == 0) {
        NSLog(@"Asset has no tracks");
        return nil;
    }
    AVAssetTrack *songTrack = [songAsset.tracks objectAtIndex:0];

    // Debug aid: log the media type / subtype of each format description.
    NSArray *formats = songTrack.formatDescriptions;
    for (NSUInteger i = 0; i < [formats count]; i++) {
        CMFormatDescriptionRef format = (__bridge CMFormatDescriptionRef)[formats objectAtIndex:i];
        CMMediaType mediaType = CMFormatDescriptionGetMediaType(format);
        FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format);
        // BUGFIX: cast the UInt32 values to unsigned long so %lu is correct
        // on every architecture.
        NSLog(@"mediaType: %lu, mediaSubType: %lu", (unsigned long)mediaType, (unsigned long)mediaSubType);
    }

    // nil outputSettings => samples are vended without decompression.
    AVAssetReaderTrackOutput *output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:nil];
    [reader addOutput:output];

    NSMutableData *fullSongData = [[NSMutableData alloc] init];
    [reader startReading];

    while (reader.status == AVAssetReaderStatusReading) {

        AVAssetReaderTrackOutput *trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
        CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];

        if (sampleBufferRef) {
            CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);
            // BUGFIX: a sample buffer is not guaranteed to carry a data
            // buffer; guard before dereferencing.
            if (blockBufferRef) {
                size_t length = CMBlockBufferGetDataLength(blockBufferRef);
                NSMutableData *data = [[NSMutableData alloc] initWithLength:length];
                CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);
                [fullSongData appendData:data];
            }
            CMSampleBufferInvalidate(sampleBufferRef);
            CFRelease(sampleBufferRef);  // copyNextSampleBuffer transfers ownership
        }
    }

    if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown) {
        // Surface the failure instead of silently returning partial data.
        NSLog(@"Asset read failed: %@", reader.error);
    }

    return fullSongData;
}


// Exports a 20-second trimmed slice (30s–50s) of the asset's first audio
// track to an .m4a at filePath, with a 10-second fade-in. The export runs
// asynchronously; the return value only reports whether it was STARTED.
// Returns NO when the asset is shorter than 50s, has no audio track, or no
// export session could be created.
- (BOOL)exportAsset:(AVAsset *)avAsset toFilePath:(NSString *)filePath {


    // we need the audio asset to be at least 50 seconds long for this snippet
    CMTime assetTime = [avAsset duration];
    Float64 duration = CMTimeGetSeconds(assetTime);
    if (duration < 50.0) return NO;

    // get the first audio track
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] == 0) return NO;

    AVAssetTrack *track = [tracks objectAtIndex:0];

    // create the export session
    // no need for a retain here, the session will be retained by the
    // completion handler since it is referenced there
    AVAssetExportSession *exportSession = [AVAssetExportSession
                                           exportSessionWithAsset:avAsset
                                           presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    // create trim time range - 20 seconds starting from 30 seconds into the asset
    CMTime startTime = CMTimeMake(30, 1);
    CMTime stopTime = CMTimeMake(50, 1);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);

    // create fade in time range - 10 seconds starting at the beginning of trimmed asset
    CMTime startFadeInTime = startTime;
    CMTime endFadeInTime = CMTimeMake(40, 1);
    CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime,
                                                            endFadeInTime);

    // setup audio mix with a 0.0 -> 1.0 volume ramp over the fade-in range
    AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
    AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];

    [exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0
                                                      timeRange:fadeInTimeRange];
    exportAudioMix.inputParameters = [NSArray
                                      arrayWithObject:exportAudioMixInputParameters];

    // configure export session  output with all our parameters
    exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
    exportSession.timeRange = exportTimeRange; // trim time range
    exportSession.audioMix = exportAudioMix; // fade in audio mix

    // perform the export
    [exportSession exportAsynchronouslyWithCompletionHandler:^{

        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // a failure may happen because of an event out of your control
            // for example, an interruption like a phone call comming in
            // make sure and handle this case appropriately
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            // BUGFIX: status is NSInteger; %d is the wrong specifier on 64-bit.
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];

    return YES;
}

// Media picker delegate: the user cancelled without selecting anything.
- (void)mediaPickerDidCancel:(MPMediaPickerController *)mediaPicker {
    // Nothing was chosen; just dismiss the picker.
    [self dismissModalViewControllerAnimated:YES];
//	[self updateMusicPlayer];

} // mediaPickerDidCancel:

/*
- (EDLibraryAssetReaderStatus)prepareAsset {
    // Get the AVURLAsset
    AVURLAsset *uasset = [m_asset URLAsset];
    
    // Check for DRM protected content
    if (uasset.hasProtectedContent) {
        return kEDLibraryAssetReader_TrackIsDRMProtected;
    }
    
    if ([uasset tracks] == 0) {
        DDLogError(@"no asset tracks found");
        return AVAssetReaderStatusFailed;
    }
    
    // Initialize a reader with a track output
    NSError *err = noErr;
    m_reader = [[AVAssetReader alloc] initWithAsset:uasset error:&err];
    if (!m_reader || err) {
        DDLogError(@"could not create asset reader (%i)\n", [err code]);
        return AVAssetReaderStatusFailed;
    }
    
    // Check tracks for valid format. Currently we only support all MP3 and AAC types, WAV and AIFF is too large to handle
    for (AVAssetTrack *track in uasset.tracks) {
        NSArray *formats = track.formatDescriptions;
        for (int i=0; i<[formats count]; i++) {
            CMFormatDescriptionRef format = (CMFormatDescriptionRef)[formats objectAtIndex:i];
            
            // Check the format types
            CMMediaType mediaType = CMFormatDescriptionGetMediaType(format);
            FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format);
            
            DDLogVerbose(@"mediaType: %s, mediaSubType: %s", COFcc(mediaType), COFcc(mediaSubType));
            if (mediaType == kCMMediaType_Audio) {
                if (mediaSubType == kEDSupportedMediaTypeAAC ||
                    mediaSubType == kEDSupportedMediaTypeMP3) {
                    m_track = [track retain];
                    m_format = CFRetain(format);
                    break;
                }
            }
        }
        if (m_track != nil && m_format != NULL) {
            break;
        }
    }
    
    if (m_track == nil || m_format == NULL) {
        return kEDLibraryAssetReader_UnsupportedFormat;
    }
    
    // Create an output for the found track
    m_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:m_track outputSettings:nil];
    [m_reader addOutput:m_output];
    
    // Start reading
    if (![m_reader startReading]) {
        DDLogError(@"could not start reading asset");
        return kEDLibraryAssetReader_CouldNotStartReading;
    }
    
    return 0;
}

- (OSStatus)copyNextSampleBufferRepresentation:(CMSampleBufferRepresentationRef *)repOut {
    pthread_mutex_lock(&m_mtx);
    
    OSStatus err = noErr;
    AVAssetReaderStatus status = m_reader.status;
    
    if (m_invalid) {
        pthread_mutex_unlock(&m_mtx);
        return kEDLibraryAssetReader_Invalidated;
    }
    else if (status != AVAssetReaderStatusReading) {
        pthread_mutex_unlock(&m_mtx);
        return kEDLibraryAssetReader_NoMoreSampleBuffers;
    }
    
    // Read the next sample buffer
    CMSampleBufferRef sbuf = [m_output copyNextSampleBuffer];
    if (sbuf == NULL) {
        pthread_mutex_unlock(&m_mtx);
        return kEDLibraryAssetReader_NoMoreSampleBuffers;
    }
    
    CMSampleBufferRepresentationRef srep = CMSampleBufferRepresentationCreateWithSampleBuffer(sbuf);
    if (srep && repOut != NULL) {
        *repOut = srep;
    }
    else {
        DDLogError(@"CMSampleBufferRef corrupted");
        EDCFShow(sbuf);
        err = kEDLibraryAssetReader_BufferCorrupted;
    }
    CFRelease(sbuf);
    
    pthread_mutex_unlock(&m_mtx);
    
    return err;
}
*/

@end
