#import "openIsight.h"

@implementation openIsightAppDelegate:NSThread


// QTCaptureDecompressedVideoOutput delegate callback, invoked once per
// decoded frame. QTKit delivers this on a background thread, not the main
// thread, which is why shared state is guarded with @synchronized.
//
// Behavior: retains the incoming frame and swaps it into mCurrentImageBuffer
// (releasing the previously stored frame), writes the frame to disk as a
// JPEG at file_name, then tears down the capture session, flags completion,
// and terminates the application. In effect only the FIRST delivered frame
// is ever saved.
- (void)captureOutput:
	(QTCaptureOutput *)captureOutput
	didOutputVideoFrame:(CVImageBufferRef)videoFrame 
	withSampleBuffer:(QTSampleBuffer *)sampleBuffer
	fromConnection:(QTCaptureConnection *)connection
{
    // Store the latest frame
	// This must be done in a @synchronized block because this delegate method is not called on the main thread
    CVImageBufferRef imageBufferToRelease;
    // Take ownership of the new frame; the matching release happens when a
    // later callback swaps it out as imageBufferToRelease.
    CVBufferRetain(videoFrame);
	CIImage *image = [CIImage imageWithCVImageBuffer:videoFrame];

    
    @synchronized (self) {
        imageBufferToRelease = mCurrentImageBuffer;
        mCurrentImageBuffer = videoFrame;
    }
	
	NSLog(@"captureOutput called!\n");
    
    // NOTE(review): on the first invocation this assumes mCurrentImageBuffer
    // started out NULL (CVBufferRelease(NULL) is a no-op) -- presumably
    // zero-filled by alloc; confirm it is never written elsewhere first.
    CVBufferRelease(imageBufferToRelease);
	// Encode the captured frame as JPEG and write it to the user-supplied
	// path. The result of writeToFile: is ignored, so a failed write is
	// silent. NSJPEGFileType/properties:nil is legacy API usage.
	NSBitmapImageRep* bitmap = [[NSBitmapImageRep alloc] initWithCIImage:image];
	[
	 [bitmap representationUsingType:NSJPEGFileType properties:nil]
	 writeToFile:[file_name stringByExpandingTildeInPath]
	 atomically:YES
	];
	[bitmap release];
	// One frame captured: stop the session, detach the input, close the
	// device, and mark the thread's work as done.
	NSLog(@"Stopping Run...\n");
	[cs stopRunning];
	NSLog(@"Removing input...\n");
	[cs removeInput:cdi];
	[my_sight close];
	am_finished = true;
	
	// NOTE(review): -[NSApplication terminate:] is sent from this background
	// delegate thread; AppKit expects it on the main thread -- confirm.
	[NSApp terminate:self];
	
	
	
}
// Stores the output path for the captured frame.
//
// Fix (MRC): the previous code assigned the argument directly without
// taking ownership. The string returned by -[NSUserDefaults stringForKey:]
// is autoreleased, so file_name could dangle once the caller's autorelease
// pool drained, and any previously stored path leaked. Release the old
// value and keep an immutable copy of the new one.
-(void)set_file_name:(NSString*)fn {
	if (file_name != fn) {
		[file_name release];   // drop ownership of the previous path (nil-safe)
		file_name = [fn copy]; // own an immutable snapshot of the new path
	}
}

// Accessor for the snapshot's destination path (nil until
// set_file_name: has been called).
-(NSString *)get_file_name
{
	NSString *currentPath = file_name;
	return currentPath;
}

// Reports whether the capture callback has completed its save-and-teardown
// cycle; polled by code waiting on this thread.
-(bool)finished{
	bool captureDone = am_finished;
	return captureDone;
}

// Builds the QTKit capture pipeline: reads the required -filename argument,
// opens the default video input device, wires it into a capture session
// with a decompressed-video output whose delegate is self, and starts the
// session running. Frames are then delivered asynchronously to
// captureOutput:didOutputVideoFrame:withSampleBuffer:fromConnection:.
//
// Returns 0 (NULL) on every path; errors are logged and leave the session
// unstarted. Exits the process if no filename was supplied.
-(void *)setup_capture
{
	pool = [[NSAutoreleasePool alloc] init];
	cs = [[QTCaptureSession alloc] init]; 
	my_sight = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
	BOOL aok;

	// The output path must be supplied on the command line via "-filename".
	// Look it up once, validate before storing (previously it was stored
	// first and fetched twice).
	NSUserDefaults *args = [NSUserDefaults standardUserDefaults];
	NSString *requestedName = [args stringForKey:@"filename"];
	if (requestedName == nil) {
		printf("Please specify a filename with the -filename flag\n\n");
		exit(1); // was exit(0): a missing required argument is a failure
	}
	[self set_file_name:requestedName];

	if (my_sight) {
		aok = [my_sight open:&error];
		if (!aok) {
			NSLog(@"Could not open QTCaptureDevice...\n\n");
			[pool drain];
			return 0;
		}
		cdi = [[QTCaptureDeviceInput alloc] initWithDevice:my_sight];
		aok = [cs addInput:cdi error:&error];
		if (!aok) {
			// %@ (not %s): error is an NSError object, not a C string;
			// %s on an object pointer is undefined behavior.
			NSLog(@"Could not init device %@...\n\n", error);
			[pool drain];
			return 0;
		}

		dvo = [[QTCaptureDecompressedVideoOutput alloc] init];
		aok = [cs addOutput:dvo error:&error];
		if (!aok) {
			// %@ for the NSError here as well (was %s).
			NSLog(@"Adding output to capture session failed -- %@...\n", error);
			[pool drain];
			return 0;
		}

		// Frame callbacks arrive on a background thread once running.
		[dvo setDelegate:self];

		NSLog(@"Starting Run...\n");
		[cs startRunning];
	}
	else {
		NSLog(@"Could not create QTCaptureDevice...\n\n");
		[pool drain];
		return 0;
	}
	return 0;
}


// Takes a retained reference to the most recent frame under @synchronized
// (the delegate callback that writes mCurrentImageBuffer runs on a
// background thread) and immediately balances it. The frame itself is
// saved to disk by the delegate callback, not here.
//
// Fix: the CVBufferRetain result was never released, leaking one
// CVImageBuffer reference per call. CVBufferRetain/Release are NULL-safe,
// so no frame having arrived yet is harmless.
-(void) do_capture
{
	CVImageBufferRef imageBuffer = NULL;
	NSLog(@"Waiting for image...\n"); // typo fix: was "Waitig"
	@synchronized (self) {
		imageBuffer = CVBufferRetain(mCurrentImageBuffer);
	}
	// Balance the retain taken above; previously this reference leaked.
	CVBufferRelease(imageBuffer);
}


// NSThread entry point. Order matters: the completion flag must be cleared
// BEFORE setup_capture starts the session, because the delegate callback
// (on another thread) sets am_finished once a frame has been saved.
-(void)main{
	am_finished = false;
	[self setup_capture];
	[self do_capture];
}


@end
