//
//  CSGCamera.m
//
//  Created by Tim Omernick on 3/7/05.
//  Copyright 2005 Tim Omernick. All rights reserved.
//

// Portions of this file were inspired by Apple Computer, Inc.'s Cocoa SGDataProc example, which can be found here:
// <http://developer.apple.com/samplecode/Cocoa_-_SGDataProc/Cocoa_-_SGDataProc.html>
// Also, I'd like to thank Chris Meyer for his excellent -imageFromGWorld: method, which he gave me permission to use for this framework.
// Updates made by Ben Britten 03/2008
// The changes I made include the video settings dialog,
// and I changed imageFromGWorld to return a greyscale image instead of an RGB one.
// This is for my specific purpose of analyzing infrared images.

#import "CSGCamera.h"
#import "CSGImage.h"
#import <Accelerate/Accelerate.h>


@interface CSGCamera (Private)
- (void)_sequenceGrabberIdle;
- (BOOL)_setupDecompression;
- (void)_didUpdate;
- (CSGImage *)_BWImageFromGWorld:(GWorldPtr)gworld;
@end

@interface CSGCamera (SequenceGrabber)
pascal OSErr CSGCameraSGDataProc(SGChannel channel, Ptr data, long dataLength, long *offset, long channelRefCon, TimeValue time, short writeType, long refCon);
@end

@implementation CSGCamera
@synthesize settingsFilePath;

// Init and dealloc

+ (void)initialize
{
	// Initialize the QuickTime Movie Toolbox exactly once. The runtime may
	// invoke +initialize again for subclasses, so guard on the class.
	if (self == [CSGCamera class]) {
		EnterMovies();
	}
}

- (void)dealloc;
{
	// Tear down capture first: stops the frame timer, closes the grabber
	// component, and disposes of the GWorld.
	[self stop];

	// The delegate is retained by -setDelegate:, so release it here (MRC).
	[delegate release];

	[super dealloc];
}


// Designated initializer: opens and prepares the default sequence grabber
// component. Returns nil (releasing self, per MRC rules) if any step fails.
- (id) init
{
	self = [super init];
	if (self != nil) {
		// Open the default sequence grabber component.
		component = OpenDefaultComponent(SeqGrabComponentType, 0);
		if (!component) {
			NSLog(@"Could not open sequence grabber component.");
			[self release]; // fix: previously leaked self on failed init
			return nil;
		}

		// Initialize sequence grabber component
		OSErr theErr = SGInitialize(component);
		if (theErr != noErr) {
			NSLog(@"SGInitialize() returned %d", (int)theErr);
			[self release]; // fix: previously leaked self on failed init
			return nil;
		}

		// Capture only; don't write the grabbed data into a movie file.
		theErr = SGSetDataRef(component, 0, 0, seqGrabDontMakeMovie);
		if (theErr != noErr) {
			NSLog(@"SGSetDataRef() returned %d", (int)theErr);
			[self release]; // fix: previously leaked self on failed init
			return nil;
		}
	}
	return self;
}


// the video settings dialog
// it is kinda old and not cocoa.
// added by ben
// Restores previously saved sequence-grabber settings from settingsFilePath
// (the blob written by -saveSettings). Pauses capture while applying them.
// added by ben
-(void)loadSettings
{
	// Nothing to do without a settings file to load from.
	if (self.settingsFilePath == nil) return;
	if (![[NSFileManager defaultManager] fileExistsAtPath:self.settingsFilePath])	return;
	
	[self pauseForSettings];
	UserData	ud = NULL;
	Handle		hud = NULL;
	short		idx;
	SGChannel	c;
	OSType		type;
	
	// Read the raw settings blob from the app support directory.
	NSData * blob = [NSData dataWithContentsOfFile:settingsFilePath];
	
	if ([blob length] > 0) {
		hud = NewHandle([blob length]);
		if (hud != NULL) {
			memcpy(*hud, [blob bytes], [blob length]);
			if (NewUserDataFromHandle(hud, &ud) == noErr && ud != NULL) {
				SGSetSettings(component, ud, 0);
				DisposeUserData(ud); // fix: was leaked on every call
			}
			DisposeHandle(hud); // fix: was leaked on every call
		}
	}
	
	// SGSetSettings can rebuild the grabber's channels, so re-find the
	// video channel; iterate while there is no error.
	idx = 0;
	while ( noErr == SGGetIndChannel(component, ++idx, &c, &type) )
	{
		if (type == VideoMediaType) {
			channel = c;
		}
	}				
	
	[self resumeForSettings];
}


// the video settings dialog
// it is kinda old and not cocoa.
// added by ben
// Persists the current sequence-grabber settings to settingsFilePath so they
// can be restored later by -loadSettings.
// added by ben
- (void)saveSettings
{
	// Nothing to do without a settings file to save to.
	if (self.settingsFilePath == nil) return;	
	
	UserData	ud = NULL;
	Handle		hud = NewHandle(0);
	NSData *	data = nil;
	
	// Bail out (cleaning up) if the grabber can't produce settings.
	if (SGGetSettings(component, &ud, 0) != noErr || ud == NULL) {
		if (hud) DisposeHandle(hud);
		return;
	}
	PutUserDataIntoHandle(ud, hud);
	
	data = [NSData dataWithBytes:*hud length:GetHandleSize(hud)];

	DisposeUserData(ud);
	DisposeHandle(hud);
	
	// Write atomically to the app support directory; err is only valid on
	// failure, so check the BOOL return, not the error pointer.
	NSError * err = nil;
	if (![data writeToFile:self.settingsFilePath options:NSAtomicWrite error:&err]) {
			[[NSAlert alertWithMessageText:@"Camera Settings Could Not Be Saved" defaultButton:@"Oh, ok." alternateButton:nil otherButton:nil informativeTextWithFormat:@"The file system returned this error: %@",[err localizedDescription]] runModal];
	}
}

// API

// Sets the frame-delivery delegate.
// NOTE(review): the delegate is retained here (and released in -dealloc),
// which can create a retain cycle if the delegate also owns this camera —
// verify ownership before relying on dealloc running.
- (void)setDelegate:(id)newDelegate;
{
    if (newDelegate == delegate) {
        return;
    }
    [delegate release];
    delegate = [newDelegate retain];
}

// Starts capturing frames at the requested size. Creates the video channel,
// an offscreen 32-bit ARGB GWorld to decompress into, installs the data
// callback, starts recording, and schedules the idle timer that pumps frames.
// Returns NO (with a logged error) if any setup step fails.
- (BOOL)startWithSize:(NSSize)frameSize;
{
	OSErr theErr;
	
	// timeScale is filled in lazily by the data proc on the first frame.
	timeScale = 0;
	lastTime = 0;
	
	// Create sequence grabber video channel
	theErr = SGNewChannel(component, VideoMediaType, &channel);
	if (theErr != noErr) {
		NSLog(@"SGNewChannel() returned %d", (int)theErr);
		return NO;
	}
	
	// Set the grabber's bounds
	boundsRect.top = 0;
	boundsRect.left = 0;
	boundsRect.bottom = frameSize.height;
	boundsRect.right = frameSize.width;
	
	// Create the GWorld that decompressed frames are drawn into
	theErr = QTNewGWorld(&gWorld, k32ARGBPixelFormat, &boundsRect, 0, NULL, 0);
	if (theErr != noErr) {
		NSLog(@"QTNewGWorld() returned %d trying to open bounds size:%@", (int)theErr, NSStringFromSize(frameSize));
		return NO;
	}
	
	// Lock the pixmap so its base address stays valid for the capture session
	if (!LockPixels(GetPortPixMap(gWorld))) {
		NSLog(@"Could not lock pixels.");
		return NO;
	}
	
	// Set GWorld
	theErr = SGSetGWorld(component, gWorld, GetMainDevice());
	if (theErr != noErr) {
		NSLog(@"SGSetGWorld() returned %d", (int)theErr);
		return NO;
	}
	
	// Set the channel's bounds
	theErr = SGSetChannelBounds(channel, &boundsRect);
	if (theErr != noErr) {
		NSLog(@"SGSetChannelBounds(2) returned %d", (int)theErr);
		return NO;
	}
	
	// Set the channel usage to record
	theErr = SGSetChannelUsage(channel, seqGrabRecord);
	if (theErr != noErr) {
		NSLog(@"SGSetChannelUsage() returned %d", (int)theErr);
		return NO;
	}
	
	// Install the data callback, passing self as the refCon.
	// NOTE(review): the UPP created by NewSGDataUPP() is never disposed, so
	// it leaks once per -startWithSize: call — consider caching it.
	theErr = SGSetDataProc(component, NewSGDataUPP(&CSGCameraSGDataProc), (long)self);
	if (theErr != noErr) {
		NSLog(@"SGSetDataProc() returned %d", (int)theErr);
		return NO;
	}
	
	// Prepare
	theErr = SGPrepare(component, false, true);
	if (theErr != noErr) {
		NSLog(@"SGPrepare() returned %d", (int)theErr);
		return NO;
	}
	
	// Start recording
	theErr = SGStartRecord(component);
	if (theErr != noErr) {
		NSLog(@"SGStartRecord() returned %d", (int)theErr);
		return NO;
	}
	
	startTime = [NSDate timeIntervalSinceReferenceDate];
	
	// Set up decompression sequence (camera -> GWorld)
	[self _setupDecompression];
	
	// Start frame timer; interval 0.0 idles as often as the run loop allows
	frameTimer = [[NSTimer scheduledTimerWithTimeInterval:0.0 target:self selector:@selector(_sequenceGrabberIdle) userInfo:nil repeats:YES] retain];
	
	[self loadSettings];
	
	[self retain]; // Matches autorelease in -stop
	return YES;
}


////////////////////////////////////////////////////
// the video settings dialog
// it is kinda old and not cocoa.
// added by ben
// Pauses capture, runs the (Carbon, non-Cocoa) video settings dialog, then
// persists the chosen settings and resumes capture.
-(void)showSettings
{	
	[self pauseForSettings];
	OSErr theErr = SGSettingsDialog(component, channel, 0, NULL, 0, NULL, 0);
	if (theErr != noErr) {
		NSLog(@"SGSettingsDialog() returned %d", (int)theErr);
	}
	[self saveSettings];
	[self resumeForSettings];
}

// Halts frame delivery so a modal settings dialog can run: kills the idle
// timer and stops the sequence grabber.
-(void)pauseForSettings
{
	if (frameTimer != nil) {
		[frameTimer invalidate];
		[frameTimer release];
		frameTimer = nil;
	}
	SGStop(component);
}

// Restarts capture after the settings dialog: re-applies channel usage,
// re-suppresses movie creation, restarts recording, and recreates the
// idle timer that pumps frames.
-(void)resumeForSettings
{
	// Set the channel usage to record
	OSErr theErr = SGSetChannelUsage(channel, seqGrabRecord);
	if (theErr != noErr) {
		NSLog(@"SGSetChannelUsage() returned %d", (int)theErr);
	}
	// Don't make movie
	theErr = SGSetDataRef(component, 0, 0, seqGrabDontMakeMovie);
	if (theErr != noErr) {
		NSLog(@"SGSetDataRef() returned %d", (int)theErr);
	}
	// Start recording
	theErr = SGStartRecord(component);
	if (theErr != noErr) {
		NSLog(@"SGStartRecord() returned %d", (int)theErr);
	}
	startTime = [NSDate timeIntervalSinceReferenceDate];
	// Interval 0.0 idles as often as the run loop allows.
	frameTimer = [[NSTimer scheduledTimerWithTimeInterval:0.0 target:self selector:@selector(_sequenceGrabberIdle) userInfo:nil repeats:YES] retain];
}
////////////////////////////////////////////////////


// Tears down the decompression sequence created by -_setupDecompression.
// Safe to call when no sequence is active.
-(void)endDecompression
{
	if (decompressionSequence) {
		OSErr theErr = CDSequenceEnd(decompressionSequence);
		if (theErr != noErr) {
			NSLog(@"CDSequenceEnd() returned %d", (int)theErr);
		}
		decompressionSequence = 0;
	}	
}

// Stops capture and releases all capture resources: the idle timer, the
// decompression sequence, the grabber component, and the GWorld.
// Idempotent — every teardown step is guarded, so calling twice is safe.
- (BOOL)stop;
{    
    // Stop frame timer
	if (frameTimer) {
		[frameTimer invalidate];
		[frameTimer release];
		frameTimer = nil;
	}
    
    // Stop recording
	if (component)
		SGStop(component);
    
    ComponentResult theErr;

    // End decompression sequence
	[self endDecompression];
    // Close sequence grabber component
	if (component) {
		theErr = CloseComponent(component);
		if (theErr != noErr) {
			NSLog(@"CloseComponent() returned %d", (int)theErr);
		}
		component = NULL;
	}
    
    // Dispose of GWorld
	if (gWorld) {
		DisposeGWorld(gWorld);
		gWorld = NULL;
	}
    
    [self autorelease]; // Matches retain in -start
    
    return YES;
}

@end

@implementation CSGCamera (Private)

// Timer callback: gives the sequence grabber processing time. SGIdle()
// ultimately drives CSGCameraSGDataProc() for each captured frame.
- (void)_sequenceGrabberIdle;
{
    OSErr theErr = SGIdle(component);
    if (theErr != noErr) {
        NSLog(@"SGIdle returned %d", (int)theErr);
    }
}

// Creates the decompression sequence that turns compressed camera frames
// into pixels in the ARGB gWorld, scaling from the camera's native frame
// size to boundsRect. Returns YES on success.
- (BOOL)_setupDecompression;
{
    ComponentResult theErr;
    
    ImageDescriptionHandle imageDesc = (ImageDescriptionHandle)NewHandle(0);
    if (imageDesc == NULL) {
        NSLog(@"NewHandle() failed in -_setupDecompression");
        return NO;
    }
    theErr = SGGetChannelSampleDescription(channel, (Handle)imageDesc);
    if (theErr != noErr) {
        NSLog(@"SGGetChannelSampleDescription() returned %d", (int)theErr);
        DisposeHandle((Handle)imageDesc); // fix: was leaked on this path
        return NO;
    }
    
    // Scale from the camera's native size to our capture bounds.
    Rect sourceRect;
    sourceRect.top = 0;
    sourceRect.left = 0;
    sourceRect.right = (**imageDesc).width;
    sourceRect.bottom = (**imageDesc).height;
	    
    MatrixRecord scaleMatrix;
    RectMatrix(&scaleMatrix, &sourceRect, &boundsRect);
    
    theErr = DecompressSequenceBegin(&decompressionSequence, imageDesc, gWorld, NULL, NULL, &scaleMatrix, srcCopy, NULL, 0, codecNormalQuality, bestSpeedCodec);
    if (theErr != noErr) {
        NSLog(@"DecompressSequenceBegin() returned %d", (int)theErr);
        DisposeHandle((Handle)imageDesc); // fix: was leaked on this path
        return NO;
    }
    
    DisposeHandle((Handle)imageDesc);
	
	return YES;
}


// Called after each frame lands in the GWorld: builds a greyscale CSGImage,
// stamps it with its capture time, and hands it to the delegate.
- (void)_didUpdate;
{
    // Only build a frame image if someone is listening.
    if (![delegate respondsToSelector:@selector(camera:didReceiveFrame:)]) {
        return;
    }
    CSGImage *frameImage = [self _BWImageFromGWorld:gWorld];
    if (frameImage == nil) {
        return;
    }
    // Convert the QuickTime timestamp (lastTime / timeScale seconds into
    // the session) to an absolute time relative to startTime.
    [frameImage setSampleTime:startTime + ((double)lastTime / (double)timeScale)];
    [delegate camera:self didReceiveFrame:frameImage];
}


// Thanks to Chris Meyer from http://www.cocoadev.com/
// Converts the GWorld's 32-bit ARGB pixels into an RGBA NSBitmapImageRep by
// drawing a CGImage view of the GWorld buffer into a CG bitmap context backed
// by the rep's storage, then wraps the rep in a CSGImage.
// Returns an autoreleased CSGImage, or NULL if the pixmap can't be locked.
// NOTE(review): appears unused — -_didUpdate calls -_BWImageFromGWorld:.
- (CSGImage *)_originalImageFromGWorld:(GWorldPtr)gworld;
{
	NSParameterAssert( gworld != NULL );
	
	PixMapHandle pixMapHandle = GetGWorldPixMap( gworld );
	if ( LockPixels( pixMapHandle ) )
	{
		// Image dimensions come from the GWorld's port bounds.
		Rect portRect;
		GetPortBounds( gworld, &portRect );
		int pixels_wide = (portRect.right - portRect.left);
		int pixels_high = (portRect.bottom - portRect.top);
		
		// Destination: 8 bits per sample, 4 samples per pixel (RGBA).
		int bps = 8;
		int spp = 4;
		BOOL has_alpha = YES;
		
		NSBitmapImageRep *frameBitmap = [[[NSBitmapImageRep alloc]
																			initWithBitmapDataPlanes:NULL
																			pixelsWide:pixels_wide
																			pixelsHigh:pixels_high
																			bitsPerSample:bps
																			samplesPerPixel:spp
																			hasAlpha:has_alpha
																			isPlanar:NO
																			colorSpaceName:NSDeviceRGBColorSpace
																			bytesPerRow:0
																			bitsPerPixel:0] autorelease];
		
		CGColorSpaceRef dst_colorspaceref = CGColorSpaceCreateDeviceRGB();
		
		CGImageAlphaInfo dst_alphainfo = has_alpha ? kCGImageAlphaPremultipliedLast : kCGImageAlphaNone;
		
		// CG context that renders directly into the NSBitmapImageRep's buffer.
		CGContextRef dst_contextref = CGBitmapContextCreate( [frameBitmap bitmapData],
																												pixels_wide,
																												pixels_high,
																												bps,
																												[frameBitmap bytesPerRow],
																												dst_colorspaceref,
																												dst_alphainfo );
		
		void *pixBaseAddr = GetPixBaseAddr(pixMapHandle);
		
		long pixmapRowBytes = GetPixRowBytes(pixMapHandle);
		
		// Wrap the GWorld's pixels without copying them.
		CGDataProviderRef dataproviderref = CGDataProviderCreateWithData( NULL, pixBaseAddr, pixmapRowBytes * pixels_high, NULL );
		
		// Source: 8 bits per sample, 4 samples per pixel, alpha first (ARGB).
		int src_bps = 8;
		int src_spp = 4;
		BOOL src_has_alpha = YES;
		
		CGColorSpaceRef src_colorspaceref = CGColorSpaceCreateDeviceRGB();
		
		CGImageAlphaInfo src_alphainfo = src_has_alpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNone;
		
		CGImageRef src_imageref = CGImageCreate( pixels_wide,
																						pixels_high,
																						src_bps,
																						src_bps * src_spp,
																						pixmapRowBytes,
																						src_colorspaceref,
																						src_alphainfo,
																						dataproviderref,
																						NULL,
																						NO, // shouldInterpolate
																						kCGRenderingIntentDefault );
		
		CGRect rect = CGRectMake( 0, 0, pixels_wide, pixels_high );
		
		// Draw the source into the destination; CG handles the ARGB->RGBA
		// sample reordering between the two alpha layouts.
		CGContextDrawImage( dst_contextref, rect, src_imageref );
		
		// Release all CG objects; the pixels now live in frameBitmap.
		CGImageRelease( src_imageref );
		CGColorSpaceRelease( src_colorspaceref );
		CGDataProviderRelease( dataproviderref );
		CGContextRelease( dst_contextref );
		CGColorSpaceRelease( dst_colorspaceref );
		
		UnlockPixels( pixMapHandle );
		
		// CSGImage (an NSImage subclass per -addRepresentation: usage) retains
		// the autoreleased frameBitmap.
		CSGImage *image = [[CSGImage alloc] initWithSize:NSMakeSize(pixels_wide, pixels_high)];
		[image addRepresentation:frameBitmap];
		
		return [image autorelease];
	}
	
	return NULL;
}


// this is a modified version of the 'original' one
// the main difference is that the destination gworld is a monochrome
// bitmap
// Converts the GWorld's 32-bit ARGB pixels into an 8-bit greyscale
// NSBitmapImageRep (CG performs the RGB->gray conversion when drawing into
// the DeviceGray context) and wraps it in a CSGImage.
// Returns an autoreleased CSGImage, or NULL if the pixmap can't be locked.
- (CSGImage *)_BWImageFromGWorld:(GWorldPtr)gworld;
{
	NSParameterAssert( gworld != NULL );

	PixMapHandle pixMapHandle = GetGWorldPixMap( gworld );
	if ( LockPixels( pixMapHandle ) )
	{
		// Image dimensions come from the GWorld's port bounds.
		Rect portRect;
		GetPortBounds( gworld, &portRect );
		int pixels_wide = (portRect.right - portRect.left);
		int pixels_high = (portRect.bottom - portRect.top);
		
		// Destination: 8 bits per sample, 1 sample per pixel (greyscale, no alpha).
		int bps = 8;
		int spp = 1;
		BOOL has_alpha = NO;
		
		NSBitmapImageRep *frameBitmap = [[NSBitmapImageRep alloc]
																			initWithBitmapDataPlanes:NULL
																			pixelsWide:pixels_wide
																			pixelsHigh:pixels_high
																			bitsPerSample:bps
																			samplesPerPixel:spp
																			hasAlpha:has_alpha
																			isPlanar:YES
																			colorSpaceName:NSDeviceWhiteColorSpace
																			bytesPerRow:0
																			bitsPerPixel:0];
		
		CGColorSpaceRef dst_colorspaceref = CGColorSpaceCreateDeviceGray();
		
		CGImageAlphaInfo dst_alphainfo = has_alpha ? kCGImageAlphaPremultipliedLast : kCGImageAlphaNone;
		
		// CG context that renders directly into the NSBitmapImageRep's buffer.
		CGContextRef dst_contextref = CGBitmapContextCreate( [frameBitmap bitmapData],
																												pixels_wide,
																												pixels_high,
																												bps,
																												[frameBitmap bytesPerRow],
																												dst_colorspaceref,
																												dst_alphainfo );
		
		void *pixBaseAddr = GetPixBaseAddr(pixMapHandle);
		
		long pixmapRowBytes = GetPixRowBytes(pixMapHandle);
		
		// Wrap the GWorld's pixels without copying them.
		CGDataProviderRef dataproviderref = CGDataProviderCreateWithData( NULL, pixBaseAddr, pixmapRowBytes * pixels_high, NULL );
		
		// Source: 8 bits per sample, 4 samples per pixel, alpha first (ARGB).
		int src_bps = 8;
		int src_spp = 4;
		BOOL src_has_alpha = YES;
		
		CGColorSpaceRef src_colorspaceref = CGColorSpaceCreateDeviceRGB();
		
		CGImageAlphaInfo src_alphainfo = src_has_alpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNone;
		
		CGImageRef src_imageref = CGImageCreate( pixels_wide,
																						pixels_high,
																						src_bps,
																						src_bps * src_spp,
																						pixmapRowBytes,
																						src_colorspaceref,
																						src_alphainfo,
																						dataproviderref,
																						NULL,
																						NO, // shouldInterpolate
																						kCGRenderingIntentDefault );
		
		CGRect rect = CGRectMake( 0, 0, pixels_wide, pixels_high );
		
		// Draw the color source into the gray destination; this is where the
		// RGB->greyscale conversion happens.
		CGContextDrawImage( dst_contextref, rect, src_imageref );
		
		// Release all CG objects; the pixels now live in frameBitmap.
		CGImageRelease( src_imageref );
		CGColorSpaceRelease( src_colorspaceref );
		CGDataProviderRelease( dataproviderref );
		CGContextRelease( dst_contextref );
		CGColorSpaceRelease( dst_colorspaceref );
		
		UnlockPixels( pixMapHandle );
		
		CSGImage *image = [[CSGImage alloc] initWithSize:NSMakeSize(pixels_wide, pixels_high)];
		[image addRepresentation:frameBitmap];
		// The image retains the rep, so balance the alloc above (MRC).
		[frameBitmap release];
		
		return [image autorelease];
	}	
	return NULL;
}

// this is a version of the image from gworld that returns a CIImage
// This is a version of the image-from-GWorld conversion that returns a
// CIImage wrapping the GWorld's ARGB pixels. Returns nil if the pixmap
// can't be locked.
// NOTE(review): the data provider does not copy the pixels, so the returned
// CIImage references the live GWorld buffer — it is only valid while the
// GWorld exists and before the next frame overwrites it. Confirm callers.
- (CIImage *)_CIImageFromGWorld:(GWorldPtr)gworld;
{
	NSParameterAssert( gworld != NULL );
	
	PixMapHandle pixMapHandle = GetGWorldPixMap( gworld );
	if ( LockPixels( pixMapHandle ) )
	{
		// Image dimensions come from the GWorld's port bounds.
		Rect portRect;
		GetPortBounds( gworld, &portRect );
		int pixels_wide = (portRect.right - portRect.left);
		int pixels_high = (portRect.bottom - portRect.top);
		
		unsigned char * pixBaseAddr = GetPixBaseAddr(pixMapHandle);
		long pixmapRowBytes = GetPixRowBytes(pixMapHandle);
		
		// Source: 8 bits per sample, 4 samples per pixel, alpha first (ARGB).
		int src_bps = 8;
		int src_spp = 4;
		BOOL src_has_alpha = YES;

		CGDataProviderRef dataproviderref = CGDataProviderCreateWithData( NULL, pixBaseAddr, pixmapRowBytes * pixels_high, NULL );

		CGColorSpaceRef src_colorspaceref = CGColorSpaceCreateDeviceRGB();
		
		CGImageAlphaInfo src_alphainfo = src_has_alpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNone;
		
		CGImageRef src_imageref = CGImageCreate( pixels_wide,
																						pixels_high,
																						src_bps,
																						src_bps * src_spp,
																						pixmapRowBytes,
																						src_colorspaceref,
																						src_alphainfo,
																						dataproviderref,
																						NULL,
																						NO, // shouldInterpolate
																						kCGRenderingIntentDefault );
		
		// The CIImage retains the CGImage (which in turn retains the provider),
		// so release our CG references. Fix: these were leaked on every call.
		CIImage *result = [CIImage imageWithCGImage:src_imageref];
		CGImageRelease( src_imageref );
		CGColorSpaceRelease( src_colorspaceref );
		CGDataProviderRelease( dataproviderref );
		
		UnlockPixels( pixMapHandle );
		
		return result;
	}
	return nil;
}


@end

@implementation CSGCamera (SequenceGrabber)

// Sequence grabber data callback: decompresses each compressed camera frame
// into the camera's GWorld and notifies the camera via -_didUpdate.
// refCon is the CSGCamera instance passed to SGSetDataProc() in
// -startWithSize:. Returns noErr, or the first QuickTime error encountered.
pascal OSErr CSGCameraSGDataProc(SGChannel channel, 
																 Ptr data, 
																 long dataLength, 
																 long *offset, 
																 long channelRefCon, 
																 TimeValue time, 
																 short writeType, 
																 long refCon)
{
    CSGCamera *camera = (CSGCamera *)refCon;
    ComponentResult theErr;
    
    // Lazily fetch the channel's time scale the first time a frame arrives;
    // -_didUpdate needs it to convert `time` into seconds.
    if (camera->timeScale == 0) {
        theErr = SGGetChannelTimeScale(camera->channel, &camera->timeScale);
        if (theErr != noErr) {
            NSLog(@"SGGetChannelTimeScale() returned %d", (int)theErr);
            return theErr;
        }
    }
    
    // Decompress the frame into the GWorld set up by -_setupDecompression.
    if (camera->gWorld) {
        CodecFlags ignore;
        theErr = DecompressSequenceFrameS(camera->decompressionSequence, data, dataLength, 0, &ignore, NULL);
        if (theErr != noErr) {
            NSLog(@"DecompressSequenceFrameS() returned %d", (int)theErr);
            return theErr;
        }
    }
    
    camera->lastTime = time;
    
    [camera _didUpdate];
    
    return noErr;
}

@end
