//
//  MobroControllAppViewController.m
//  MobroControllApp
//
//  Created by Óbudai Egyetem on 2011.11.13..
//  Copyright 2011 __MyCompanyName__. All rights reserved.
//

#import "MobroControllAppViewController.h"

#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <QuartzCore/QuartzCore.h>
#import <ImageIO/ImageIO.h>
#import <AssetsLibrary/AssetsLibrary.h>

@implementation MobroControllAppViewController

// Image payloads and the preview of the last captured frame.
@synthesize PNGImageData;
@synthesize JPEGImageData;
@synthesize backgroundImageView;
@synthesize takenPic;

// Capture pipeline state (session, preview layer, still-image output).
@synthesize capturedSession;
@synthesize previewLayer;
@synthesize capturedStillImageOutput;
@synthesize orientation;

// Global TCP write stream shared by the networking helpers below.
CFWriteStreamRef networkStream = NULL;

bool save = false;          // NOTE(review): never read or written elsewhere in this file — possibly dead
bool started = false;       // capture loop active; toggled by -start:, polled by -captureStillImage
bool lockNetwork = false;   // guards against overlapping -SendData: calls

/* Networking */
//
// Polls the global write stream until it reports open, errors out, or
// ~2 seconds pass (10 polls x 0.2 s). Returns TRUE only when the stream
// reached kCFStreamStatusOpen.
//
-(BOOL) getConnectState{
    if (networkStream == NULL) {
        // CFWriteStreamGetStatus(NULL) would crash; no stream == not connected.
        return FALSE;
    }
    int status = 0;
    int TimeOutCounter = 0;
    while(status == 0){
        CFStreamStatus streamStatus = CFWriteStreamGetStatus(networkStream);
        switch (streamStatus) {
            case kCFStreamStatusOpen:
                status = 1;
                break;  // bug fix: missing break fell through and slept 0.2 s after success
            case kCFStreamStatusWriting:
            case kCFStreamStatusOpening:
                // Still in progress — wait a beat and poll again.
                [NSThread sleepForTimeInterval:0.2];
                TimeOutCounter++;
                break;
            default:
                // Closed, error, or not-open: give up.
                status = -1;
        }
        if (TimeOutCounter > 10) {
            status = -1;
        }
    }
    return (status<0?FALSE:TRUE);
}

//
// Opens a TCP write stream to addressString on port portString and stores it
// in the global networkStream. Returns TRUE once the stream reports open.
// NOTE(review): the second selector segment is unnamed (selector is
// `Connector::`) — kept as-is because existing callers use it (see SendData:).
//
-(BOOL) Connector: (NSString*) addressString:(NSString*) portString{
    CFWriteStreamRef writeStream = NULL;

    CFStringRef host = (CFStringRef)addressString;
    UInt32 port_conn = [portString intValue];

    // CF functions report failure via NULL/return codes, not ObjC exceptions,
    // so the former @try/@catch was dead weight.
    CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, host, port_conn, NULL, &writeStream);
    if (writeStream == NULL) {
        // Bug fix: a NULL stream used to be stored and later dereferenced.
        return FALSE;
    }

    CFWriteStreamSetProperty(writeStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);
    CFWriteStreamOpen(writeStream);

    // Leak fix: release the previous stream before replacing it.
    if (networkStream != NULL) {
        CFRelease(networkStream);
    }
    networkStream = writeStream;
    return [self getConnectState];
}

//
// Closes and releases the global write stream. Returns TRUE when the stream
// is no longer connected (including when there was nothing to close).
//
-(BOOL) Disconnector{
    if (networkStream == NULL) {
        return TRUE;
    }
    CFWriteStreamClose(networkStream);
    BOOL stillConnected = [self getConnectState];
    CFRelease(networkStream);   // leak fix: streams were closed but never released
    networkStream = NULL;
    return !stillConnected;
}

//
// Writes the UTF-8 bytes of `string` to the global write stream, looping
// until everything is written or the stream errors out.
//
-(void) SendString:(NSString *)string{
    if (networkStream == NULL || [string length] == 0) {
        return; // no open stream / nothing to write
    }
    const UInt8 *bytes = (const UInt8 *)[string UTF8String];
    CFIndex total = [string lengthOfBytesUsingEncoding:NSUTF8StringEncoding];
    CFIndex written = 0;
    while (written < total) {
        // Bug fix: CFWriteStreamWrite may write fewer bytes than requested;
        // the old code ignored partial writes (and truncated CFIndex to int).
        CFIndex bytesWritten = CFWriteStreamWrite(networkStream,
                                                  bytes + written,
                                                  total - written);
        NSLog(@"Written: %ld", (long)bytesWritten);
        if (bytesWritten <= 0) {
            CFStreamError error = CFWriteStreamGetError(networkStream);
            NSLog(@"Network error code: %ld", (long)error.error);
            return;
        }
        written += bytesWritten;
    }
}


//
// Connects to address:dataPort, writes `data`, and disconnects. The
// lockNetwork flag prevents overlapping sends (e.g. from the capture timer).
//
-(void) SendData:(NSData *)data{
    if (lockNetwork) {
        return; // a send is already in progress — drop this frame
    }
    lockNetwork = true;
    bool connected = [self Connector:address.text :dataPort.text];
    int bytesWritten = CFWriteStreamWrite(networkStream,(const UInt8*) [data bytes] , [data length]);
    NSLog(@"Written: %d", bytesWritten);
    if (bytesWritten < 0 || !connected) {
        CFStreamError error = CFWriteStreamGetError(networkStream);
        NSLog(@"Network error code: %ld",error.error);
    }
    [self Disconnector];
    // Bug fix: the lock used to stay set forever after a failed send,
    // permanently blocking all future transfers.
    lockNetwork = false;
}

/* Network end */

/* Camera */

//
// Returns the first video-capable capture device at the requested position
// (front/back), or nil when the hardware has no such camera.
//
- (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition) position
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return nil;
}

// Convenience accessor for the rear camera; nil when the device has none.
- (AVCaptureDevice *) backFacingCamera
{
    NSLog(@"back camera");
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}

// Convenience accessor for the front camera; nil when the device has none.
- (AVCaptureDevice *) frontFacingCamera
{
    NSLog(@"front camera");
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}

// Connection used by -captureStillImage; resolved once the session is built.
AVCaptureConnection *videoConnection;

//
// Builds and starts the AVCaptureSession: front camera at 640x480, flash off,
// focus/exposure locked at the frame center, plus a JPEG still-image output.
// Stores the session/output in properties and caches the video connection.
//
- (void)setupCaptureSession
{
    NSError *error = nil;

    // Create and configure the session. 640x480 keeps the JPEG frames small
    // enough to push over the socket at ~10 fps.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    [session beginConfiguration];
    session.sessionPreset = AVCaptureSessionPreset640x480;

    // Find a suitable device. `device` is an ivar so the capture callback
    // can log its focus point later.
    device = [self frontFacingCamera];

    // Disable the flash. (The capability checked is Auto but the mode set is
    // Off — preserved from the original logic.)
    if ([device isFlashModeSupported:AVCaptureFlashModeAuto])
    {
        NSError *flashError = nil;  // renamed: used to shadow the outer `error`
        if ([device lockForConfiguration:&flashError])
        {
            device.flashMode = AVCaptureFlashModeOff;
            [device unlockForConfiguration];
        }
        else
        {
            NSLog(@"Oops!");
            if ([self respondsToSelector:@selector(flashNotSupported)])
            {
                [self flashNotSupported];
            }
        }
    }

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (!input)
    {
        // Bug fix: execution used to continue and add a nil input to the session.
        NSLog(@"Could not create camera input: %@", error);
        [session commitConfiguration];
        [session release];
        return;
    }
    [session addInput:input];

    // Lock focus/exposure point at the center of the frame.
    if ([input.device isFocusModeSupported:AVCaptureFocusModeAutoFocus])
    {
        NSError *focusError = nil;

        if ([input.device lockForConfiguration:&focusError])
        {
            NSLog(@"Focus: %f,%f",input.device.focusPointOfInterest.x, input.device.focusPointOfInterest.y);

            input.device.focusMode = AVCaptureFocusModeLocked;
            input.device.focusPointOfInterest = CGPointMake(0.5, 0.5);
            input.device.exposurePointOfInterest = CGPointMake(0.5, 0.5);
            [input.device setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
            [input.device unlockForConfiguration];

            NSLog(@"Focus: %f,%f",input.device.focusPointOfInterest.x, input.device.focusPointOfInterest.y);
        }
        else
        {
            NSLog(@"Oops!");
            if ([self respondsToSelector:@selector(autofocusNotSupported)])
            {
                [self autofocusNotSupported];
            }
        }
    }

    // Create an AVCaptureStillImageOutput producing JPEG and add it.
    AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [outputSettings release];   // leak fix: was never released (file is MRC)

    [session addOutput:stillImageOutput];

    // This is what actually gets the AVCaptureSession going.
    [session commitConfiguration];
    [session startRunning];

    // Hand ownership to the properties, then balance the +1 from alloc.
    // Leak fix — NOTE(review): assumes these properties are retain/strong;
    // confirm the declarations in the header.
    self.capturedStillImageOutput = stillImageOutput;
    self.capturedSession = session;
    [stillImageOutput release];
    [session release];

    videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[self.capturedStillImageOutput connections]];
}

//
// Grabs one still frame asynchronously, sends it over the network, shows it
// in the background image view, and — while `started` is set — schedules the
// next capture 0.1 s later (self-rescheduling one-shot timer).
//
- (void) captureStillImage
{
    [self.capturedStillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                               completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
     {
         if (imageDataSampleBuffer != NULL)
         {
             // Grab the frame as JPEG data. Leak fix: the old code assigned
             // the ivars directly, leaking the previous UIImage every frame
             // and leaving JPEGImageData pointing at an autoreleased object.
             // NOTE(review): assumes retain/strong properties — confirm in header.
             self.JPEGImageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];

             [self SendData:self.JPEGImageData];

             self.takenPic = [[[UIImage alloc] initWithData:self.JPEGImageData] autorelease];
             backgroundImageView.image = self.takenPic;

             NSLog(@"Focus: %f,%f",device.focusPointOfInterest.x, device.focusPointOfInterest.y);
         }
         else if (error)
         {
             NSLog(@"Oops!");
         }
     }];
    if(started)
        [NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector(captureStillImage) userInfo:nil repeats:NO];
}

//
// Scans `connections` and returns the first one carrying the given media
// type on any of its input ports; nil when no connection matches.
//
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
    for (AVCaptureConnection *candidate in connections)
    {
        for (AVCaptureInputPort *port in candidate.inputPorts)
        {
            if ([port.mediaType isEqual:mediaType])
            {
                // retain/autorelease keeps the connection alive for the
                // caller under MRC even if the collection goes away.
                return [[candidate retain] autorelease];
            }
        }
    }
    return nil;
}

// Hook invoked when focus configuration fails; intentionally a no-op here.
- (void)autofocusNotSupported{}

// Hook invoked when flash configuration fails; intentionally a no-op here.
- (void)flashNotSupported{}

//
// Shows a modal alert describing a failed still-image capture.
// NOTE: its call site in the capture callback is currently commented out;
// kept because it is part of the class's selector surface.
//
- (void)captureStillImageFailedWithError:(NSError *)error{
    UIAlertView *alertView = [[[UIAlertView alloc] initWithTitle:@"Still Image Capture Failure"
                                                         message:[error localizedDescription]
                                                        delegate:nil
                                               cancelButtonTitle:@"Okay"
                                               otherButtonTitles:nil] autorelease];
    [alertView show];
}

// Builds and starts the capture pipeline. The preview-layer wiring below is
// intentionally disabled: the UI shows captured stills via backgroundImageView
// instead of a live preview.
- (void)cameraOn{
    
	[self setupCaptureSession];
    
    //
    // Remove the background image so that the streaming camera video will be visable.
    //
    //backgroundImageView.image = nil;
    //self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.capturedSession];
    //self.previewLayer.frame = self.backgroundImageView.bounds; 
}

// Counterpart to -cameraOn; currently a no-op (the session is never stopped).
- (void)cameraOff{}

/* Camera end*/

- (void)dealloc
{
    // Leak fix: this file is MRC, so retained ivars must be released here.
    // JPEGImageData/PNGImageData are deliberately NOT released: viewDidUnload
    // releases them without nil-ing the ivars, so releasing again here could
    // over-release. NOTE(review): assumes these properties are retain —
    // confirm the declarations in the header.
    [takenPic release];
    [backgroundImageView release];
    [previewLayer release];
    [capturedStillImageOutput release];
    [capturedSession release];
    [super dealloc];
}

// Standard memory-warning hook; only the superclass behavior (releasing the
// view when it has no superview) is used — no extra caches to purge here.
- (void)didReceiveMemoryWarning
{
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];
    
    // Release any cached data, images, etc that aren't in use.
}

#pragma mark - View lifecycle


// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
// Starts the camera pipeline as soon as the view is loaded.
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self cameraOn];
}

//
// Releases the image data when the view is unloaded (iOS 5-era API).
//
- (void)viewDidUnload
{
    [super viewDidUnload];
    // Bug fix: the ivars were released but never set to nil, leaving dangling
    // pointers that would crash on the next access or a second release. The
    // property setters release the old value and nil the ivar.
    // NOTE(review): assumes retain properties — confirm in header.
    self.JPEGImageData = nil;
    self.PNGImageData = nil;
}

//
// This app supports portrait only.
//
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if (interfaceOrientation == UIInterfaceOrientationPortrait) {
        return YES;
    }
    return NO;
}

/* User interract*/

// Dismisses the keyboard for the text field that triggered this action.
-(IBAction) keyAway:(id)sender{
    [sender resignFirstResponder];
}



//
// Toggles streaming. Starting disables the connection fields and kicks off
// the capture loop (captureStillImage reschedules itself while `started` is
// set); stopping re-enables the fields and lets the loop expire.
//
-(IBAction) start:(id)sender{
    bool launching = !started;
    started = launching;

    NSString *title = launching ? @"Stop" : @"Start";
    [sender setTitle:title forState:UIControlStateNormal];

    // Connection parameters are only editable while stopped.
    address.enabled = !launching;
    dataPort.enabled = !launching;
    commandPort.enabled = !launching;

    if (launching) {
        [NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector(captureStillImage) userInfo:nil repeats:NO];
    }
}

//
// Encodes the currently displayed image as PNG and sends it over the data port.
//
-(IBAction) SendPNGPicture:(id)sender{
    // Bug fix: the autoreleased NSData was stored straight into the ivar
    // without retaining it (dangling pointer on later access). The property
    // setter retains it. NOTE(review): assumes a retain property — confirm.
    self.PNGImageData = UIImagePNGRepresentation(backgroundImageView.image);
    [self SendData:self.PNGImageData];
    [self Disconnector];
}

//
// Encodes the currently displayed image as JPEG (quality 0.95) and sends it
// over the data port.
//
-(IBAction) SendJPEGPicture:(id)sender{
    // Bug fix: the JPEG data was stored in PNGImageData (wrong property).
    // Also use the setter so the autoreleased NSData is retained.
    // NOTE(review): assumes a retain property — confirm in header.
    self.JPEGImageData = UIImageJPEGRepresentation(backgroundImageView.image, 0.95);
    [self SendData:self.JPEGImageData];
    [self Disconnector];
}

/* User interract end*/

@end
