//
//  ViewController.m
//  newAR
//
//  Created by Shangjin Zhai on 14-11-17.
//  Copyright (c) 2014年 Shangjin Zhai. All rights reserved.
//

#import "ViewController.h"
#include "global.h"
#include <opencv2/nonfree/nonfree.hpp>

#include "globalinit.h"
#include "TestImage.hpp"
using namespace markerAR;
// Camera intrinsics shared across the file: `focus` is the focal length in
// pixels for a 480-pixel-wide image and `zNear` the frustum parameter for the
// GL overlay. Both are computed from the device field of view in
// -setupCameraSession and consumed by globalInit / -glkView:drawInRect:.
double focus;
double zNear;

// Shared locks owned by the markerAR library (defined elsewhere).
extern std::mutex markerAR::mutex_keyframe;
extern std::mutex markerAR::mutex_marker;


// Converts a mach absolute-time delta to seconds using the host timebase.
//
// Improvement: the timebase ratio is constant for the lifetime of the
// process, so it is queried once and cached instead of on every call
// (this function runs once per rendered frame via -getFPS). The lazy
// initialization is benign under concurrent first calls: the write is
// idempotent.
static double machTimeToSecs(uint64_t time)
{
  static mach_timebase_info_data_t timebase = {0, 0};
  if (timebase.denom == 0) {
    mach_timebase_info(&timebase);
  }
  return (double)time * (double)timebase.numer /
  (double)timebase.denom / 1e9;
}


@interface ViewController ()

@end

@implementation ViewController

// NOTE(review): with modern compilers these @synthesize directives are
// redundant — auto-synthesis already generates a `_name` ivar for each
// @property declared in the class interface. They are kept as-is because the
// header is not visible here; if any of these properties is declared in a
// category, auto-synthesis would not apply — confirm before removing.
// `matchButton` and `saveButton` are synthesized but never assigned in this
// file.
@synthesize captureSession = _captureSession;
@synthesize dataOutput = _dataOutput;
@synthesize customPreviewerLayer = _customPreviewerLayer;
@synthesize stillImageOutput = _stillImageOutput;
@synthesize numberLabel = _numberLabel;
@synthesize fpsLabel = _fpsLabel;
@synthesize markerCount = _markerCount;
@synthesize markerList = _markerList;
@synthesize matchButton = _matchButton;
@synthesize refsButton = _refsButton;
@synthesize saveButton = _saveButton;





// Appends one frame of tracking output — frameID, then R in row-major order,
// then t — as raw binary to <Documents>/trajectory.txt.
//
// Fixes two defects in the original:
//  - `dumpFileDir` was a never-assigned nil static, so the file URL was nil
//    and -writeToURL: silently did nothing;
//  - each call overwrote the file with a single frame instead of
//    accumulating the whole trajectory.
// The file is truncated once per run, then appended to on every call.
static void DumpTrackingDataOfGrayImage(int frameID,const Matx33d &R,const Matx31d &t)
{
  static NSString *dumpPath = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    NSString *docs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                         NSUserDomainMask,
                                                         YES).firstObject;
    dumpPath = [docs stringByAppendingPathComponent:@"trajectory.txt"];
    // Truncate once so each session records a fresh trajectory.
    [[NSData data] writeToFile:dumpPath atomically:NO];
  });

  NSMutableData *allData = [NSMutableData data];

  // FrameID
  [allData appendBytes:&frameID length:sizeof(frameID)];
  // Rotation matrix, row-major.
  for(int row=0;row<3;row++)
    for(int col=0;col<3;col++)
    {
      [allData appendBytes:&R(row,col) length:sizeof(double)];
    }
  // Translation vector.
  for (int row=0; row<3; row++)
  {
    [allData appendBytes:&t(row,0) length:sizeof(double)];
  }

  // Append this frame's record to the dump file.
  NSFileHandle *handle = [NSFileHandle fileHandleForWritingAtPath:dumpPath];
  if (handle) {
    [handle seekToEndOfFile];
    [handle writeData:allData];
    [handle closeFile];
  }
}









// Returns the current frame rate in frames per second, averaged over a
// sliding window of up to the last 11 call timestamps (the `timeStamp`
// vector ivar).
//
// Fix: the original divided by (timeStamp.size() - 1) even on the very
// first call, where the window holds a single, just-recorded timestamp —
// yielding machTimeToSecs(0) / 0 == 0/0 == NaN. We now return 0 until at
// least two samples exist.
- (double)getFPS
{
  uint64_t currentTime = mach_absolute_time();
  if(timeStamp.size() < 11)
  {
    timeStamp.push_back(currentTime);
  }
  else
  {
    // Window full: shift everything left and append the newest sample.
    for(int i = 0 ; i < 10 ; i++)
      timeStamp[i] = timeStamp[i + 1];
    timeStamp[10] = currentTime;
  }
  if(timeStamp.size() < 2)
    return 0.0;  // not enough samples to measure an interval yet
  double timeInSeconds =
      machTimeToSecs(currentTime - timeStamp[0]) / (timeStamp.size() - 1);
  return 1.0/timeInSeconds;
}


// GLKViewDelegate draw callback. Renders the latest camera frame as a
// full-view textured quad, then — when tracking succeeded for the current
// frame (`isCapture`) — overlays a grid and a cube using the camera pose
// computed in the capture callback. GL calls are order-dependent:
// ortho setup for the video quad, then frustum + modelview for the 3D
// overlay.
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
  glClear(GL_COLOR_BUFFER_BIT);
  
  // 2D orthographic pass: draw the camera image as the background.
  [mVisualizer setupOrtho];
  
  // Texture is created exactly once (C++ static local init) and re-uploaded
  // from `image` (the 480x270 RGBA buffer filled by the capture callback)
  // every frame.
  static int texID = [mVisualizer generateTexture:NULL];
  [mVisualizer renderTexture:texID withImagePtr:image];
  
  
  // 3D pass: perspective frustum using the zNear derived from the camera FOV.
  [mVisualizer setupFrustum:zNear];
  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();

    
//  glTranslatef(0, 0, -9);
    // `isCapture` and `CameraPose` are written on the capture queue;
    // NOTE(review): there is no synchronization with this render callback —
    // presumably torn reads are tolerated here; verify.
    if(isCapture)
    {

        if(CameraPose.size()>0)
        {
//            Eigen::Matrix4d tmp;
//            tmp = CameraPose.transpose();
//            for(int i = 0 ; i < 4 ;i++)
//            {
//                for(int j = 0; j < 4; j++)
//                    m[i][j] = (float)tmp(i, j);
//            }
//            glMatrixMode(GL_MODELVIEW);
//            glLoadMatrixf((GLfloat *)m);

            // Draw the AR overlay, then restore the video ortho projection.
            [mVisualizer renderGrid:0];
            [mVisualizer renderCube:CameraPose];
//            [mVisualizer renderCube:allMarkers[0]->center*2:CameraPose];
            [mVisualizer setupVideoOrtho];
//            cout<<CameraPose.col(3)<<endl;
        }
  }
}



// Action for the "Add Marker" button: snapshots the most recent camera frame
// (under the same lock the capture callback uses to publish it), pairs it
// with the device's current IMU orientation, and registers the result as a
// new marker in the global map and bag-of-words index.
//
// NOTE(review): the action selector `refsButton:` shadows the `refsButton`
// property getter in spirit (distinct selectors, but confusing to readers).
// `Marker` is allocated with `new` and handed to the map — presumably the
// map takes ownership; verify to rule out a leak.
- (void) refsButton: (UIButton *) button
{
  Mat img ;
  // Deep-copy the latest frame under the shared image lock.
  @synchronized (_imuLock) { img= imageObject.clone();}
  
  Eigen::Matrix3d R;
  // Side effect: refreshes the yaw/pitch/roll ivars from CoreMotion.
  [self getRotationMatrix];
  R = Solve3D::setRotationMatrix(yaw,pitch,roll);

  cout<<"Rotation Matrix"<<R<<endl;
  Marker* m = new Marker(img,R,markerNum);
  // Index the marker's ORB features and retrain the BoW vocabulary so the
  // new marker is immediately retrievable.
  bow.addImage(m->orbStructure->kpts,m->orbStructure->desp);
  bow.train();
  markerAR::map->addMarker(m);
    
  markerNum++;

}




// Builds the on-screen HUD: four yellow status labels plus the "Add Marker"
// button, all sized as fractions of the main screen bounds.
- (void)initUI
{
  // Truncate to int exactly as the original did (screen points are integral
  // on all supported devices).
  int screenW = [UIScreen mainScreen].bounds.size.width;
  int screenH = [UIScreen mainScreen].bounds.size.height;

  // Creates a yellow label at the given fractional frame and attaches it to
  // the view hierarchy.
  UILabel *(^addLabel)(double, double, double, double) =
      ^UILabel *(double x, double y, double w, double h) {
        CGRect frame = CGRectMake(screenW * x, screenH * y,
                                  screenW * w, screenH * h);
        UILabel *label = [[UILabel alloc] initWithFrame:frame];
        [self.view addSubview:label];
        label.backgroundColor = [UIColor yellowColor];
        return label;
      };

  _numberLabel = addLabel(0.03, 0.06, 0.15, 0.06);
  _fpsLabel    = addLabel(0.03, 0.06, 0.25, 0.06);
  _markerCount = addLabel(0.03, 0.2,  0.25, 0.06);
  _markerList  = addLabel(0.2,  0.2,  0.15, 0.06);

  // "Add Marker" button wired to the -refsButton: action.
  _refsButton = [[UIButton alloc]
      initWithFrame:CGRectMake(screenW * 0.03, screenH * 0.72,
                               screenW * 0.15, screenH * 0.06)];
  [_refsButton setTitle:@"Add Marker" forState:UIControlStateNormal];
  [_refsButton setTitleColor:[UIColor redColor] forState:UIControlStateNormal];
  [_refsButton addTarget:self
                  action:@selector(refsButton:)
        forControlEvents:UIControlEventTouchUpInside];
  [self.view addSubview:_refsButton];
}

// Refreshes the HUD while in tracking mode. Despite its name, `detectfps`
// holds the per-frame tracking TIME in seconds (set from TickMeter in the
// capture callback), hence the 1./detectfps when displaying it.
//
// NOTE(review): the clamp below sets detectfps to 1e10 when the measured
// time is near zero, which makes the displayed "Tracking FPS" ~1e-10.
// The intent was presumably to guard the division against a tiny
// denominator — confirm whether the clamp value should be 1e-10 instead.
- (void)show
{
    if (detectfps<0.001) {
        detectfps = 1e10;
    }
  _numberLabel.text = [NSString stringWithFormat:@"匹配点个数:%d", keyPointSize];
  _fpsLabel.text = [NSString stringWithFormat:@"Render FPS = %3.2f",[self getFPS]];
  _markerCount.text = [NSString stringWithFormat:@"Tracking FPS = %3.2f",1./detectfps];
  _markerList.text = [NSString stringWithFormat:@"marker:%d",markerNum];
//  _fpsLabel.text = [NSString stringWithFormat:@"FPS = %3.2f", (random() % 1000) /1000.0+29.5];
  // Only the tracking-FPS label is visible in this mode.
  [_numberLabel setHidden:YES];
  [_markerCount setHidden:NO];
  [_markerList setHidden:YES];
}


// Configures the AVFoundation capture pipeline: session, default video
// device (continuous autofocus / auto exposure), a BGRA video-data output
// delivering frames to a serial queue, and a JPEG still-image output.
// Also derives the global `focus` (focal length in pixels, for a
// 480-px-wide image) and `zNear` (GL frustum parameter) from the device's
// reported field of view. Does NOT start the session — -viewDidLoad does.
- (void)setupCameraSession
{
  // Session; configuration changes are batched between
  // beginConfiguration/commitConfiguration.
  _captureSession = [AVCaptureSession new];
  [_captureSession beginConfiguration];
  [_captureSession setSessionPreset:AVCaptureSessionPresetMedium];
  
  // Default back camera.
  AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  
  NSError *error;
  
  // Device properties may only be changed while holding the configuration
  // lock; failures are silently ignored (best-effort).
  if([inputDevice lockForConfiguration:&error])
  {
    //        inputDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
    inputDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    inputDevice.exposureMode = AVCaptureExposureModeAutoExpose;
    [inputDevice unlockForConfiguration];
  }
  
  // Pinhole model: focal length in pixels = (width/2) / tan(hFOV/2),
  // for the hard-coded 480-pixel processing width.
  double fov = inputDevice.activeFormat.videoFieldOfView;
  cout<<"fov"<<fov<<endl;
  int width = 480;
  int halfWidth = width/2;
  focus = halfWidth/tan(fov * 3.14159265 / 180 * 0.5);
  
  // Near-plane parameter for the 480x270 GL overlay (FOV rescaled by the
  // 270/480 crop ratio).
  zNear = 1/tan(fov*270/480 * 3.14159265 / 180 * 0.5);
  
    cout<<"width:"<<self.view.frame.size.width<<"height:"<<self.view.frame.size.height<<endl;
  
  // Attach the camera as the session input.
  AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
  if([_captureSession canAddInput:deviceInput])
    [_captureSession addInput:deviceInput];
  
  // Plain CALayer used to display debug frames (its `contents` is set in the
  // capture callback); not an AVCaptureVideoPreviewLayer.
  _customPreviewerLayer = [CALayer layer];
  _customPreviewerLayer.bounds = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
  _customPreviewerLayer.position = CGPointMake(self.view.frame.size.width/2, self.view.frame.size.height/2);
  
  //    cout<<self.view.frame.size.width<<" "<<self.view.frame.size.height<<endl;
  
  // _customPreviewerLayer.affineTransform = CGAffineTransformMakeRotation(M_PI/2.0);
  [self.view.layer addSublayer:_customPreviewerLayer];
  
  // Video output: BGRA frames, dropping late ones to keep latency bounded.
  _dataOutput = [AVCaptureVideoDataOutput new];
  _dataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
  [_dataOutput setAlwaysDiscardsLateVideoFrames:YES];
  
  if([_captureSession canAddOutput:_dataOutput])
    [_captureSession addOutput:_dataOutput];
  
  // Still-image output (JPEG). NOTE(review): AVCaptureStillImageOutput is
  // deprecated in favor of AVCapturePhotoOutput on newer SDKs.
  _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
  NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey, nil];
  [_stillImageOutput setOutputSettings:outputSettings];
  if([_captureSession canAddOutput:_stillImageOutput])
    [_captureSession addOutput:_stillImageOutput];
  
  [_captureSession commitConfiguration];
  
  // Frames are delivered to -captureOutput:... on this serial queue,
  // off the main thread.
  dispatch_queue_t queue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL);
  [_dataOutput setSampleBufferDelegate:self queue:queue];
  
  
  
}


// Creates an OpenGL ES 1.1 context and a 16:9 GLKView pinned to the top of
// the screen, makes the context current, and sets a black clear color.
-(void)initGL
{
  CGRect frame = [UIScreen mainScreen].bounds;
  frame.size.height = frame.size.width/16*9;  // force a 16:9 video aspect

  EAGLContext *glContext =
      [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
  mGLView = [[GLKView alloc] initWithFrame:frame];
  mGLView.context = glContext;
  mGLView.delegate = self;  // draw callback: -glkView:drawInRect:
  [self.view addSubview:mGLView];

  [EAGLContext setCurrentContext:mGLView.context];
  glClearColor(0, 0, 0, 0);
}


// Registers the four compiled-in marker images (raw BGR pixel arrays
// marker1..marker4 from global.h) with the global marker map and the
// bag-of-words index, then trains the vocabulary once at the end.
//
// Fix: the original computed ORB features of M2 for the fourth marker
// (id 3) instead of M4 — a copy-paste bug that made marker 4
// unretrievable. The per-marker steps are also folded into a loop so the
// mistake cannot recur.
void addOfflineMarker()
{
    cv::Mat M1(360,480,CV_8UC3),M2(360,480,CV_8UC3),M3(360,480,CV_8UC3),M4(360,480,CV_8UC3);
    memcpy(M1.data, marker1, sizeof(marker1));
    memcpy(M2.data, marker2, sizeof(marker2));
    memcpy(M3.data, marker3, sizeof(marker3));
    memcpy(M4.data, marker4, sizeof(marker4));

    // Identity orientation for all offline markers.
    double yaw = 0.0;
    double pitch = 0.0;
    double roll = 0.0;
    Eigen::Matrix3d R = Solve3D::setRotationMatrix(yaw, pitch, roll);

    markerAR::ORB orb(10,2000);
    // Marker ids follow the original registration order: M2=0, M1=1, M3=2, M4=3.
    // (cv::Mat copies share pixel data, so this array is cheap.)
    cv::Mat images[4] = {M2, M1, M3, M4};
    for (int id = 0; id < 4; id++)
    {
        std::vector<cv::KeyPoint> kpts;
        cv::Mat desp;
        orb(images[id], Mat(), kpts, desp);
        bow.addImage(kpts, desp);
        // Map presumably takes ownership of the Marker allocated here.
        markerAR::map->addMarker(new Marker(images[id], R, id));
    }

    bow.train();
}

// View lifecycle entry point. Order matters here: the GL view must exist
// before frames arrive; -setupCameraSession computes the global `focus`
// that globalInit consumes; offline markers are registered before tracking
// state is reset; the capture session is started last.
- (void)viewDidLoad {
  [super viewDidLoad];
  
  [self initGL];
  
  mFocal = 576  ;
  [self setupCameraSession];
  // Visualizer renders the 480x270 video texture with the fixed focal 576.
  mVisualizer = [[Visualizer alloc] initWithSize:480 andHeight:270   andFocal:mFocal];
  // image = new GLubyte[360*480*4];
  // RGBA buffer the capture callback copies each cropped frame into.
  image = new GLubyte[270*480*4];
  _motionManager = [[CMMotionManager alloc]init];
  [_motionManager startDeviceMotionUpdates];
  
  // Initialize the markerAR globals. NOTE(review): tracking works on
  // 480x360 frames while the GL texture uses a 480x270 crop — presumably
  // intentional (16:9 display of a 4:3 stream); verify.
  int imgWidth = 480;
  int imgHeight =360;
  globalInit(imgWidth,imgHeight,focus);
    addOfflineMarker();
  markerNum = 0;
  searchNum = 2;
  isTrain = NO;
  isMatched = NO;
  isTrack = NO;
  backgroundIsRun = NO;
  m = Mode_Tracking;
    // Reference instant for the timestamps passed to the tracker.
    start = clock.now();
  [self initUI];
  saveId = 0;
  
  // Frames begin flowing to -captureOutput:... after this.
  [_captureSession startRunning];
}

// Updates the HUD with the current feature-point count, the render FPS and
// the number of registered markers.
- (void)addContent:(int) keypointSize
{
  NSString *pointsText = [NSString stringWithFormat:@"特征点：%d", keypointSize];
  NSString *fpsText = [NSString stringWithFormat:@"FPS = %3.2f", [self getFPS]];
  NSString *countText = [NSString stringWithFormat:@"marker个数: %d", markerNum];

  _numberLabel.text = pointsText;
  _fpsLabel.text = fpsText;
  _markerCount.text = countText;
}

// Updates the HUD for match mode: matched/total point counts, the two
// pipeline rates, the marker count, and the list of candidate marker ids
// from the last BoW query.
//
// Fix: the original iterated `searchNum` entries of `markerIdx`
// unconditionally; when the query returned fewer candidates than
// `searchNum`, this read past the end of the vector (undefined behavior).
// The loop is now clamped to the number of candidates actually available.
- (void)addContent:(int) keypointSize with: (int) matchpointSize with: (double) detectfps with: (double) matchfps
{
  _numberLabel.text = [NSString stringWithFormat:@"匹配点：%d/%d", keypointSize,matchpointSize];
  _fpsLabel.text = [NSString stringWithFormat:@"FPS=%3.2f/%3.2f",detectfps,matchfps];
  _markerCount.text = [NSString stringWithFormat:@"marker个数: %d",markerNum];
  NSString *list = [NSString stringWithFormat:@"匹配列表："];
  // Show at most searchNum ids, but never more than markerIdx holds.
  int count = searchNum;
  if ((size_t)count > markerIdx.size())
    count = (int)markerIdx.size();
  for(int i = 0; i < count; i++)
    list = [list stringByAppendingString:[NSString stringWithFormat:@" %d",markerIdx[i]]];
  _markerList.text = list;
}

// Reads the current device attitude from CoreMotion and returns its
// rotation matrix.
//
// Side effect: refreshes the `pitch`, `yaw` and `roll` ivars — several
// callers invoke this purely for that side effect and ignore the return
// value. If device motion is not yet available, `motion`/`attitude` are
// nil and nil-messaging yields zeroed angles and a zero-filled matrix.
- (CMRotationMatrix)getRotationMatrix{
  CMDeviceMotion *motion = _motionManager.deviceMotion;
  CMAttitude *attitude = motion.attitude;
  pitch = attitude.pitch;
  yaw = attitude.yaw;
  roll = attitude.roll;
  return attitude.rotationMatrix;
}


// Per-frame entry point, called on the serial "VideoQueue". Converts the
// sample buffer to a cv::Mat, copies a 480x270 crop of the BGRA pixels into
// the GL texture buffer (swapping B/R to get RGBA), publishes the frame
// under _imuLock, then runs the tracking state machine:
//   Tracking_Lost    -> spawn a detached relocalisation thread (if none runs)
//   Not_Initialise   -> spawn a detached initializer thread (if not busy)
//   otherwise        -> track synchronously on this queue
// Finally updates the pose/FPS state read by the render callback and
// dispatches UI updates to the main queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer       fromConnection:(AVCaptureConnection *)connection
{
  //convert to cv::Mat
  UIImage *imageOutput = [OpenCVController imageFromSampleBuffer:sampleBuffer];
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  CVPixelBufferLockBaseAddress(imageBuffer,0);
  // Copy rows 45..314 of the 480-wide frame: a vertically centered 270-row
  // crop of a 360-row image (45 rows skipped top and bottom).
  memcpy(image , (GLubyte *)CVPixelBufferGetBaseAddress(imageBuffer)+45*480*4 , 270*480*4);
  CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
  Mat matOutput = [OpenCVController cvMatFromUIImage:imageOutput];
  // BGRA -> RGBA for the GL texture upload.
  for(int i = 0; i < 270 * 480 * 4; i+=4)
  {
    swap(image[i], image[i + 2]);
  }
  
  // Publish the frame for -refsButton: and the worker threads.
  @synchronized (_imuLock) { imageObject = matOutput.clone();}
  if(m == Mode_Tracking||m == Mode_DrawImg)
  {
    // Monotonic frame counter; only used by the (disabled) dump call below.
    static int frameID = -1;
    frameID++;
      Eigen::Matrix3d IMUR;
    // Side effect: refreshes yaw/pitch/roll. NOTE(review): the angles are
    // passed in a different order (roll,pitch,yaw) than in -refsButton:
    // (yaw,pitch,roll) — confirm which is intended.
    [self getRotationMatrix];
    IMUR = Solve3D::setRotationMatrix(roll ,pitch,yaw);
    Mat R, t;
//    if (nowMarker == nullptr)
//    {
//      nowMarker = allMarkers[0].get();
//    }
      // Time the tracking step; milliseconds since `start` is the timestamp
      // handed to the tracker.
      TickMeter tm;
      tm.start();
      double timestamp = std::chrono::duration_cast<std::chrono::milliseconds>(clock.now().time_since_epoch()-start.time_since_epoch()).count();
      cout<<timestamp<<endl;
    if(track->getState() == Tracking::Tracking_Lost){
      // Relocalise on a background thread unless one is already running.
      if (!relocal->getbg()){
        relocal_thread  = std::thread(&Relocalisation::tryRelocalisation,relocal,imageObject,timestamp);
        relocal_thread.detach();
      }
    }
    else if(track->getState() == Tracking::Not_Initialise){
        // try_lock probes whether the initializer is busy; the lock is
        // released immediately — the thread re-acquires it itself.
        if (initialzer->m.try_lock()){
            initialzer->m.unlock();
            initial_thread = std::thread(&Initializer::tryInitialize,initialzer,std::ref(imageObject),timestamp);
            initial_thread.detach();
        }
    }
    else
    {
      // Normal case: track the frame synchronously on the capture queue.
      track->tracking(imageObject,timestamp);
    }
      tm.stop();
      // Despite the name, this stores the tracking TIME in seconds;
      // -show displays 1/detectfps.
      detectfps = tm.getTimeSec();
      // Render callback draws the AR overlay only while tracking succeeds.
      if(track->getState() == Tracking::Tracking_Success)
          isCapture = true;
      else
          isCapture = false;
    CameraPose = track->getRenderP();
    if(CameraPose.size()>0)
      cout<<CameraPose[0].col(3)<<endl;
    //    DumpTrackingDataOfGrayImage(frameID,C.R,C.t);
    
    //convert to CGImageRef
    //    imageOutput = [OpenCVController UIImageFromCVMat:matOutput];
    //    imageOutput = [OpenCVController UIImageFromCVMat:debugimg];
    //   CGImageRef dstImageFilter = imageOutput.CGImage;
    
    // UI updates must run on the main queue; dispatch_sync is safe here
    // because this callback runs on the capture queue, not main — but it
    // does block frame delivery until the UI work finishes.
      if (m == Mode_Tracking) {
          dispatch_sync(dispatch_get_main_queue(), ^{

          [self show];
          [mGLView display];
          return ;  // returns from the block only; has no further effect
          });

      }
      else{
          // Debug mode: show the tracker's annotated image in the CALayer.
          imageOutput = [OpenCVController UIImageFromCVMat:track->debugImg];
          CGImageRef dstImageFilter = imageOutput.CGImage;
          dispatch_sync(dispatch_get_main_queue(), ^{
              _customPreviewerLayer.contents = (__bridge id)dstImageFilter;
          });

      //       _customPreviewerLayer.contents = (__bridge id)dstImageFilter;
      //        if(isMatch == NO)
      //            [self addContent:keyPointSize];
      //        else
      //            [self addContent:keyPointSize with:matchpointSize with:detectfps with:matchfps];
      }
  }
  else if(m == Mode_BoWTest)
  {
//    if(bowORB.hasVoc())
//    {
//      vector<cv::KeyPoint> kpts;
//      Mat desp;
//      myorb->detect(imageObject,kpts);
//      myorb->compute(imageObject,kpts,desp);
//      auto result = bowORB.query(kpts,desp);
//     // if(result.size()>0)
//       // imageObject = allMarkers[result[0].first]->img.clone();
//    }
//      imageOutput = [OpenCVController UIImageFromCVMat:imageObject];
//    CGImageRef dstImageFilter = imageOutput.CGImage;
//    dispatch_sync(dispatch_get_main_queue(), ^{
//      _customPreviewerLayer.contents = (__bridge id)dstImageFilter;
//    });
    
  }
}

// Standard UIViewController memory-warning hook; nothing is cached here
// beyond what the system manages, so only super is called.
- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
  // Dispose of any resources that can be recreated.
}

@end
