#include <iostream>
#include <sstream>
#include <fstream>
#include <QApplication>
#include <QThread>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>

#include "base/pose_io.hh"
#include "base/global_param.hh"
#include "base/frame_grabber.hh"
#include "base/camcorder.hh"
#include "base/util.hh"
#include "mbt/object3d.hh"
#include "mbt/view.hh"
#include "mbt/tracker.hh"

// Runs the live tracking loop for the models listed in `config_file`.
//
// Reads camera/tracker settings via tk::GlobalParam, grabs frames from the
// configured source, and (once the user presses 'a' to initialize) estimates
// and records a 6-DoF pose per object per frame. Press ESC to quit.
//
// Returns 0 on normal termination.
int TrackLive(const std::string& config_file) {
	std::cout << std::endl << "########################" << std::endl;
	tk::GlobalParam* gp = tk::GlobalParam::Instance();
	gp->ParseConfig(config_file);

	tk::FrameGrabber* grabber = tk::FrameGrabber::GetFrameGrabber(gp->frames);
	CHECK(grabber) << "ERROR: check <frames> in config file.";
	gp->image_width = grabber->width;
	gp->image_height = grabber->height;
	// Pinhole intrinsics from config; distortion is assumed already corrected.
	cv::Matx33f K = cv::Matx33f(gp->fx, 0, gp->cx, 0, gp->fy, gp->cy, 0, 0, 1);
	cv::Matx14f distCoeffs = cv::Matx14f(0.0, 0.0, 0.0, 0.0);

	// Ground-truth poses supply the initial pose (frame 0) for each object.
	PoseReader *pose_reader = new PoseReaderRBOT;
	std::vector<std::vector<cv::Matx44f> > gt_poses;
	pose_reader->Read(gp->gt_pose_file, gt_poses);
	delete pose_reader;  // was leaked: Read() is the only use of the reader

	// One Object3D per model file, initialized at its ground-truth pose.
	// NOTE(review): distances presumably select template/render LOD levels
	// inside Object3D — confirm against Object3D's constructor.
	std::vector<float> distances = { 200.0f, 400.0f, 600.0f };
	std::vector<Object3D*> objects;
	CHECK(gt_poses.size() >= gp->model_file.size())
			<< "ERROR: fewer ground-truth pose tracks than model files.";
	for (size_t i = 0; i < gp->model_file.size(); ++i) {
		objects.push_back(new Object3D(gp->model_file[i], gt_poses[i][0], 1.0, 0.55f, distances));
		objects[i]->fcount = 0;
	}

	View* view = View::Instance();
	view->init(K, gp->image_width, gp->image_height, gp->zn, gp->zf, 4);

	// Output pose file: replace the 4-char config extension with ".tk".
	// NOTE(review): assumes config_file ends in a 4-character suffix like
	// ".yml" — confirm the expected config naming convention.
	PoseWriter pose_writer(config_file.substr(0, config_file.size()-4)+".tk", config_file, objects.size());

	Tracker* poseEstimator = Tracker::GetTracker(gp->tracker_mode, K, distCoeffs, objects);

	int timeout = gp->timeout;
	int fid = 0;

	cv::Mat frame;
	bool is_init = false;  // becomes true once the user starts tracking with 'a'

	tk::Camcorder camcorder(gp->bench_case + ".avi", gp->image_width, gp->image_height, true);

	while (true) {
		// waitKey also services the HighGUI event loop; `timeout` ms per frame.
		int key = cv::waitKey(timeout);
		if (27 == key)  // ESC quits
			break;
		if ('a' == key) {
			// Initialize tracking for every object from the current frame.
			for (size_t i = 0; i < objects.size(); ++i) {
				poseEstimator->ToggleTracking(frame, i, false);
			}

			is_init = true;
		}

		if (!grabber->GrabFrame(frame))
			break;

		if (is_init) {
			camcorder.Record(frame);

			poseEstimator->EstimatePoses(frame, gp->bench_mode);
			poseEstimator->UpdateHist(frame);

			// Record the estimated pose of each object for this frame id.
			for (size_t i = 0; i < objects.size(); ++i) {
				cv::Matx44f tk_pose = objects[i]->getPose();
				pose_writer.Record(tk_pose, fid);
			}

			fid++;
		}

		if (gp->show_result) {
			cv::Mat result = view->DrawResultOverlay(std::vector<Model*>(objects.begin(), objects.end()), frame);

			// Shrink very wide renders so the window fits typical displays.
			if (result.cols > 1440) {
				cv::Mat small_out;
				cv::resize(result, small_out, cv::Size(result.cols*0.75, result.rows*0.75));
				cv::imshow(config_file, small_out);
			} else {
				cv::imshow(config_file, result);
			}
		}
	}

	std::cout << "#######################" << std::endl;

	View::Instance()->destroy();

	for (size_t i = 0; i < objects.size(); i++) {
		delete objects[i];
	}
	objects.clear();
	delete poseEstimator;

	return 0;  // was missing: falling off a value-returning function is UB
}

// Entry point: expects exactly one argument, the tracker config file path.
// Returns 1 with a usage message if the argument is missing (previously
// dereferenced argv[1] unconditionally, crashing when run with no args).
int main(int argc, char* argv[]) {
	QApplication a(argc, argv);
	if (argc < 2) {
		std::cerr << "Usage: " << argv[0] << " <config_file>" << std::endl;
		return 1;
	}
	return TrackLive(argv[1]);
}