#include "desktopstreaming.h"

#include <iostream>
#include <utility>

#include <QDebug>
#include <QImage>

#include "AACSource.h"
#include "H264Source.h"

/// Builds the streaming widget: sets up the generated UI, registers this
/// widget with the shared pipeline state, and wires the frame/audio signals
/// to their slots.
DesktopStreaming::DesktopStreaming(QWidget *parent)
	: QWidget(parent)
{
	ui.setupUi(this);
	fState.main_widget_ = this;
	// Pointer-to-member connect is checked at compile time, unlike the
	// string-based SIGNAL()/SLOT() macros. newFrame/newAudio are emitted
	// from PushVideo/PushAudio — presumably on worker threads (TODO confirm),
	// in which case Qt::AutoConnection queues the slot onto the GUI thread.
	connect(this, &DesktopStreaming::newFrame, this, &DesktopStreaming::slotNewFrame);
	connect(this, &DesktopStreaming::newAudio, this, &DesktopStreaming::slotNewAudio);
}

// Nothing to release here beyond what members clean up themselves;
// closeEvent() handles pipeline shutdown.
DesktopStreaming::~DesktopStreaming() = default;

// Stops the preview refresh timer if it is running and marks it inactive.
void DesktopStreaming::resetRefreshTimer()
{
	if (ui_refresh_timer_id_ <= 0)
		return;

	killTimer(ui_refresh_timer_id_);
	ui_refresh_timer_id_ = -1;
}

/// Handles timer ticks. Only the preview-refresh timer is consumed here;
/// any other timer event is forwarded to the base class.
///
/// Fix: the original accepted EVERY timer event whenever the refresh timer
/// was active; now the event's id is compared against ui_refresh_timer_id_
/// so unrelated timers still reach QWidget::timerEvent.
void DesktopStreaming::timerEvent(QTimerEvent* ev)
{
	if (0 < ui_refresh_timer_id_ && ev->timerId() == ui_refresh_timer_id_)
	{
		refreshVideo();
		ev->accept();
		return;
	}

	QWidget::timerEvent(ev);
}

/// Pulls the latest captured RGBA frame from the pipeline state and shows it
/// in the preview label. Does nothing when no valid frame is available.
///
/// Fix: removed the unused local palette copy (`auto p = ...->palette()`).
void DesktopStreaming::refreshVideo()
{
	int w = 0, h = 0, ds = 0;
	std::shared_ptr<uint8_t> rawData;
	GetRawVideoFrame(&fState, w, h, ds, rawData);
	if (0 < w && 0 < h && rawData)
	{
		// QImage wraps rawData without copying; QPixmap::fromImage() takes a
		// deep copy, so the pixmap stays valid after rawData is released.
		// NOTE(review): 'ds' is presumably a row stride from the capturer —
		// if so it should be passed as bytesPerLine; confirm against
		// GetRawVideoFrame before changing the tightly-packed assumption.
		QImage img(rawData.get(), w, h, QImage::Format_RGBA8888);
		ui.desktop->setPixmap(QPixmap::fromImage(img));
		ui.desktop->setScaledContents(true);
	}
}

// Brings the whole capture -> encode -> publish pipeline up, stage by stage.
// Each stage logs its outcome; a stage only starts when the previous one
// succeeded. Publishing mode (RTSP push vs. built-in RTSP server) is chosen
// from the UI radio buttons.
void DesktopStreaming::StartLive_()
{
	fState.rtsp_url = "rtsp://127.0.0.1:554/live/desktop";
	// Video input setup; do/while(0) gives a single-exit scope so a failure
	// can bail out of this stage without skipping the audio setup below.
	do
	{
		// Log every detected monitor, then capture the first one (index 0).
		auto monitors = GetMonitors();
		for (auto itr = monitors.begin(); monitors.end() != itr; ++itr)
		{
			std::cout << itr->high_part << "," << itr->low_part << ":"
				<< itr->left << "->" << itr->right << ","
				<< itr->top << "->" << itr->bottom << std::endl;
		}
		if (monitors.empty())
			return;

		// Capture geometry of the primary monitor.
		fState.width = monitors[0].right - monitors[0].left;
		fState.height = monitors[0].bottom - monitors[0].top;
		fState.desktop_x = monitors[0].left;
		fState.desktop_y = monitors[0].top;
		fState.is_monitor = true;

		bool bS = ffmpegOpenVideoInput(&fState);
		std::cout << "Open video input " << (bS ? "Success" : "Failed") << ",With: " << fState.width << "*" << fState.height << std::endl;
	} while (0);
	
	// Common timestamp base for both video and audio PTS.
	fState.pts_start_time = CurrentTimeMSec();
	if (fState.is_video_input_inited)
	{
		// Video chain: capture -> output context -> pixel converter -> encoder.
		bool bS = StartVideoCapture(&fState);
		std::cout << "StartVideoCapture " << (bS ? "Success" : "Failed") << std::endl;

		if (bS)
		{
			bS = ffmpegOpenVideoOutput(&fState);
		}
		std::cout << "ffmpegOpenVideoOutput " << (bS ? "Success" : "Failed") << std::endl;
		if (bS)
		{
			OpenVideoConverter(&fState);

			bS = StartVideoEncode(&fState);
		}
		std::cout << "StartVideoEncode " << (bS ? "Success" : "Failed") << std::endl;
	}

	{
		// Audio chain: open input -> capture -> open encoder -> encode.
		// Runs even if the video chain failed, so audio-only streaming works.
		bool bS = MicrosoftOpenAudioInput(&fState);
		std::cout << "MicrosoftOpenAudioInput " << (bS ? "Success" : "Failed") << std::endl;
		if (bS)
		{
			bS = StartAudioCapture(&fState);
			std::cout << "StartAudioCapture " << (bS ? "Success" : "Failed") << std::endl;
			if (bS)
			{
				bS = OpenAudioEncode(&fState);
			}
			std::cout << "OpenAudioEncode " << (bS ? "Success" : "Failed") << std::endl;
		
			if (bS)
			{
				bS = StartAudioEncode(&fState);
			}
			std::cout << "StartAudioEncode " << (bS ? "Success" : "Failed") << std::endl;
		}
	}

	if (ui.rtspPusher->isChecked())
	{
		// Push mode: connect out to an external RTSP server at fState.rtsp_url.
		// NOTE(review): only the AAC source is registered here — the H264
		// source line is commented out, so push mode is audio-only as written.
		jlh::MediaSession* session = jlh::MediaSession::CreateNew();
		//session->AddSource(jlh::channel_0, jlh::H264Source::CreateNew());
		session->AddSource(jlh::channel_1, jlh::AACSource::CreateNew(fState.audio_samplerate, fState.audio_channels, false));

		auto rtsp_pusher = jlh::RtspPusher::Create();
		rtsp_pusher->AddSession(session);
		if (0 != rtsp_pusher->OpenUrl(fState.rtsp_url))
		{
			// NOTE(review): on failure the capture/encode threads started
			// above keep running — confirm whether StopLive_() should be
			// invoked here.
			qDebug() << "RTSP PUSHER Open Url. Failed at: " << fState.rtsp_url.c_str();
			rtsp_pusher.reset();
			return;
		}

		fState.rtsp_pusher_ = rtsp_pusher;
	}
	else if (ui.rtspPublish->isChecked())
	{
		// Serve mode: run our own RTSP server and let clients connect in.
		QString ip = ui.ip->text().trimmed();
		int port = ui.port->text().trimmed().toInt();
		QString suffix = ui.suffix->text().trimmed();
		
		auto rtsp_server = jlh::RtspServer::Create();
		// Loopback would only accept local clients; bind all interfaces instead.
		if (ip == "127.0.0.1") {
			ip = "0.0.0.0";
		}

		qDebug() << "Start Stream, " << ip << ":" << port << "," << suffix;
		rtsp_server->Start(ip.toLatin1().data(), port);

		// Session carries both H264 video (channel_0) and AAC audio (channel_1).
		jlh::MediaSession* session = jlh::MediaSession::CreateNew(suffix.toLatin1().data());
		session->AddSource(jlh::channel_0, jlh::H264Source::CreateNew());
		session->AddSource(jlh::channel_1, jlh::AACSource::CreateNew(fState.audio_samplerate, fState.audio_channels, false));
		jlh::MediaSessionId session_id = rtsp_server->AddSession(session);

		fState.rtsp_server_ = rtsp_server;
		media_session_id_ = session_id;
	}

}

// Tears the pipeline down in reverse dependency order: signal exit, let
// in-flight work drain, then stop audio, then video, then release the
// ffmpeg contexts. The ordering is load-bearing — keep it as is.
void DesktopStreaming::StopLive_()
{
	// Cooperative exit flag polled by the worker threads.
	fState.is_pusher_exit = true;
//
	// Give workers a moment to observe the flag before stopping them.
	// NOTE(review): a fixed 200 ms sleep is a race, not a guarantee —
	// presumably the Stop* calls below also join; confirm.
	Sleep(200);
//	StopRtspPusher(&fState);
	StopAudioEncode(&fState);
	StopAudioCapture(&fState);
	MicrosoftCloseAudioInput(&fState);
	StopVideoEncode(&fState);
	StopVideoCapture(&fState);
	ffmpegCloseVideoInput(&fState);
	ffmpegCloseVideoOutput(&fState);
	CloseVideoConverter(&fState);
//
//	CloseRtspPusher(&fState);
}

// Window is closing: stop the pipeline, the preview timer, and release any
// active RTSP endpoint before letting Qt proceed with the close.
void DesktopStreaming::closeEvent(QCloseEvent* ev)
{
	StopLive_();
	resetRefreshTimer();

	// reset() on an empty shared_ptr is a no-op, so no null check is needed.
	fState.rtsp_pusher_.reset();

	if (fState.rtsp_server_)
	{
		fState.rtsp_server_->Stop();
		fState.rtsp_server_.reset();
	}

	QWidget::closeEvent(ev);
}

// UI entry point: start the pipeline; on success begin the ~25 fps preview
// refresh and flip the start/stop buttons.
void DesktopStreaming::StartLive()
{
	qDebug() << "StartLive";
	StartLive_();
	if (!fState.is_video_encode_running)
		return;

	ui_refresh_timer_id_ = startTimer(40);
	ui.btnStartLive->setEnabled(false);
	ui.btnStopLive->setEnabled(true);
}

// UI entry point: stop the pipeline and the preview timer; once the encoder
// has actually stopped, flip the start/stop buttons back.
void DesktopStreaming::StopLive()
{
	qDebug() << "StopLive";
	StopLive_();
	resetRefreshTimer();
	if (fState.is_video_encode_running)
		return;

	ui.btnStartLive->setEnabled(true);
	ui.btnStopLive->setEnabled(false);
}

/// Queues an encoded video frame and signals the GUI thread to forward it.
/// Presumably called from the video encode thread (TODO confirm), which is
/// why the queue is mutex-protected.
///
/// Fix: the original emitted newFrame() while still holding mutex_; the
/// lock is now scoped to the queue mutation only, and the frame is moved
/// into the queue instead of copied.
void DesktopStreaming::PushVideo(std::shared_ptr<uint8_t>& data, int frame_size, int64_t timestamp)
{
	vFrame vf;
	vf.data = data;
	vf.frame_size = frame_size;
	vf.timestamp = timestamp;
	//vf.type = jlh::VIDEO_FRAME_I;
	{
		std::lock_guard<std::mutex> l(mutex_);
		vframes_.push_back(std::move(vf));
	}
	// Emitting outside the lock: a cross-thread signal does not need mutex_.
	emit newFrame();
}

/// Queues an encoded audio frame and signals the GUI thread to forward it.
/// Presumably called from the audio encode thread (TODO confirm), which is
/// why the queue is mutex-protected.
///
/// Fix: the original emitted newAudio() while still holding mutex_; the
/// lock is now scoped to the queue mutation only, and the frame is moved
/// into the queue instead of copied.
void DesktopStreaming::PushAudio(std::shared_ptr<uint8_t>& data, int frame_size, int64_t timestamp)
{
	vFrame af;
	af.timestamp = timestamp;
	af.data = data;
	af.frame_size = frame_size;
	{
		std::lock_guard<std::mutex> l(mutex_);
		aframes_.push_back(std::move(af));
	}
	// Emitting outside the lock: a cross-thread signal does not need mutex_.
	emit newAudio();
}

/// Slot: drains the pending video frames (swap under the lock, then process
/// lock-free) and forwards each to whichever RTSP endpoint is active.
///
/// Fix: removed the unused local `nCount`.
void DesktopStreaming::slotNewFrame()
{
	std::list<vFrame> frames;
	{
		std::lock_guard<std::mutex> l(mutex_);
		vframes_.swap(frames);
	}

	for (auto& f : frames)
	{
		if (fState.rtsp_pusher_) {
			fState.rtsp_pusher_->PushVideo(f.data.get(), f.frame_size, f.timestamp);
		}

		// media_session_id_ > 0 means the serve-mode session was registered.
		if (fState.rtsp_server_ && (0 < media_session_id_)) {
			fState.rtsp_server_->PushFrame(media_session_id_, jlh::channel_0, f.data, f.frame_size, f.timestamp);
		}
	}
}

/// Slot: drains the pending audio frames (swap under the lock, then process
/// lock-free) and forwards each to whichever RTSP endpoint is active.
///
/// Fix: removed the unused local `nCount`.
void DesktopStreaming::slotNewAudio()
{
	std::list<vFrame> frames;
	{
		std::lock_guard<std::mutex> l(mutex_);
		aframes_.swap(frames);
	}

	for (auto& f : frames)
	{
		if (fState.rtsp_pusher_) {
			fState.rtsp_pusher_->PushAudio(f.data.get(), f.frame_size, f.timestamp);
		}
		// media_session_id_ > 0 means the serve-mode session was registered.
		if (fState.rtsp_server_ && (0 < media_session_id_)) {
			fState.rtsp_server_->PushFrame(media_session_id_, jlh::channel_1, f.data, f.frame_size, f.timestamp);
		}
	}
}


