#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>   // nanosleep / struct timespec, used by delay()

#include <gtest/gtest.h>

#include "../task.h"
#include "../timer.h"

/// Task used by the timer tests: verifies it is executed close to its
/// scheduled target time, then frees itself.
class TestTask: public datashuttle::BaseTask
{
public:
	/// Tasks are always runnable in the tests.
	virtual bool isValid( ) const 
	{
		return true;//always return true in test
	}

	/// Measure how far from the scheduled target time the task actually ran.
	virtual void run()
	{
		long long timecurrent = datashuttle::timenow();
		long long delta = targetTime() - timecurrent;
		// BUG FIX: unqualified abs() binds to the C abs(int) from <stdlib.h>,
		// silently truncating a 64-bit delta; llabs() keeps the full range.
		delta = llabs(delta);
	//	EXPECT_LE(delta,5);//allow 1 millisecond error
		
		// The task owns itself once dispatched.  NOTE(review): assumes
		// BaseTask has a virtual destructor -- confirm in ../task.h.
		delete this;
	}

	/// Nothing to roll back when the task is cancelled.
	virtual void cancel() 
	{
		return;//do nothing here
	}
};
/// Block the calling thread for roughly @p t milliseconds.
/// @param t  duration in milliseconds; non-positive values return at once
///           (the original passed them through, making nanosleep fail
///           with EINVAL on a negative tv_sec/tv_nsec).
void delay(long long t)
{
	if (t <= 0)
		return;
	struct timespec spec;
	spec.tv_sec = t / 1000;                    // whole seconds
	spec.tv_nsec = (t % 1000) * 1000 * 1000;   // leftover milliseconds as ns
	// Remainder on interruption is ignored: good enough for a test helper.
	nanosleep(&spec, NULL);
}

TEST(TimerTEST,normal)
{
	// Phase 1: schedule sec*items tasks with target times spread over
	// [800, 800+sec) ms, then drain the table and time the drain.
	datashuttle::TimerTable tt;
	tt.start( );

	int sec = 1000;
	int items = 100;
	for( int i = 0 ; i < sec ; i ++ )
	{
		for( int j = 0 ; j < items; j ++ )
		{
			// Each task frees itself inside run(), so no cleanup here.
			TestTask* p = new TestTask();
			p->updateTimer(i + 800);
			tt.addTask(p,i + 800 );
		}
	}
	// NOTE(review): %d assumes extraItemCount() returns int -- confirm
	// against the declaration in ../timer.h.
	printf("extra count[%d]\n",tt.extraItemCount());
	long long timestart = datashuttle::timenow();
	int count  = 0;
	while( count < (sec* items) )
	{
		// tte is an out-parameter of getTask(); presumably the time (ms)
		// until the next task is due -- verify TimerTable's contract.
		long long tte = 0;
		datashuttle::BaseTaskPtr p = 0;
		do
		{
	//		long long timecurrent = datashuttle::timenow();
	//		tte = tt.targetTime() - timecurrent;
	//		if( tte > 0 )
	//			break;
			p = tt.getTask( tte );
			if(p)
			{
				//printf("runnig....%d\n",count);
				p->run();// the task deletes itself inside run()
				count++;
			}		
		}while( p!= 0);
		// Nothing ready: sleep until the next task is (roughly) due.
		if( tte > 1)
		{
			printf("sleep...\n");
			delay(tte);
		}
	}
	long long timestop = datashuttle::timenow();
	printf("total %d ----  %lld\n",count,timestop - timestart);

	// Phase 2: baseline -- run 100k tasks directly (no timer table) to
	// compare raw dispatch cost against the timed drain above.
	std::vector<TestTask*> tasks;tasks.resize(100*1000);
	TestTask** t=&tasks[0];
	for( int i = 0 ; i < 100 * 1000 ; i++)
		t[i] = new TestTask();
	
	timestart = datashuttle::timenow();
	for( int i = 0 ; i < 100 * 1000 ; i ++ )	
	{
		t[i]->run();
	}
	timestop = datashuttle::timenow();
	printf("total %lld\n",timestop - timestart);
}

int main(int argc, char* argv[])
{
	testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}

