

<?php

use VDB\Spider\Discoverer\XPathExpressionDiscoverer;
use Symfony\Component\EventDispatcher\Event;
use VDB\Spider\Event\SpiderEvents;
use VDB\Spider\StatsHandler;
//use VDB\Spider\RequestHandler\GuzzleRequestHandler;
use LDL\Twspider\C\Timer\TimeRun;
use LDL\Twspider\C\Timer\ThreadJob;
use LDL\Twspider\C\Timer\TimeJob;
//use VDB\Spider\Spider;

use  LDL\Twspider\Spider ;
use  LDL\Twspider\C\StackArray ;
use  VDB\Spider\Stackable ;

// Create Spider
var_dump( $argv) ; 
//sleep(1);
require __DIR__ . '/../../vendor/autoload.php';
//$c=['R',"SSRR"];
$spider = new Spider('http://yeyel.top');

/*
 * include "ccc.php";
 * 
 * $a->a();
 */


//exit  ;


// Register a timer-backed thread job on the spider. The first argument is
// 100000 — presumably an interval in microseconds; NOTE(review): confirm
// against ThreadJob::getInstance(). The callback cancels its own job on the
// first run, so it effectively fires once.
$threadJob = ThreadJob::getInstance(100000, function ($job, $worker) use ($spider) {
    // Show which pthreads worker is executing this job.
    echo $worker->getCurrentThreadId() . "=====-----\n";
    // One-shot: stop the timer job after the first invocation.
    $job->cancel();
});

$spider->addTask($threadJob);


// Wire up URI discovery. Without a discoverer the spider finds nothing to
// enqueue; here every <a> element on a fetched page is a candidate link.
$discovererSet = $spider->getDiscovererSet();
$discovererSet->set(new XPathExpressionDiscoverer('//a'));

// Follow links up to 11 levels deep from the seed URI.
$discovererSet->maxDepth = 11;

// Cap the crawl queue so this example stays small.
$spider->getQueueManager()->maxQueueSize = 10;

// Callback intended for a user-abort event (e.g. SIGINT). Currently it only
// prints a message — the exit() that would actually stop the crawl is
// disabled. Not registered anywhere in this file.
function tta(Event $event)
{
    echo "\nCrawl aborted by user.\n";
    //exit();
}

// Demo of a closure capturing a local by value: aa() prints a marker line
// and returns a closure that, when invoked, echoes the captured string
// wrapped in CJK characters.
function aa()
{
    echo "qweqw\n";
    $captured = "xxx";

    return function () use ($captured) {
        echo "嘻嘻{$captured}嘻嘻";
    };
}

/*
 * $d=array("a" ,'1' , '2' ,'3') ;
 * var_dump($d);
 * rsort($d);
 * var_dump($d);
 * exit;
 * //aa()();
 */

  $spider->getDispatcher()->addListener(
	  SpiderEvents::SPIDER_CRAWL_RESOURCE_PERSISTED,function($event,$name,$lastre,$disp){
	  
		  
            echo "\n[$name]\t:" . $event->getArgument('uri')->toString();
		  echo get_class($event)."\n";
	    //echo $event->getArgument("resource");
	    var_dump($lastre);
	    return array($lastre,"xxx1");
	  }
  );
 
// Add a subscriber that collects crawl stats from the Spider (and could also
// be attached to the QueueManager's dispatcher — see the disabled line below).
// There are more components that dispatch events you can use.
$statsHandler = new StatsHandler();
// NOTE(review): $statsHandler2 is constructed but never subscribed (its only
// use is the commented-out line below) — dead unless re-enabled.
$statsHandler2 = new StatsHandler();
//$spider->getQueueManager()->getDispatcher()->addSubscriber($statsHandler);

$spider->getDispatcher()->addSubscriber($statsHandler);
//$spider->getDispatcher()->addSubscriber($statsHandler2);
//var_dump($spider->getDispatcher());
//var_dump($d->getIterator());
//$d->sort();
// Kick off the crawl. NOTE(review): the start()/join() pair suggests a
// threaded spider — start() launches the work and join() blocks until it
// completes; confirm against LDL\Twspider\Spider.
//
// FIX: removed ~40 lines of dead commented-out code (iterator demos, the
// non-threaded $spider->crawl() path, and the stats report) that obscured
// the two live statements here. Recover from VCS history if needed.
$spider->start();

// Block until the spider's worker threads finish before the script exits.
$spider->join();
