# Web-Scraper.pl

#!/usr/bin/perl

use strict;
use warnings;
use URI;
use Web::Scraper;

# Scrape the UCI ProTour team-list page and write the team names to
# files/UCITeamData.txt, one name per line.

# Page listing the UCI teams.
my $url = "http://www.uciprotour.com/templates/UCI/UCI2/layout.asp?MenuId=MTU4MzI&LangId=1";

# Open the output file up front so a path/permission problem fails fast,
# before we spend time on the HTTP fetch. Lexical handle + 3-arg open.
open my $out_fh, ">", "files/UCITeamData.txt"
	or die "Cannot open files/UCITeamData.txt for writing: $!";

# Build the scraper. Both process() rules match the same <a> elements
# inside table#UCITeamList, so urls[] and teams[] stay index-aligned.
# NOTE: code placed directly inside this block would run once at
# construction time, not per scrape — keep it declarative.
my $teamdata = scraper {
	# Link targets for each team.
	process "table#UCITeamList > tr > td > a", 'urls[]'  => '@href';
	# Visible link text (the team name).
	process "table#UCITeamList > tr > td > a", 'teams[]' => 'TEXT';
};

# Fetch and scrape the page.
my $result = $teamdata->scrape(URI->new($url));

# The table interleaves three <a> cells per team; only the middle one
# (index % 3 == 1) carries the team name we want — the original
# condition ($i%3 != 0 && $i%3 != 2) reduces to exactly this.
# Iterate over the full index range rather than stopping at the first
# false element, so an empty or "0" entry cannot truncate the list.
my $teams = $result->{teams} || [];
for my $i (0 .. $#{$teams}) {
	next unless $i % 3 == 1;
	print "$teams->[$i]\n";
	print {$out_fh} "$teams->[$i]\n";
}

# Check close on a write handle: buffered write errors surface here.
close $out_fh
	or die "Error closing files/UCITeamData.txt: $!";