#!/usr/bin/perl

use strict;
use warnings;

use Routeviews::Archive;
use Routeviews::Descriptions;
use DateTime;
use JSON;
use Digest::SHA1 qw/sha1_hex/;
use Regexp::Common;
use Regexp::Common::net::CIDR;
use Getopt::Std;
use LWP::Simple;
use File::stat;
use Unicode::String qw/utf8/;
use Data::Dumper;

# Command-line options (see usage() for the full description).
my %opts;
getopts('hDfs:F:d:', \%opts);

my $fastmode   = $opts{'f'};
my $samplemode = $opts{'s'};
my $filepath   = $opts{'F'} || '/tmp/oix-full-snapshot-latest.dat.bz2';
my $url        = 'http://mirrorbrain.org/routeviews/oix-full-snapshot-latest.dat.bz2';
my $delay      = $opts{'d'} || 8; # hours before the cached snapshot goes stale
my $debug      = $opts{'D'};

# usage: return the -h help text. Interpolates the effective defaults
# ($filepath, $delay) so the message always reflects the running config.
# Fix: the original omitted -d (delay), which getopts() accepts.
sub usage {
    return <<EOF;
Usage: $0 -f -s 10000
    -h  --help:     this message
    -f  --fast:     fast mode (db_Main->{'AutoCommit'} = 0)
    -s  --sample:   sample mode (take 1 out of \$x lines, used for testing)
    -D  --debug:    debug mode
    -F  --filepath: specify the file path for the routeviews db (default: $filepath)
    -d  --delay:    hours before a cached download is re-fetched (default: $delay)

EOF
}

die(usage()) if($opts{'h'});

# Fetch the routeviews snapshot unless a fresh local copy exists.
# redownload_file() already encodes "missing or older than N hours", so
# use it instead of duplicating the stat logic inline.
# mirror() performs a conditional GET: 200 means downloaded, 304 means
# the local copy is already current -- both are success here (the
# original died on 304, killing the script whenever the upstream file
# had not changed since the last run).
if(redownload_file($filepath,$delay)){
    warn 'downloading file' if($debug);
    my $ret = mirror($url,$filepath);
    die('error code: '.$ret) unless($ret == 200 || $ret == 304);
}

my $h = get_asns();        # ASN -> description
my $reg = get_registry();  # ASN -> { cc, registry }

# redownload_file: decide whether a cached download needs refreshing.
# Returns 1 when $file is missing or its inode change time is more than
# $max_age_hours hours in the past; 0 when the cache is still fresh.
sub redownload_file {
    my ($file, $max_age_hours) = @_;
    return(1) unless(-e $file);
    my $st = stat($file) or die($!);
    my $age = time() - $st->ctime();
    return($age > 3600 * $max_age_hours ? 1 : 0);
}

# Hour-granularity timestamp used to key the data: all records loaded
# within the same hour share a timestamp, so the sha1 digest of a record
# changes when the hour rolls over.
# Fix: the original concatenated ymd . 'T' . hour . '00:00Z', producing
# an invalid stamp like "2013-05-01T900:00Z" (no colon, no zero padding);
# emit a well-formed ISO-8601 hour instead.
my $unique;
my $dt = DateTime->from_epoch(epoch => time());
$dt = sprintf('%sT%02d:00:00Z', $dt->ymd(), $dt->hour());

# create the new db handles
my $a = Routeviews::Archive->new();
my $d = Routeviews::Descriptions->new();

# Per-insert AutoCommit is slow; fastmode (-f) is intended for priming an
# empty database in one large transaction (committed at the end of the run).
# NOTE(review): only the non-fast branch touches AutoCommit here -- fastmode
# relies on the handles' default AutoCommit already being 0, as implied by
# usage(). Confirm against the Routeviews::Archive/Descriptions connection
# defaults; if they default to 1, fastmode silently does nothing.
unless($fastmode){
    $a->db_Main->{'AutoCommit'} = 1;
    $d->db_Main->{'AutoCommit'} = 1;
} else {
    warn 'using fastmode' if($debug);
}

# The snapshot is large, so stream it through bzip2 rather than slurping.
# Fixes: list-form pipe open avoids handing the -F path to a shell, and
# the open is now checked (the original 2-arg open silently produced an
# empty run on failure).
open(F, '-|', 'bzip2', '-d', '-c', $filepath) or die('bzip2 pipe failed: '.$!);
my $x = 0;
while(<F>){
    # -s N keeps 1 of every N rows; handy for quick test loads
    if($samplemode){
        next unless($x++ % $samplemode == 0);
    }
    ## TODO -- clean this up
    # $1 = announced prefix, $2 = the AS-path portion of the row
    next unless(/^\*\s+($RE{'net'}{'CIDR'}{'IPv4'}).*$RE{'net'}{'IPv4'}\s+\d+\s+\d+\s+\d+\s(.*) [i|?]/);
    warn $x++ if($debug);

    # (prefix, AS-path hops...)
    my @array = ($1,split(/\s/,$2));

    # a trailing {AS1,AS2,...} set means the route is multi-homed
    if($array[$#array] =~ /^{(\S+)}$/){
        foreach (split(/,/,$1)){
            my @x = @array;
            # insert one record per origin AS in the set
            @x = ($x[0],$x[$#x-1],$_);
            insert(@x);
        }
    } else {
        # our monitor point is directly peer'd with the as-path
        next if($#array < 3);
        if($#array > 2){
            # routeviews repeats the origin AS (path prepending); collapse
            # exact duplicates. Fix: the original used a regex match
            # ($array[-1] =~ $array[-2]), which treated the ASN as a
            # pattern and let e.g. "70" swallow "701"; string equality is
            # what is intended here.
            while($array[$#array] eq $array[$#array-1]){
                pop(@array); # eliminate the duplicate origin
            }
            # catch if it's something like 701 701 701 701 and all we're left with is a prefix
            next if($#array == 1);
            # remap to (prefix, origin ASN, peer ASN)
            @array = ($array[0],$array[$#array],$array[$#array-1]);
        }
        insert(@array);
    }
}

# insert: write one (prefix, origin asn, peer) observation to the archive
# and descriptions tables.
# Dedupes in-process via $unique (memory beats a per-row db unique check);
# db-level unique violations (rows from a previous run) are swallowed, any
# other insert error is fatal.
sub insert {
    my ($prefix,$asn,$peer) = @_;

    # skip rows already written during this run
    my $key = $prefix.','.$asn.','.$peer;
    return if(exists($unique->{$key}));

    # build the record hash, then serialize it to json for the archive blob
    my $hash = {
        prefix      => $prefix,
        asn         => $asn,
        asn_desc    => $h->{$asn},
        cc          => $reg->{$asn}->{'cc'},
        registry    => $reg->{$asn}->{'registry'},
        peer        => $peer,
        timestamp   => $dt,
    };
    if($peer){
        $hash->{'peer_desc'}        = $h->{$peer};
        $hash->{'peer_cc'}          = $reg->{$peer}->{'cc'};
        $hash->{'peer_registry'}    = $reg->{$peer}->{'registry'};
    }

    # create the data blob
    my $json = to_json($hash);

    # attempt the archive insert; the sha1 column is unique, so a
    # duplicate from an earlier run fails quietly and we skip it below
    my $id = eval {
        $a->insert({
            asn     => $asn,
            type    => 'route',
            sha1    => sha1_hex($json),
            data    => $json,
        })
    };
    if($@){
        unless($@ =~ /unique/){
            warn Dumper($hash) if($debug);
            die($@);
        }
    }
    # Fix: the original used "next" here, which is invalid flow control
    # inside a subroutine ("Exiting subroutine via next"); return instead.
    return unless($id);
    $unique->{$key} = $id;

    my $did = eval {
        $d->insert({
            archiveid   => $id->id(),
            %$hash
        })
    };
    # Fix: was an unconditional leftover debug warn; gate it on -D
    warn Dumper($did) if($debug);
    if($@){
        unless($@ =~ /unique/){
            warn Dumper($hash) if($debug);
            die($@);
        }
    }
}
# Final flush: in fast mode nothing has been committed along the way, so
# push the whole run to the database in one go, then release the pipe.
if($fastmode){
    $a->dbi_commit();
    $d->dbi_commit();
}
close(F);

# get_asns: return a hashref mapping ASN -> description, sourced from
# potaroo's tab-separated asn-ctl.txt (column 0 = asn, column 3 = text).
# The file is cached in /tmp and refreshed when older than 12 hours.
sub get_asns{
    my $cache = '/tmp/asn-ctl.txt';
    if(redownload_file($cache,12)){
        warn 'redownloading: '.$cache;
        mirror('http://www.potaroo.net/bgp/iana/asn-ctl.txt',$cache);
    }
    # Fix: lexical handle + checked 3-arg open -- the original's unchecked
    # bareword open(F,...) reused the script-wide F handle and silently
    # returned an empty table when the cache file was missing.
    open(my $fh, '<', $cache) or die('open '.$cache.': '.$!);
    my $hash;
    while(my $line = <$fh>){
        chomp($line);
        my @a = split(/\t/,$line);
        # short/malformed rows would hand undef to utf8() and warn; skip
        next unless(defined($a[3]));
        # round-trip through Unicode::String to normalize the description
        $a[3] = utf8($a[3]);
        $a[3] = $a[3]->utf8();
        $hash->{$a[0]} = $a[3];
    }
    close($fh);
    return($hash);
}

# get_registry: return a hashref mapping ASN -> { cc, registry }, built
# from the five RIR delegated-* stats files (cached in /tmp, refreshed
# when older than 12 hours).
sub get_registry {
    my @lines;
    foreach ('ftp://ftp.arin.net/pub/stats/arin/delegated-arin-latest', 'ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest', 'ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest', 'ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest', 'ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest'){
        # Fix: guard the match -- the original read $1 without checking
        # that the pattern matched (a stale $1 would corrupt the path)
        next unless(m/(delegated-(\S+)-latest)/);
        my $f = '/tmp/'.$1;
        if(redownload_file($f,12)){
            warn 'redownloading: '.$f if($debug);
            # Fix: list-form system bypasses the shell; the original used
            # interpolated backticks
            system('wget', $_, '-q', '-O', $f);
        }
        # Fix: lexical, checked 3-arg open (was unchecked bareword F)
        open(my $fh, '<', $f) or die('open '.$f.': '.$!);
        push(@lines, <$fh>);
        close($fh);
    }
    # delegated format: registry|cc|type|start|...; keep only asn records
    my $hash;
    foreach(@lines){
        next unless(/^(\S+)\|(\S+)\|asn\|(\d+)/);
        $hash->{$3}->{cc} = $2;
        $hash->{$3}->{registry} = $1;
    }
    return($hash);
}
