package FocusCrawler::FirstOrder;
use warnings;
use strict;
our @ISA       = qw(FocusCrawler);
our $VERSION = 1.0;

use FocusCrawler qw(
            &is_target_anchor &get_canonical_url &get_host 
            %params %urls %dumps $frontier_db $frontier_cnt $target_cnt $failure_cnt
        );

use FocusCrawler::FirstOrder::DomParser;
use BerkeleyDB;
use YAML;
use List::MoreUtils qw(any);
use Language::Prolog::Yaswi qw(:load :assert :query);
use Language::Prolog::Sugar
	functors => [qw(text link parent child tag url target only), "rule00".."rule49"], 
	vars => ['X', 'A'..'E'];

our %tokens;
our @rules;
our @facts;
our %url2index;
our $next_index;
our $dom_parser;

my $adaptive_flag;
my $subgroup_cnt_frontier = 0;
my $subgroup_cnt_downloaded = 0;

sub init {
    # Initialize the parent crawler, then cache whether adaptive
    # rule re-weighting (live test statistics) is enabled for this run.
    my ($package, @args) = @_;
    $package->SUPER::init(@args);
    $adaptive_flag = $params{adaptive_flag};
}

sub destroy {
    # Persist the learned rule records so a later run can restore them
    # from $params{rules_dump_file}.
    # Storable is loaded lazily here because this module never `use`s it;
    # without this, the fully-qualified call would fail unless some other
    # module had already happened to load Storable.
    require Storable;
    Storable::store(\@rules, $params{rules_dump_file});
}

sub display_additional_info {
    # Print the list of rule indexes that matched this URL, e.g. "[0, 3, 7]",
    # to STDERR.  Prints nothing when no rule matched.
    my ($package, $info) = @_;
    my $matched = $info->[2];
    return unless @$matched;
    print STDERR '[', join(', ', @$matched), ']';
}

sub display_state {
    # Dump crawler progress plus per-rule learn/test statistics to $fh
    # (defaults to STDERR).  Rule record layout:
    #   [0] head  [1] test total  [2] test pos  [3] h-value
    #   [4] learn total  [5] learn pos
    my $package = shift;
    my $fh = $_[0] || '*STDERR';
    $package->SUPER::display_state(@_);
    print $fh "valid in frontier: $subgroup_cnt_frontier\n";
    print $fh "valid downloaded : $subgroup_cnt_downloaded\n";
    for my $rule_ref (@rules) {
        next if $rule_ref->[1] == 0;    # never exercised during the crawl
        print $fh $rule_ref->[0], "\n";
        # Guard the learn-accuracy division: the original only guarded the
        # test total ([1]); a rule with a zero learn total ([4]) would have
        # caused a divide-by-zero here.
        my $learn_acc = $rule_ref->[4]
            ? 100 * $rule_ref->[5] / $rule_ref->[4]
            : 0;
        printf $fh "\tlearn: \ttotal: %d\tpos: %d\taccuracy: %.2f%%\n",
            $rule_ref->[4], $rule_ref->[5], $learn_acc;
        printf $fh "\ttest : \ttotal: %d\tpos: %d\taccuracy: %.2f%%\n\n",
            $rule_ref->[1], $rule_ref->[2], 100 * $rule_ref->[2] / $rule_ref->[1];
    }
}

sub prefetch_filter {
    # Decide whether a frontier entry should be skipped before fetching.
    # True (skip) exactly when the priority is negative, i.e. no rule
    # matched the link when it was scored.
    my ($package, $priority) = @_;
    return $priority < 0;
}

sub post_check {
    # Called when a URL leaves the frontier.  A non-empty rule-index list
    # ($info->[2]) means it was counted in the "valid in frontier"
    # subgroup, so decrement that counter.
    my ($package, $url, $info) = @_;
    $subgroup_cnt_frontier-- if @{ $info->[2] };
    return;
}

sub post_fetch {
    # Update statistics after a page has been fetched and labelled.
    # Each rule that predicted this URL gets its test total ([1]) bumped,
    # and its positive count ([2]) as well when the label is '+'.
    my ($package, $url, $info, $label) = @_;
    my @matched = @{ $info->[2] };
    $subgroup_cnt_downloaded++ if @matched;
    my $is_positive = ($label eq '+');
    for my $idx (@matched) {
        $rules[$idx][1]++;
        $rules[$idx][2]++ if $is_positive;
    }
}
     
sub parse_postback {
    # Parse a fetched page's DOM, assert its link facts into SWI-Prolog,
    # score every outgoing URL against the learned first-order rules, and
    # push the scored URLs onto the frontier.
    # Args: (content, url, base, label, value, path, digest).
    # Returns the number of URLs actually added to the frontier.
    my $package = shift;
    my ($content, $url, $base, $label, $value, $path, $digest) = @_;
    # Negatively labelled pages contribute no outgoing links.
    return 0 if $label eq '-';
    # Reset the per-page state shared with the parser callbacks
    # (facts_handler fills @facts, anchor2page fills %url2index).
    @facts = ();
    %url2index = ();
    $next_index = 0;
    my $links_count = 0;
    $dom_parser->process($content, $base, $digest, 0);
    if (@facts) {
        while (my($new_url, $new_index) = each %url2index) {
            my @rule_indexes;
            my $best_value = -1;
            for (0..$#rules) {
                # Ask Prolog whether rule number $_ covers page node p<index>.
                if (swi_eval("rule$_(p$new_index)")) {
                    push @rule_indexes, $_;
                    my $h_value;
                    if ($adaptive_flag) {
                        # Adaptive: score from the rule's live test stats
                        # ([1] = total fetched, [2] = positives).
                        $h_value = get_h_value($rules[$_][1], $rules[$_][2]);
                    }
                    else {
                        # Static: h-value fixed at load time in restore_apprentice.
                        $h_value = $rules[$_][3];
                    }
                    $best_value = $h_value if $h_value > $best_value;
                }
            }
            #$best_value =  1 / (1 + exp(-$value)) if $value and $best_value < 0;
            # URLs matching no rule keep $best_value == -1 and are rejected
            # inside add_URL_to_frontier.
            if ( $package->add_URL_to_frontier($new_url, $best_value, [ [$url], $path.'L' ], \@rule_indexes ) ) {
                $links_count++;
            }
        }
        # Retract this page's facts so they cannot leak into the next page.
        swi_retractall(@facts);
    }
    return $links_count;
}

sub facts_handler {
    # DOM-parser callback: parse one Prolog fact string, assert it into
    # the SWI engine, and remember the term so parse_postback can retract
    # it once the page has been scored.
    my ($fact_text) = @_;
    my $term = swi_parse($fact_text);
    swi_assert($term);
    push @facts, $term;
}

sub anchor2page {
    # DOM-parser callback: map an anchor element to a stable per-page
    # integer index.  Returns undef when the anchor yields no crawlable
    # URL; otherwise the same URL always maps to the same index.
    my ($anchor, $base, $digest) = @_;
    my $url = get_new_url($anchor, $base);
    return undef unless defined $url;
    unless (defined $url2index{$url}) {
        # First sighting on this page: hand out the next free index.
        $url2index{$url} = $next_index++;
    }
    return $url2index{$url};
}

sub add_URL_to_frontier {
    # Insert $url into the BerkeleyDB frontier keyed by $priority.
    # Returns 1 when the URL is brand new, 0 otherwise (rejected or
    # already known).  $rules_ref lists the indexes of matching rules.
    my ($package, $url, $priority, $ref, $rules_ref) = @_;
    return 0 if $priority < 0;                 # no rule matched: drop it
    $rules_ref = [] unless defined $rules_ref;

    my $existing = $urls{$url};
    unless (defined $existing) {
        # Brand-new URL: enqueue it and record its metadata.
        $frontier_db->db_put($priority, $url);
        $frontier_cnt++;
        $urls{$url} = [@$ref, $rules_ref, $priority];
        $subgroup_cnt_frontier++ if defined $rules_ref and @$rules_ref;
        return 1;
    }

    my ($old_priority, $old_rules_ref) = ($existing->[3], $existing->[2]);
    die "priority cann't be reference.\n" if ref $old_priority;
    if ($priority > $old_priority) {
        # NOTE(review): this re-enqueues the URL under the higher priority
        # but does not update the stored record ([3]/[2]) — confirm the
        # stale entry and duplicate db key are intentional.
        $frontier_db->db_put($priority, $url);
        $subgroup_cnt_frontier++ if @$rules_ref and not @$old_rules_ref;
    }
    return 0;
}

sub get_new_url {
    # Extract and canonicalize an anchor's href, returning undef when the
    # link is unsuitable: missing/whitespace-bearing href, non-English
    # hreflang, non-HTML type, uncanonicalizable, or already fetched.
    my ($anchor, $base) = @_;
    my $href = $anchor->attr('href');
    my $lang = $anchor->attr('hreflang');
    my $type = $anchor->attr('type');
    return undef unless $href;
    return undef if $href =~ /\s+/;
    return undef if defined $lang and $lang !~ /^en/i;
    return undef if defined $type and $type !~ m{^text/html}i;
    my $canonical = get_canonical_url($href, $base);
    return undef unless defined $canonical;
    # Fetched pages are stored in %urls with non-reference info; skip them.
    return undef if exists $urls{$canonical} and not ref $urls{$canonical};
    return $canonical;
}

sub get_h_value {
    # Pessimistic rule score: the lower bound of a confidence interval
    # (z = 1.64) on the positive ratio $pos/$total, clamped at zero.
    # Returns 0 when there is no evidence ($total == 0) or when the bound
    # would be non-positive.
    my ($total, $pos) = @_;
    return 0 unless $total;
    my $p      = $pos / $total;
    my $margin = 1.64 * sqrt( $p * (1 - $p) / $total );
    my $lower  = $p - $margin;
    return $lower > 0 ? $lower : 0;
}

sub reorder_frontier {
    # Rebuild the frontier from scratch (typically after re-learning):
    # empty the priority database, then re-parse every cached page dump so
    # each outgoing URL is re-scored under the current rule set.
    my $package = shift;
    print STDERR "\n$frontier_cnt URLs found in the frontier.\n\n" unless $params{silent};

    # Drain the BerkeleyDB frontier and drop the matching %urls records.
    my $cursor = $frontier_db->db_cursor();
    my ( $priority, $url ) = ( '', '' );
    while ( $cursor->c_get( $priority, $url, DB_NEXT ) == 0 ) {
        delete $urls{$url};
        $cursor->c_del();
    }
    undef $cursor;
    $frontier_cnt = 0;

    # Sanity check: every remaining %urls entry should now hold a plain
    # scalar (a fetched page), not an info arrayref.  Deliberately blocks
    # on STDIN so an operator notices the inconsistency.
    while (my ($url, $info) = each %urls) {
        if (ref $info) {
            print "The info of $url cann't be reference, press any key to end.\n";
            <STDIN>;
        }
    }

    my $count = 0;
    print STDERR "\nStart reordering URLs in the frontier...\n\n" unless $params{silent};
    # Re-run link extraction and scoring over every stored dump; the url
    # argument is a placeholder ('New') since only outgoing links matter
    # here, and path 'N' marks entries created during reordering.
    while ( my ( $digest, $dump_ref ) = each %dumps ) {
        $count++;
        unless ($params{silent}) {
            if ($count % 200 == 0) {
                # Periodic progress line: record count, frontier size,
                # and the rule-matching subgroup size.
                print STDERR "Record $count: $frontier_cnt ($subgroup_cnt_frontier)\n";
            }
        }
        next if $dump_ref->{no_follow};
        $package->parse_postback( $dump_ref->{content}, 'New',
            $dump_ref->{base}, $dump_ref->{label}, $dump_ref->{value}, 'N', $digest );
    }
    print STDERR
        "\nDone! $frontier_cnt URLs were reinserted into the frontier.\n\n" unless $params{silent};
}

sub learn {
    # The interactive rule-induction loop (driving the external
    # process_script / learn_script until the operator is satisfied) is
    # currently disabled; only the previously learned dictionary and
    # rules are (re)loaded into the crawler.
    my $package = shift;
    my $config_file = $params{config_file};
    $package->restore_apprentice();
}

sub restore_apprentice {
    # Reload the learned apprentice state:
    #   1. the token dictionary      -> %tokens
    #   2. induced rules and stats   -> @rules and the SWI-Prolog engine
    # then rebuild the DOM parser that consults them.
    # Dies when either input file cannot be opened.
    open my $tokens_fh, '<', $params{dict_file}
        or die "open $params{dict_file}: $!";
    while (my $token = <$tokens_fh>) {
        chomp $token;
        $tokens{$token} = 1;
    }
    close $tokens_fh;

    open my $rules_fh, '<', $params{rules_file}
        or die "open $params{rules_file}: $!";
    while (<$rules_fh>) {
        next unless /^rule/;
        chomp(my $rule = $_);
        # The line following each rule head carries its learning stats.
        my ($pos, $neg) = <$rules_fh> =~ /pos: (\d+) neg: (\d+) a:/;
        # BUG FIX: get_h_value takes (total, pos); the old code passed
        # ($pos, $neg), computing a bound on neg/pos instead.  Use the same
        # (total, pos) pairing as the adaptive path in parse_postback so
        # the static h-value ([3]) is consistent with [4]/[5] below.
        my $h_value = get_h_value($pos + $neg, $pos);
        # Rule record layout:
        #   [0] head  [1] test total  [2] test pos  [3] static h-value
        #   [4] learn total  [5] learn pos
        push @rules, [$rule, 0, 0, $h_value, $pos + $neg, $pos];
    }
    close $rules_fh;

    # Load the rule clauses into SWI-Prolog for swi_eval in parse_postback.
    swi_consult($params{rules_file});

    $dom_parser = FocusCrawler::FirstOrder::DomParser->new(
        \%tokens, \&anchor2page, \&facts_handler, \&is_target_anchor );
}
                             

1;
