use strict;
use warnings;

use File::Spec;
use FindBin;
use lib File::Spec->catfile($FindBin::RealBin, qw/.. lib/);
use Test::More;
use Test::Number::Delta within => 1e-4;

use Data::Dumper;

use Lingua::EN::WSD::WebBased::Vectorizer;
use Lingua::EN::WSD::WebBased::Vector;
use Lingua::EN::WSD::WebBased::Utils;

# Locate the bundled WordNet 2.1 data directory relative to this test
# script.  catdir (not catfile) because this is a directory path.
my $wn_path = File::Spec->catdir($FindBin::RealBin, qw/.. data wn21/);

# WordNet data may be absent in some environments; skip the whole suite
# rather than fail when it cannot be loaded.
my $wn = eval { load_wordnet($wn_path) };

plan skip_all => 'WordNet not loaded' if $@;

plan tests => 25;

# Direct method call rather than indirect object syntax ("new Class ..."),
# which is ambiguous to the parser and discouraged in modern Perl.
my $v = Lingua::EN::WSD::WebBased::Vectorizer->new(wordnet => $wn);

# Helper: vectorize $text with the shared vectorizer and deep-compare the
# resulting word-count hash against the expected structure.
sub test_vectorizer {
    my ($text, $expected) = @_;
    return is_deeply($v->vectorize($text), $expected, "vectorize: $text");
}

# test vectorizing: each case pairs an input string with the expected
# word-count hash.
my @vectorize_cases = (
    [ 'This is a cat. Those are cats', { cat => 2 } ],

    [ 'This is a <b>cat</b>. Those are <i>cats</i>. &lt;test&gt;',
      { cat => 2, test => 1 } ],

    [ 'I like sports, so I usually buy a lot of sporting goods.',
      { like => 1, buy => 1, sport => 2, good => 1,
        usually => 1, sporting => 1, lot => 1 } ],
);

test_vectorizer @$_ for @vectorize_cases;

# basic tests of sparse vector
#
# The sparse vector reports 0 for any vocabulary key it has no stored
# value for, and (per the tests below) discards writes to keys outside
# its vocabulary.  Every assertion now carries a description so failures
# are self-explanatory; the test count (12 is + 1 ok + 3 is_deeply = 16)
# is unchanged, keeping the plan of 25 intact.

my %vocabulary = (a => 1, b => 1, c => 1);
my $v1 = build_sparse_vector({ }, \%vocabulary);

# An empty vector reads as 0 for every vocabulary key.
is $v1->{$_}, 0, "empty vector: $_ is 0" for qw/a b c/;

# A write to an out-of-vocabulary key is silently discarded ...
$v1->{d} = 2;
ok !exists $v1->{d}, 'out-of-vocabulary key is not stored';

# ... and leaves the in-vocabulary entries untouched.
is $v1->{$_}, 0, "after OOV store: $_ still 0" for qw/a b c/;

# An in-vocabulary store is readable; other entries stay 0.
$v1->{a} = 1;

is $v1->{a}, 1, 'stored value is readable';
is $v1->{$_}, 0, "unrelated key $_ still 0" for qw/b c/;

# delete() resets an entry to the implicit 0 rather than removing the key.
delete $v1->{a};

is $v1->{$_}, 0, "after delete: $_ reads 0" for qw/a b c/;

# keys() enumerates the full vocabulary, stored or not.
is_deeply([sort keys %$v1], [qw/a b c/], 'keys cover whole vocabulary');

# each() iterates every vocabulary key with its current value.
$v1->{a} = 1;
$v1->{b} = 2;
$v1->{c} = 3;
my (@k, @v);
while (my ($k, $v) = each %$v1) {
    push @k, $k;
    push @v, $v;
}

is_deeply([sort @k], [qw/a b c/], 'each: all keys visited');
is_deeply([sort @v], [qw/1 2 3/], 'each: all values visited');

# test normalization
#
# normalize() scales the stored counts so they sum to 1 (here 1+2+1+1=5,
# so each entry is divided by 5); unseen vocabulary entries stay 0.

my $vocabulary = build_vocabulary([qw/a b c d e/]);
$v1 = build_sparse_vector({a => 1, b => 2, c => 1, d => 1}, $vocabulary);

(tied %$v1)->normalize;
delta_ok((tied %$v1)->flatten([qw/a b c d e/]), [qw/.2 .4 .2 .2 0/],
    'normalize: counts become a probability distribution');

# test smoothing

# With distinct counts 1..4 over vocab a..e, every seen count is raised
# by one and the unseen entry gets 1 (add-one behaviour in this case).
$v1 = build_sparse_vector({a => 1, b => 2, c => 3, d => 4}, $vocabulary);
(tied %$v1)->smooth;
delta_ok((tied %$v1)->flatten([qw/a b c d e/]), [qw/2 3 4 5 1/],
    'smooth: each count incremented, unseen entries get 1');

# With repeated low counts the smoothed values are fractional.
# NOTE(review): these constants look like Good-Turing-style discounting
# output — confirm against the Vectorizer/Vector implementation.
$v1 = build_sparse_vector({a => 1, b => 1, c => 2, d => 2, e => 3, f => 4, g => 5},
    build_vocabulary([qw/a b c d e f g h i/]));
(tied %$v1)->smooth;
delta_ok((tied %$v1)->flatten([qw/a b c d e f g h i/]),
    [qw/1.2299 1.2299 1.8988 1.8988 2.5719 3.2472 3.9236 1 1/],
    'smooth: equal input counts smooth to equal fractional values');

# Accessors after smoothing: mass assigned to each unseen event, and the
# map of seen events (seven distinct keys were stored above).
delta_ok((tied %$v1)->unseen, 1, 'smooth: unseen mass is 1');
is(scalar keys %{(tied %$v1)->seen}, 7, 'smooth: seven seen events');

# smooth() and normalize() chain: {a=>2, b=>2, c=>1} scaled by 1/5.
$v1 = build_sparse_vector({a => 1, b => 1}, build_vocabulary([qw/a b c/]));
delta_ok((tied %$v1)->smooth->normalize->flatten([qw/a b c/]), [qw/.4 .4 .2/],
    'smooth then normalize yields a proper distribution');

