#!/usr/bin/perl -w

use strict;

use Cwd qw/abs_path/;
use File::Temp qw/tempdir/;
use lib './lib';

use WebPAC::Common 0.02;
use WebPAC::Parser 0.08;
use WebPAC::Input 0.16;
use WebPAC::Store 0.14;
use WebPAC::Normalize 0.22;
use WebPAC::Output::TT;
use WebPAC::Validate 0.06;
use WebPAC::Output::MARC;
use WebPAC::Config;
use Getopt::Long;
use File::Path;
use Time::HiRes qw/time/;
use File::Slurp;
use Data::Dump qw/dump/;
use Storable qw/dclone/;

use Proc::Queue size => 1;
use POSIX ":sys_wait_h"; # imports WNOHANG

=head1 NAME

run.pl - start WebPAC indexing

B<this command will probably go away. Don't get used to it!>

Options:

=over 4

=item --offset 42

start loading (all) databases at offset 42

=item --limit 100

limit loading to 100 records

=item --clean

remove database and Hyper Estraier index before indexing

=item --only=database_name/input_filter

reindex just single database (legacy name is --one)

C</input_filter> is optional part which can be C<name>
or C<type> from input

=item --config conf/config.yml

path to YAML configuration file

=item --stats

disable indexing, modify_* in configuration and dump statistics about field
and subfield usage for each input

=item --validate path/to/validation_file

turn on extra validation of input records, see L<WebPAC::Validation>

=item --marc-lint

By default turned on if normalisation file has C<marc*> directives. You can disable lint
messages with C<--no-marc-lint>.

=item --marc-dump

Force dump of input and MARC records for debugging.

=item --parallel 4

Run databases in parallel (approximately the same as the number of processors
in the machine if you want to use full load)

=item --only-links

Create just links

=item --merge

Create merged index of databases which have links

=back

=cut

# Command-line options with their defaults.
my $offset;		# first record to load (--offset)
my $limit;		# maximum number of records to load (--limit)

my $clean = 0;		# remove database and index before indexing
my $config_path;	# alternate path to YAML configuration file
my $debug = 0;		# incremented once per --debug
my $only_filter;	# 'database_name/input_filter' selector
my $stats = 0;		# collect field statistics instead of indexing
my $validate_path;	# path to validation definition file
my $marc_lint = 1;	# lint generated MARC records (--no-marc-lint to disable)
my $marc_dump = 0;	# dump input and MARC records for debugging
my $parallel = 0;	# number of parallel worker processes
my $only_links = 0;	# only create links, skip indexing
my $merge = 0;		# create merged index batch file

# direct method calls instead of indirect object syntax
my $log = WebPAC::Common->_new->_get_logger();

GetOptions(
	"limit=i" => \$limit,
	"offset=i" => \$offset,
	"clean" => \$clean,
	"one=s" => \$only_filter,	# legacy alias for --only
	"only=s" => \$only_filter,
	"config=s" => \$config_path,	# was "config" (no =s): the path argument was silently dropped
	"debug+" => \$debug,
	"stats" => \$stats,
	"validate=s" => \$validate_path,
	"marc-lint!" => \$marc_lint,
	"marc-dump!" => \$marc_dump,
	"parallel=i" => \$parallel,
	"only-links!" => \$only_links,
	"merge" => \$merge,
);

my $config = WebPAC::Config->new( path => $config_path );

#print "config = ",dump($config) if ($debug);

die "no databases in config file!\n" unless ($config->databases);

$log->info( "-" x 79 );

my $log_file = 'log';

# Rotate the previous run's log out of the way so each run starts fresh.
if (-e $log_file ) {	# && -s $log_file > 5 * 1024 * 1024) {
	$log->info("moved old log with ", -s $log_file, " bytes to '${log_file}.old'");
	# low-precedence 'or' so the failure check applies to rename(), not to
	# the destination string (plain || bound to "${log_file}.old" before)
	rename($log_file, "${log_file}.old") or $log->logwarn("can't rename $log_file to ${log_file}.old: $!");
}

my $estcmd_fh;
my $estcmd_path = './estcmd-merge.sh';
if ($merge) {
	# shell batch file which merges Hyper Estraier node indexes after indexing
	open($estcmd_fh, '>', $estcmd_path) || $log->logdie("can't open $estcmd_path: $!");
	print $estcmd_fh 'cd /data/estraier/_node/ || exit 1',$/;
	print $estcmd_fh 'sudo /etc/init.d/hyperestraier stop',$/;
	$log->info("created merge batch file $estcmd_path");
}


# Optional extra validation of input records (see WebPAC::Validate).
my $validate;
$validate = WebPAC::Validate->new(
	path => $validate_path,
) if ($validate_path);


my $use_indexer = $config->use_indexer;
# validation implies stats-only mode: no index is written in either case
$stats ||= $validate;
if ($stats) {
	$log->debug("disabled indexing for stats collection");
	$use_indexer = undef;
} else {
	$log->info("using $use_indexer indexing engine...");
}

# parse normalize files and create source files for lookup and normalization

my $parser = WebPAC::Parser->new( config => $config );

my $total_rows = 0;	# records processed across all databases/inputs
my $start_t = time();	# start time for the rec/sec summary

my @links;	# deferred coderefs which add inter-database links after indexing

if ($parallel) {
	$log->info("Using $parallel processes for speedup");
	Proc::Queue::size($parallel);
}

# Build the per-record config passed to WebPAC::Normalize::data_structure:
# a deep copy of the database config annotated with the current database
# name (_), record number (_mfn) and input definition (input).
# Dies via logconfess when any of the required arguments is missing.
sub create_ds_config {
	my ($db_config, $database, $input, $mfn) = @_;

	# deep copy so normalization can't modify the shared configuration
	my $c = dclone( $db_config );

	$log->logconfess("need database") unless ($database);
	# MFN 0 is a valid numeric record id, so test definedness rather
	# than truth (plain "$mfn ||" would wrongly die on 0)
	$log->logconfess("need mfn") unless (defined $mfn);
	$log->logconfess("need input") unless ($input);

	$c->{_} = $database;
	$c->{_mfn} = $mfn;
	$c->{input} = $input;
	return $c;
}

# Process each configured database. With --parallel every database runs in
# a forked child process (concurrency limited by Proc::Queue).
while (my ($database, $db_config) = each %{ $config->databases }) {

	# --only/--one filter is 'database_name/input_filter'; input part optional.
	# Declare separately from the conditional assignment: 'my' combined with
	# a statement modifier has undefined behavior in Perl.
	my ($only_database, $only_input);
	($only_database, $only_input) = split(m#/#, $only_filter) if ($only_filter);
	next if ($only_database && $database !~ m/$only_database/i);

	if ($parallel) {
		my $f = fork;
		if (defined($f) and $f == 0) {
			# child: fall through and process this database
			$log->info("Created processes $$ for speedup");
		} else {
			# parent (or failed fork): continue with next database
			next;
		}
	}

	my $indexer;
	if ($use_indexer && $parser->have_rules( 'search', $database )) {

		# configuration section name is the indexer name up to the first '-'
		my $cfg_name = $use_indexer;
		$cfg_name =~ s/\-.*$//;

		my $indexer_config = $config->get( $cfg_name ) || $log->logdie("can't find '$cfg_name' part in configuration");
		$indexer_config->{database} = $database;
		$indexer_config->{clean} = $clean;
		$indexer_config->{label} = $db_config->{name};

		# force clean if database has links
		$indexer_config->{clean} = 1 if ($db_config->{links});

		if ($use_indexer eq 'hyperestraier') {

			# open Hyper Estraier database
			use WebPAC::Output::Estraier '0.10';
			$indexer = WebPAC::Output::Estraier->new( %{ $indexer_config } );

		} elsif ($use_indexer eq 'hyperestraier-native') {

			# open Hyper Estraier database
			use WebPAC::Output::EstraierNative;
			$indexer = WebPAC::Output::EstraierNative->new( %{ $indexer_config } );

		} elsif ($use_indexer eq 'kinosearch') {

			# open KinoSearch
			use WebPAC::Output::KinoSearch;
			$indexer_config->{clean} = 1 unless (-e $indexer_config->{index_path});
			$indexer = WebPAC::Output::KinoSearch->new( %{ $indexer_config } );

		} else {
			$log->logdie("unknown use_indexer: $use_indexer");
		}

		# was $log->logide(...) -- typo for logdie, which would itself crash
		# with an unknown-method error exactly when this check fired
		$log->logdie("can't continue without valid indexer") unless ($indexer);
	}


	#
	# store Hyper Estraier links to other databases
	#
	if (ref($db_config->{links}) eq 'ARRAY' && $use_indexer) {
		foreach my $link (@{ $db_config->{links} }) {
			if ($use_indexer eq 'hyperestraier') {
				if ($merge) {
					print $estcmd_fh 'sudo -u www-data estcmd merge ' . $database . ' ' . $link->{to},$/;
				} else {
					# defer link creation until all databases are indexed
					$log->info("saving link $database -> $link->{to} [$link->{credit}]");
					push @links, sub {
						$log->info("adding link $database -> $link->{to} [$link->{credit}]");
						$indexer->add_link(
							from => $database,
							to => $link->{to},
							credit => $link->{credit},
						);
					};
				}
			} else {
				$log->warn("NOT IMPLEMENTED WITH $use_indexer: adding link $database -> $link->{to} [$link->{credit}]");
			}
		}
	}
	next if ($only_links);


	#
	# now WebPAC::Store
	#
	my $abs_path = abs_path($0);
	$abs_path =~ s#/[^/]*$#/#;	# strip script name, keep directory

	my $db_path = $config->webpac('db_path');

	if ($clean) {
		$log->info("creating new database '$database' in $db_path");
		rmtree( $db_path ) || $log->warn("can't remove $db_path: $!");
	} else {
		$log->info("working on database '$database' in $db_path");
	}

	my $store = WebPAC::Store->new(
		path => $db_path,
		debug => $debug,
	);


	#
	# now, iterate through input formats
	#

	my @inputs;
	if (ref($db_config->{input}) eq 'ARRAY') {
		@inputs = @{ $db_config->{input} };
	} elsif ($db_config->{input}) {
		push @inputs, $db_config->{input};
	} else {
		$log->info("database $database doesn't have inputs defined");
	}

	foreach my $input (@inputs) {

		my $input_name = $input->{name} || $log->logdie("input without a name isn't valid: ",dump($input));

		# apply the /input_filter part of --only against name or type
		next if ($only_input && ($input_name !~ m#$only_input#i && $input->{type} !~ m#$only_input#i));

		my $type = lc($input->{type});

		die "I know only how to handle input types ", join(",", $config->webpac('inputs') ), " not '$type'!\n" unless (grep(/$type/, $config->webpac('inputs')));

		my $input_module = $config->webpac('inputs')->{$type};

		my @lookups = $parser->have_lookup_create($database, $input);

		$log->info("working on input '$input_name' in $input->{path} [type: $input->{type}] using $input_module",
			@lookups ? " creating lookups: ".join(", ", @lookups) : ""
		);

		if ($stats) {
			# disable modification of records if --stats is in use
			delete($input->{modify_records});
			delete($input->{modify_file});
		}

		my $input_db = WebPAC::Input->new(
			module => $input_module,
			encoding => $config->webpac('webpac_encoding'),
			limit => $limit || $input->{limit},
			offset => $offset,
			recode => $input->{recode},
			stats => $stats,
			modify_records => $input->{modify_records},
			modify_file => $input->{modify_file},
		);
		# was "unless ($input)" which tested the (always true) config hash
		# instead of the freshly constructed object
		$log->logdie("can't create input using $input_module") unless ($input_db);

		if (defined( $input->{lookup} )) {
			$log->warn("$database/$input_name has deprecated lookup definition, removing it...");
			delete( $input->{lookup} );
		}

		my $lookup_coderef;

		if (@lookups) {

			# was "$database/$input" which interpolated a hashref into the message
			my $rules = $parser->lookup_create_rules($database, $input) || $log->logdie("no rules found for $database/$input_name");

			# called once per record during the first pass to populate lookups
			$lookup_coderef = sub {
				my $rec = shift || die "need rec!";
				my $mfn = $rec->{'000'}->[0] || die "need mfn in 000";

				WebPAC::Normalize::data_structure(
					row => $rec,
					rules => $rules,
					config => create_ds_config( $db_config, $database, $input, $mfn ),
				);

				#warn "current lookup: ", dump(WebPAC::Normalize::_get_lookup());
			};

			WebPAC::Normalize::_set_lookup( undef );

			$log->debug("created lookup_coderef using:\n$rules");

		}

		my $lookup_jar;

		my $maxmfn = $input_db->open(
			path => $input->{path},
			code_page => $input->{encoding},	# database encoding
			lookup_coderef => $lookup_coderef,
			lookup => $lookup_jar,
			%{ $input },
			load_row => sub {
				my $a = shift;
				return $store->load_row(
					database => $database,
					input => $input_name,
					id => $a->{id},
				);
			},
			save_row => sub {
				my $a = shift;
				return $store->save_row(
					database => $database,
					input => $input_name,
					id => $a->{id},
					row => $a->{row},
				);
			},

		);

		# persist any lookups collected during the open() pass
		my $lookup_data = WebPAC::Normalize::_get_lookup();

		if (defined( $lookup_data->{$database}->{$input_name} )) {
			$log->debug("created following lookups: ", sub { dump( $lookup_data ) } );

			foreach my $key (keys %{ $lookup_data->{$database}->{$input_name} }) {
				$store->save_lookup(
					database => $database,
					input => $input_name,
					key => $key,
					data => $lookup_data->{$database}->{$input_name}->{$key},
				);
			}
		}

		my $report_fh;
		if ($stats || $validate) {
			my $path = "out/report/${database}-${input_name}.txt";
			open($report_fh, '>', $path) || $log->logdie("can't open $path: $!");

			print $report_fh "Report for database '$database' input '$input_name' records ",
				$offset || 1, "-", $limit || $input->{limit} || $maxmfn, "\n\n";
			$log->info("Generating report file $path");
		}

		my $marc;
		if ($parser->have_rules( 'marc', $database, $input_name )) {
			$marc = WebPAC::Output::MARC->new(
				path => "out/marc/${database}-${input_name}.marc",
				lint => $marc_lint,
				dump => $marc_dump,
			);
		}

		my $rules = $parser->normalize_rules($database,$input_name) || $log->logdie("no normalize rules found for $database/$input_name");
		$log->debug("parsed normalize rules:\n$rules");

		# reset position in database
		$input_db->seek(1);

		# generate name of config key for indexer (strip everything after -)
		my $indexer_config = $use_indexer;
		$indexer_config =~ s/^(\w+)-?.*$/$1/g if ($indexer_config);

		# preload lookups from every database/input this input depends on
		my $lookup_hash;
		my $depends = $parser->depends($database,$input_name);

		if ($depends) {
			$log->debug("$database/$input_name depends on: ", dump($depends));
			$log->logdie("parser->depends didn't return HASH") unless (ref($depends) eq 'HASH');

			foreach my $db (keys %$depends) {
				foreach my $i (keys %{$depends->{$db}}) {
					foreach my $k (keys %{$depends->{$db}->{$i}}) {
						my $t = time();
						$log->debug("loading lookup $db/$i");
						$lookup_hash->{$db}->{$i}->{$k} = $store->load_lookup(
							database => $db,
							input => $i,
							key => $k,
						);
						$log->debug(sprintf("lookup $db/$i took %.2fs", time() - $t));
					}
				}
			}

			$log->debug("lookup_hash = ", sub { dump( $lookup_hash ) });
		}


		# second pass: normalize every record and feed indexer/store/MARC
		foreach my $pos ( 0 .. $input_db->size ) {

			my $row = $input_db->fetch || next;

			$total_rows++;

			my $mfn = $row->{'000'}->[0];

			if (! $mfn || $mfn !~ m#^\d+$#) {
				$log->warn("record $pos doesn't have valid MFN but '$mfn', using $pos");
				$mfn = $pos;
				push @{ $row->{'000'} }, $pos;
			}


			if ($validate) {
				if ( my $errors = $validate->validate_rec( $row, $input_db->dump_ascii ) ) {
					$log->error( "MFN $mfn validation error:\n",
						$validate->report_error( $errors )
					);
				}
				next;	# validation doesn't create any output
			}

			my $ds = WebPAC::Normalize::data_structure(
				row => $row,
				rules => $rules,
				lookup => $lookup_hash,
				config => create_ds_config( $db_config, $database, $input, $mfn ),
				marc_encoding => 'utf-8',
				load_row_coderef => sub {
					my ($database,$input,$mfn) = @_;
					return $store->load_row(
						database => $database,
						input => $input,
						id => $mfn,
					);
				},
			);

			$log->debug("ds = ", sub { dump($ds) }) if ($ds);

			$store->save_ds(
				database => $database,
				input => $input_name,
				id => $mfn,
				ds => $ds,
			) if ($ds && !$stats);

			$indexer->add(
				id => "${input_name}/${mfn}",
				ds => $ds,
				type => $config->get($indexer_config)->{type},
			) if ($indexer && $ds);

			if ($marc) {
				my $i = 0;

				# a single input record may expand into several MARC records
				while (my $fields = WebPAC::Normalize::_get_marc_fields( fetch_next => 1 ) ) {
					$marc->add(
						id => $mfn . ( $i ? "/$i" : '' ),
						fields => $fields,
						leader => WebPAC::Normalize::marc_leader(),
						row => $row,
					);
					$i++;
				}

				$log->info("Created $i instances of MFN $mfn\n") if ($i > 1);
			}
		}

		if ($validate) {
			my $errors = $validate->report;
			if ($errors) {
				$log->info("validation errors:\n$errors\n" );
				print $report_fh "$errors\n" if ($report_fh);
			}
		}

		if ($stats) {
			my $s = $input_db->stats;
			$log->info("statistics of fields usage:\n$s");
			print $report_fh "Statistics of fields usage:\n$s" if ($report_fh);
		}

		# close MARC file
		$marc->finish if ($marc);

		# close report
		close($report_fh) if ($report_fh);

	}

	eval { $indexer->finish } if ($indexer && $indexer->can('finish'));

	my $dt = time() - $start_t;
	$log->info("$total_rows records ", $indexer ? "indexed " : "",
		sprintf("in %.2f sec [%.2f rec/sec]",
			$dt, ($total_rows / ($dt || 1))	# guard against division by zero
		)
	);


	# end forked process
	if ($parallel) {
		$log->info("parallel process $$ finished");
		exit(0);
	}

}

if ($parallel) {
	# wait for all forked children to finish
	sleep(1) while wait != -1;
	$log->info("all parallel processes finished");
}

#
# handle links or merge after indexing
#

if ($merge) {
	print $estcmd_fh 'sudo /etc/init.d/hyperestraier start',$/;
	close($estcmd_fh);
	# low-precedence 'or' so the failure check applies to chmod(), not to
	# the path string (plain || bound to $estcmd_path before)
	chmod(0700, $estcmd_path) or $log->warn("can't chmod 0700 $estcmd_path: $!");
	system $estcmd_path;
} else {
	# run deferred link creation now that all indexes exist
	foreach my $link (@links) {
		# dump() from Data::Dump is what this file imports; Dumper() was
		# never imported and would die with "Undefined subroutine"
		$log->logdie("coderef in link ", dump($link), " is ", ref($link), " and not CODE") unless (ref($link) eq 'CODE');
		$link->();
	}
}
