#!/usr/bin/perl

# This will run periodically, called using:
#  perl pdb_interface_searchs.pl

use strict;
use warnings;

# http://search.cpan.org/~kwmak/PBS-Client/lib/PBS/Client.pm
use lib '/scratch/cmb433/pdb_interact/dependencies/PBS-Client-0.09/lib';
use PBS::Client;

use DBI;
use Data::Dumper;

# Limit of number of jobs in the queue. If there are more than this, we'll quit rather 
# than calling more analyze_pdb.pl instances.
my $queue_limit = 10;

# Limit of number of Perl scripts that can be run at one time,
# since analyze_pdb.pl runs locally (and then submits jobs to queue).
my $perl_limit = 10;

# Every later path in this script is relative to this directory, so abort
# immediately if we cannot get there (the original ignored chdir failure
# and would have run against the wrong working directory).
chdir('/scratch/cmb433/pdb_interact/')
	or die "ERROR: Could not chdir to /scratch/cmb433/pdb_interact/: $!\n";

# ------------------------------------------------------------------------------
# --- Query PDB for new entries
# ------------------------------------------------------------------------------

print STDERR "Querying PDB for new entries\n";

# Submit job to query PDB for new entries.
# NOTE: @cmds_query holds a single array reference, so cmd => \@cmds_query is
# PBS::Client's 2-D command form: one job running both commands sequentially.
my @cmds_query = ["module load perl", "perl scripts/query_new_pdb.pl -v > pdb_id_list_temp"];

# @emails likewise holds one array reference; "maillist => @emails" flattens
# to maillist => ['cmb433@nyu.edu'], which is what PBS::Client::Job expects.
my @emails = ['cmb433@nyu.edu'];

my $pbs = PBS::Client->new;
my $job_query = PBS::Client::Job->new(
	name => "query_new_pdb",
	wallt => "0:05:00", 
	cmd => \@cmds_query,
	maillist => @emails,
	mailopt => "bea",
);

# qsub() returns a reference to an array of job IDs, one per job submitted;
# we submitted a single job, so its ID is the first element.
my $retval_query = $pbs->qsub($job_query);

my @a_query =  @{$retval_query};
my $job_id_query = $a_query[0];

print STDERR "\tNow querying PDB (Job ID: " . $job_id_query . ")";

# Poll qstat every 5 s until the query job reaches state 'C' (complete) or
# disappears from the queue output entirely.

my $still_querying = 1;
my $qstat_cmd = "qstat -i " . $job_id_query . ".crunch.local | tail -n1";
	
while ($still_querying) {

	my $qstat_result = `$qstat_cmd`;
	
	# Index 9 is assumed to be the job-state column of the qstat summary
	# line (after the leading empty field split produces) -- TODO confirm
	# against this site's qstat output format.
	my @qstat_info = split /\s+/, $qstat_result;


	if (defined ($qstat_info[9])) {
		if ($qstat_info[9] eq 'C') {
			$still_querying = 0;
		} else {
			print STDERR '.';
			sleep 5;
		}
	} else {
		# If it's undefined, assume we're done.
		$still_querying = 0;
	}
}

# Report completion on STDERR like every other progress message (the
# original printed this one line to STDOUT, inconsistent with the
# identical message in the add-to-DB section).
print STDERR "Finished!\n";

# Load new PDB IDs into memory. 
# They're stored in a temporary file, pdb_id_list_temp, as well as permanently
#  in pdb_id_lists/pdb_id_list_YYYY-MM-DD 

# Three-arg open with a lexical filehandle. The original used a bareword
# handle with 2-arg open and had a stray comma before "or die", which is a
# syntax error (the script could not compile).
open (my $pdb_id_fh, '<', 'pdb_id_list_temp')
	or die "ERROR: Could not open temporary PDB ID list file: $!\n";
my @new_pdb_ids = <$pdb_id_fh>;   # one PDB ID per line; newlines retained
close $pdb_id_fh;

# The permanent copy lives under pdb_id_lists/; drop the temporary one.
unlink "pdb_id_list_temp";

# ------------------------------------------------------------------------------
# --- Figure out permanent location of new PDB IDs, 
# --- in pdb_id_lists/pdb_id_list_YYYY-MM-DD 
# ------------------------------------------------------------------------------

# The date-stamped filenames sort chronologically as plain strings, so the
# lexically greatest entry is the most recent run.
my @all_runs = glob("pdb_id_lists/pdb_id_list_*");

my $latest_run_file;

if (@all_runs) {
	# Reverse-sort and take the first element (== greatest filename).
	($latest_run_file) = sort { $b cmp $a } @all_runs;
} else {
	print "ERROR: No PDB ID lists found in pdb_id_lists/pdb_id_list_YYYY-MM-DD\n";
	exit;
}

# ------------------------------------------------------------------------------
# --- If there are new PDB entries, submit job to add them to the backend DB
# ------------------------------------------------------------------------------

if (scalar (@new_pdb_ids) > 0) {
	print STDERR "Adding " . scalar (@new_pdb_ids) . " new PDB entries to database.\n";
	
	# Submit job to add new PDBs to database.
	# NOTE: @cmds_sql holds a single array reference, so cmd => \@cmds_sql is
	# PBS::Client's 2-D command form: one job running both commands in order.
	my @cmds_sql = ["module load perl", "perl scripts/add_new_PDB_to_DB.pl $latest_run_file"];

	# @emails (declared earlier) holds one array ref, so "maillist => @emails"
	# flattens to pass that reference to the Job constructor.
	my $job_sql = PBS::Client::Job->new(
		name => "add_new_pdb_to_db",
		wallt => "0:05:00", 
		cmd => \@cmds_sql,
		maillist => @emails,
		mailopt => "bea",
	);

	# qsub() returns a ref to an array of job IDs; we submitted one job.
	my $retval_sql = $pbs->qsub($job_sql);

	my @a_sql =  @{$retval_sql};
	my $job_id_sql = $a_sql[0];

	print STDERR "\tNow adding to DB (Job ID: " . $job_id_sql . ")";

	# Poll qstat every 5 s until the DB-load job reaches state 'C'
	# (complete) or vanishes from the queue output.

	$still_querying = 1;
	$qstat_cmd = "qstat -i " . $job_id_sql . ".crunch.local | tail -n1";
	
	while ($still_querying) {

		my $qstat_result = `$qstat_cmd`;
	
		# Index 9 is assumed to be the job-state column (after the leading
		# empty field from split) -- TODO confirm qstat output format.
		my @qstat_info = split /\s+/, $qstat_result;
	
		if (defined ($qstat_info[9])) {
			if ($qstat_info[9] eq 'C') {
				$still_querying = 0;
			} else {
				print STDERR '.';
				sleep 5;
			}
		} else {
			# If it's undefined, assume we're done.
			$still_querying = 0;
		}

	}

	print STDERR "Finished!\n";

} else {
	print STDERR "No new PDB codes to add to the database.\n";
}

# ------------------------------------------------------------------------------
# --- Find PDBs and PDBs w/ chains that need to be fed into analyze_pdb.pl.
# ------------------------------------------------------------------------------

# Entries in the pdb table that still need the early, pre-Rosetta steps.
my $pdb_to_do_sql = "SELECT pdb_id FROM pdb WHERE num_chains IS NULL;";

print "SQL: [$pdb_to_do_sql]\n";

# Chain pairs in pdb_partner that need steps before or during Rosetta.
my $rosetta_to_do_sql =
	  "SELECT pdb.pdb_id, pdb_partner.chain_1, pdb_partner.chain_2 "
	. "FROM pdb, pdb_partner "
	. "WHERE pdb_partner.rosetta_status != 'DONE' "
	. "AND pdb.id = pdb_partner.pdb_id;";

print "SQL: [$rosetta_to_do_sql]\n";

# (Here, if desired, will be a query for those that need to have proc_ala_scan run)

# Connect to the SQLite tracking database. RaiseError makes DBI die on any
# subsequent statement failure, so prepare/execute below are not checked.
my $dbh = DBI->connect(
	"dbi:SQLite:dbname=workflow_tracker.db",
	"", "",
	{ RaiseError => 1 },
) or die $DBI::errstr;

my $sth = $dbh->prepare($pdb_to_do_sql);
$sth->execute();

# Reference to an array holding one array-ref per result row.
my $pdb_to_do_ref = $sth->fetchall_arrayref();

my @pdb_to_do = @{$pdb_to_do_ref};
my $pdb_count = scalar @pdb_to_do;

if ($pdb_count) {
	print STDERR "There are $pdb_count entries in pdb that need to be run.\n";
} else {
	print STDERR "There are no entries in pdb that need to be run.\n";
}

$sth->finish();

# Same dance for the Rosetta work list.
$sth = $dbh->prepare($rosetta_to_do_sql);
$sth->execute();

my $rosetta_to_do_ref = $sth->fetchall_arrayref();

my @rosetta_to_do = @{$rosetta_to_do_ref};
my $rosetta_count = scalar @rosetta_to_do;

if ($rosetta_count) {
	print STDERR "There are $rosetta_count entries in pdb_partner that need to be run (Rosetta).\n";
} else {
	print STDERR "There are no entries in pdb_partner that need to be run.\n";
}

$sth->finish();

$dbh->disconnect();

# ------------------------------------------------------------------------------
# --- Call analyze_pdb.pl on all inputs that need to be run, pausing in between
# ------------------------------------------------------------------------------

foreach my $pdb_row (@$pdb_to_do_ref) {

	# Before we proceed, find out how many jobs are queued, held, or blocked.
	# '[QHB]' matches any one of the single-character states Q, H, or B; the
	# original pattern "QHB" looked for that literal string in a one-character
	# column and so never matched, making the limit check a no-op.
	my $queue = "qstat -f -u cmb433 | sed '1,5d' | awk '{ print \$10 }' | grep -c '[QHB]'";
	my $tot_queue = `$queue`;

	# Exit if we've submitted too many jobs and we're past our limit.
	# (The original printed this message every iteration but never exited.)
	if ($tot_queue > $queue_limit) {
		print STDERR "Exiting upon hitting limit ";
		print STDERR "for number of jobs in queue ($queue_limit)\n";
		exit;
	}
	
	# Check also how many instances of analyze_pdb.pl are running, since 
	# analyze_pdb runs on the login node and we don't want to overwhelm it.
	# (Just check for perl, since it's easier. The '[p]erl' trick keeps the
	# grep process itself from matching and inflating the count.)
	my $ps_cmd = "ps | grep -c '[p]erl'";
	my $tot_perl = `$ps_cmd`;
	
	# Exit if we have too many Perl scripts running.
	# (As above, the original printed but did not exit.)
	if ($tot_perl > $perl_limit) {
		print STDERR "Exiting upon hitting limit ";
		print STDERR "for number of Perl scripts running simultaneously ($perl_limit)\n";
		exit;
	}
	
	# Each row from fetchall_arrayref() is a one-element array ref: [pdb_id].
	my $pdb_to_run = $pdb_row->[0];
	
	print STDERR "Running analyze_pdb.pl on $pdb_to_run\n";
	
	# Redirect the child's stdout/stderr into per-PDB files under tmp/.
	my $analyze_pdb_cmd = "perl scripts/analyze_pdb.pl $pdb_to_run ";
	$analyze_pdb_cmd   .= ">> tmp/$pdb_to_run.tmp.output 2>> tmp/$pdb_to_run.tmp.error";
	 
	# Call analyze_pdb.pl
	system ($analyze_pdb_cmd);
	
	# Pause to give it a chance to get started
	sleep 5;
	
}

print STDERR "No more entries in pdb or pdb_partner that need to be run for now.\n";

exit 0;