#!/usr/bin/perl

use DBI;
use Cwd;
use POSIX;
use strict;

# these hashes map column names in tables to their order
my %observation;
my %calibrators;
{
    # the columns of the observation table, in the order the combined
    # query returns them (indices 0 .. 49)
    my @observation_columns=
	('source','ra','dec','n_if','if_freq','if_nchans','if_bw','time',
	 'arrayname','project_code','rpfits_file','cycle_time','offsource',
	 'onsource','scantype','calcode','stations','header_number',
	 'num_processed','processed_ids','id','az','el','attenmm','subrefl',
	 'corr_cfg','airtemp','chi','evector','jyperk','pntra','pntdec',
	 'relhumid','smonrms','wind','winddir','xtsys','ytsys','xyphase',
	 'xcaljy','ycaljy','xgtp','ygtp','xsdo','ysdo','pointcor',
	 'point_time','point_source','point_az','point_el');
    # the columns of the calibrators table follow directly after the
    # observation columns (indices 50 .. 62)
    my @calibrators_columns=
	('name','ra','dec','type','low_freq','high_freq','bandpass_21cm',
	 'bandpass_13cm','bandpass_6cm','bandpass_3cm','bandpass_12mm',
	 'bandpass_7mm','bandpass_3mm');
    @observation{@observation_columns}=(0..$#observation_columns);
    @calibrators{@calibrators_columns}=
	(scalar(@observation_columns)..
	 (scalar(@observation_columns)+$#calibrators_columns));
}

# the query we use to get data to reduce - this gets all observations where the name matches
# that of a known calibrator, that hasn't yet been processed, and that has an IF with 2 GHz
# bandwidth, and returns up to 100 such observations, starting with the earliest and continuing
# in time order
my $get_query_parameters="*";
my $get_query_source="observation,calibrators";
my @get_query_constraints=("calibrators.name=observation.source","observation.num_processed=0",
			   "2048=any(observation.if_bw)","time>'2009-05-01 00:00:00'");
my $get_query_order="time";
my $get_query_limit=100; # -1 means no LIMIT clause at all

# some other defaults
# the working directory
my $working_directory="/data/MENTOK_2/ste616/flux_monitoring/staging_area";
my $minimum_ants=4; # observations with fewer on-source antennae are skipped
my $minimum_time=10; # in seconds
my $bad_ants=2; # if the number of antennae on-source is less than or equal to this, we will
                # mark this observation as unusable

my $output_debug=0; # verbosity; each -D on the command line raises it by one
my $delete_process=1; # when 1, intermediate files are deleted after processing
# get our command line arguments
my @args=@ARGV;
while (@args){
    my $opt=shift @args;
    if ($opt eq "-D"){
	# level up debug printing
	$output_debug++;
    } elsif ($opt eq "-F"){
	# only calibrate flux calibrators (which is the simplest thing to do)
	push @get_query_constraints,"calibrators.type='f'";
    } elsif ($opt eq "-C"){
	# only calibrate non-flux calibrators
	push @get_query_constraints,"calibrators.type='p'";
    } elsif ($opt eq "-cm"){
	# only calibrate cm calibrators
	push @get_query_constraints,"10000>any(observation.if_freq)";
    } elsif ($opt eq "-L"){
	# change the limit (takes a value)
	$get_query_limit=shift @args;
    } elsif ($opt eq "-R"){
	# allow reprocessing, ie. include already processed calibrators;
	# blank out the num_processed constraint (the query builder skips
	# empty constraints)
	foreach my $constraint (@get_query_constraints){
	    if ($constraint eq "observation.num_processed=0"){
		$constraint="";
		last;
	    }
	}
    } elsif ($opt eq "-W"){
	# change the working directory (takes a value)
	$working_directory=shift @args;
    } elsif ($opt eq "-A"){
	# set the minimum number of antennas on-source (takes a value)
	$minimum_ants=shift @args;
    } elsif ($opt eq "-T"){
	# set the minimum time on-source (takes a value)
	$minimum_time=shift @args;
    } elsif ($opt eq "-d"){
	# don't delete the final processed files
	$delete_process=0;
    }
}

# assemble the SQL selection query from its components; constraints that
# have been blanked out (eg. by the -R option) are dropped, and the rest
# are joined with AND so the WHERE clause is always well-formed (the old
# index-based loop would have produced "WHERE  AND ..." if the first
# constraint had been blanked)
my $get_query="SELECT ".$get_query_parameters." FROM ".$get_query_source;
my @active_constraints=grep { $_ ne "" } @get_query_constraints;
if ($#active_constraints>=0){
    $get_query.=" WHERE ".join(" AND ",@active_constraints);
}
if ($get_query_order ne ""){
    $get_query.=" ORDER BY ".$get_query_order;
}
# a limit of -1 means "no limit"
if ($get_query_limit!=-1){
    $get_query.=" LIMIT ".$get_query_limit;
}

if ($output_debug>0){
    print "data selection query: $get_query\n";
}

# database details
# NOTE(review): credentials are hard-coded and the postgres user has an
# empty password; assumes a trusting local network setup
my $dsn="dbi:Pg:dbname=atca;host=mentok;port=5432";
my $user="postgres";
my $password="";

# Setup the database connection
# RaiseError is off, so DBI calls fail quietly and must be checked manually
my $dbh=DBI->connect($dsn,$user,$password,{ RaiseError => 0, AutoCommit => 1});

# get the data
my $get=$dbh->prepare($get_query);
$get->execute;
# main processing loop: each fetched row is an observation joined with its
# matching calibrators entry, so the %observation and %calibrators hashes
# give the offsets of each column within the one combined row
while(my @get_response=$get->fetchrow){
    if ($output_debug>0){
	print "\n\n=== PROCESSING STARTS ===\n";
	print "source: ".$get_response[$observation{'source'}];
	print "   calibrator type: ".$get_response[$calibrators{'type'}];
	print "   id: ".$get_response[$observation{'id'}]."\n";
    }
    # what to do?
    # NOTE(review): only flux calibrators (type "f") are handled below; rows
    # with any other calibrator type are fetched but silently skipped
    if ($get_response[$calibrators{'type'}] eq "f"){
	# we have a flux calibrator, so all we need to do is calibrate it
	# and store it in the database
	# check that it meets the minimum on-source antennae and time
	# constraints
	my @onsource_cycles=&postgresql_array_get($get_response[$observation{'onsource'}]);
	my $ants_onsource=0;
	# count the antennae that were on-source for at least one cycle
	for (my $i=0;$i<=$#onsource_cycles;$i++){
	    if ($output_debug>2){
		print "antenna ".($i+1).": $onsource_cycles[$i] cycles on-source\n";
	    }
	    if ($onsource_cycles[$i]>0){
		$ants_onsource++;
	    }
	}
	if ($ants_onsource<$minimum_ants){
	    if ($output_debug>0){
		print "observation only has $ants_onsource antennae on-source; we will not".
		    " reduce this data\n";
	    }
	    if ($ants_onsource<=$bad_ants){
		if ($output_debug>0){
		    print "observation has less than ".($bad_ants+1)." antennae on-source;".
			" this observation will be marked as unusable\n";
		}
		&observation_set_unusable($dbh,$get_response[$observation{'id'}]);
	    }
	    next;
	}
	if ($output_debug>0){
	    print "observation has $ants_onsource antennae on-source\n";
	}
	# estimate the observation length from the antenna with the most
	# cycles on-source
	my ($mincycles,$maxcycles)=&minmax(@onsource_cycles);
	my $max_obstime=$maxcycles*$get_response[$observation{'cycle_time'}];
	if ($max_obstime<$minimum_time){
	    if ($output_debug>0){
		print "observation has only $max_obstime seconds of data; we will not".
		    " reduce this data\n";
	    }
	    next;
	}
	if ($output_debug>0){
	    print "observation has $max_obstime seconds of observation time\n";
	}
	# step 1: extract the data
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 1. extract data\n\n";
	}
	my $extracted_fits=
	    &extract_rpfits($get_response[$observation{'rpfits_file'}],
			    $get_response[$observation{'header_number'}],
			    $get_response[$observation{'source'}],
			    $get_response[$observation{'time'}],$working_directory);

	if ($extracted_fits eq ""){
	    # couldn't get a file!
	    # NOTE(review): this aborts the whole script, not just this
	    # observation
	    exit;
	}

	# step 2: determine which IFs we want from this file
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 2. determine usable IFs\n\n";
	}
	my @if_bw=&postgresql_array_get($get_response[$observation{'if_bw'}]);
	my @if_freq=&postgresql_array_get($get_response[$observation{'if_freq'}]);
	my @if_nchans=&postgresql_array_get($get_response[$observation{'if_nchans'}]);
	my @usable_ifs;
	my @usable_if_freqs;
	my @usable_if_nchans;
	my @usable_if_bws;
	for (my $i=0;$i<=$#if_bw;$i++){
	    # check that we have 2 GHz bandwidth
	    if ($if_bw[$i]==2048){
		# it's a continuum band
		# check that we haven't already added an IF with the same frequency
		my $already_added=0;
		for (my $j=0;$j<=$#usable_ifs;$j++){
		    if ($if_freq[$usable_ifs[$j]]==$if_freq[$i]){
			# already got this frequency
			$already_added=1;
		    }
		}
		if ($already_added==0){
		    # we have another usable IF
		    push @usable_ifs,$i;
		    push @usable_if_freqs,$if_freq[$i];
		    push @usable_if_nchans,$if_nchans[$i];
		    push @usable_if_bws,$if_bw[$i];
		}
	    }
	}
	if ($output_debug>0){
	    print "found ".($#usable_ifs+1)." usable IFs:\n";
	    for (my $i=0;$i<=$#usable_ifs;$i++){
		print " IF ".($usable_ifs[$i]+1).": freq = ".$if_freq[$usable_ifs[$i]]."; bw = ".$if_bw[$usable_ifs[$i]].
		    "; chans = ".$if_nchans[$usable_ifs[$i]]."\n";
	    }
	}

	# step 3: load the data into MIRIAD format
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 3. load RPFITS into MIRIAD format\n\n";
	}
	my $rc=&fits_to_miriad($extracted_fits,$get_response[$observation{'source'}],
			       $working_directory,@usable_ifs);
	if ($rc!=0){
	    # something went wrong, mark the observation as unusable
	    &observation_set_unusable($dbh,$get_response[$observation{'id'}]);
	    if ($delete_process==1){
		&clean_up($get_response[$observation{'source'}],$extracted_fits,
			  $working_directory,@usable_if_freqs);
	    }
	    next;
	}

	# step 4: flag based on calibration events
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 4. calibration event flagging\n\n";
	}
	&event_flag($dbh,$get_response[$observation{'source'}],
		    $get_response[$observation{'time'}],$max_obstime,
		    $get_response[$observation{'cycle_time'}],$working_directory,
		    @usable_if_freqs);

	# step 5: calibrate, round 1
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 5. calibration, round 1\n\n";
	}
	&calibrate($get_response[$observation{'source'}],
		   $get_response[$calibrators{'type'}],$working_directory,
		   @usable_if_freqs);

	# step 6: autoflag!
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 6. automatic RFI flagging\n\n";
	}
	&autoflag($get_response[$observation{'source'}],$working_directory,
		  @usable_if_freqs);

	# step 7: check that we still have data left
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 7. determine data flagging state\n\n";
	}
	my @flaglevels=&check_flagging($get_response[$observation{'source'}],$working_directory,
				       @usable_if_freqs);
	# and change the usable IF freqs accordingly
	# (keep a copy of the full list so clean_up can still find every
	# file that was created)
	my @original_usable_if_freqs=@usable_if_freqs;
	for (my $i=0;$i<=$#usable_if_freqs;$i++){
	    if ($flaglevels[$i]==100.0){
		# this IF has been completely flagged, so we drop it
		# (splice and rewind the index so the next element isn't
		# skipped)
		splice @usable_if_freqs,$i,1;
		splice @usable_if_nchans,$i,1;
		splice @usable_if_bws,$i,1;
		splice @usable_ifs,$i,1;
		splice @flaglevels,$i,1;
		$i--;
	    }
	}
	# check if we have any IFs left
	if ($#usable_if_freqs<0){
	    # nope
	    if ($output_debug>0){
		print "all IFs have been completely flagged...\n";
	    }
	    &observation_set_unusable($dbh,$get_response[$observation{'id'}]);
	    if ($delete_process==1){
		&clean_up($get_response[$observation{'source'}],$extracted_fits,
			  $working_directory,@original_usable_if_freqs);
	    }
	    next;
	}

	# step 8: average the data
	my @new_if_nchans;
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 8. average data over channels\n\n";
	}
	@new_if_nchans=&average_data($get_response[$observation{'source'}],$working_directory,
					    ($#usable_if_freqs+1),@usable_if_freqs,@usable_if_nchans);

	# step 9: calibrate, round 2
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 9. calibrate, round 2\n\n";
	}
	my $cal_success=&calibrate($get_response[$observation{'source'}],
				   $get_response[$calibrators{'type'}],$working_directory,
				   @usable_if_freqs);
	if ($cal_success<0){
	    # we haven't got a calibrated dataset
	    if ($output_debug>0){
		print "unable to calibrate dataset\n";
	    }
	    &observation_set_unusable($dbh,$get_response[$observation{'id'}]);
	    if ($delete_process==1){
		&clean_up($get_response[$observation{'source'}],$extracted_fits,
			  $working_directory,@original_usable_if_freqs);
	    }
	    next;
	}

	# step 10: get all the extra data we need
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 10. obtain extra metadata\n\n";
	}
	my @datrefs=&get_parameters($get_response[$observation{'source'}],
				    $extracted_fits,$working_directory,@usable_if_freqs);
	my %extradata=%{$datrefs[0]};
	if ($output_debug>1){
	    print "result of get_parameters:\n";
	    print "airtemp = ".$extradata{"airtemp_if$usable_if_freqs[0]"}."\n";
	    print "xtsys = ".&array_to_text(@{$extradata{"xtsys_if$usable_if_freqs[0]"}})."\n";
	}

	# step 11: put all this metadata information into the database now
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 11. update database\n\n";
	}
	# the IF lists are passed flattened: all frequencies, then all
	# bandwidths, then the usable frequencies, with counts in front so
	# the callee can unpack them
	&update_database_metadata($dbh,$get_response[$observation{'id'}],\%extradata,
				  ($#if_freq+1),($#usable_if_freqs+1),
				  @if_freq,@if_bw,@usable_if_freqs);

	# we do the rest of the procedure one IF at a time
	my $num_added=0;
	for (my $i=0;$i<=$#usable_if_freqs;$i++){
	    # step 12: make some flux and phase stability measurements
	    if ($output_debug>0){
		print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
		print "step 12. flux and phase measurements\n\n";
	    }
	    my @measrefs=&make_measurements($get_response[$observation{'source'}],
					    $working_directory,$usable_if_freqs[$i]);
	    my %measurements=%{$measrefs[0]};

	    # step 13: compress the calibrator dataset
	    if ($output_debug>0){
		print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
		print "step 13. archiving and compressing dataset\n\n";
	    }
	    &compress_dataset($get_response[$observation{'source'}],$working_directory,
			      $usable_if_freqs[$i]);

	    # step 14: make a new processed observation item in the database, and insert
	    # the values and data into it
	    if ($output_debug>0){
		print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
		print "step 14. adding flux data to database\n\n";
	    }
	    my $new_fluxdata_id=
		&new_fluxdata_entry($dbh,$get_response[$observation{'id'}],
				    $get_response[$observation{'source'}],
				    $usable_if_freqs[$i],$new_if_nchans[$i],
				    $usable_if_bws[$i],($usable_ifs[$i]+1),1,1,0,
				    0,0,0,$working_directory,\%measurements);

	    # new_fluxdata_entry returns -1 on failure
	    if ($new_fluxdata_id>0){
		# step 15: add this new item into the observation database as a processed
		# observation
		if ($output_debug>0){
		    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
		    print "step 15. updating processed flags and links\n\n";
		}
		&add_processed($dbh,$get_response[$observation{'id'}],$new_fluxdata_id);
		$num_added++;
	    }
	}

	# if no flux data entries were made, we mark this observation as unusable
	if ($num_added==0){
	    &observation_set_unusable($dbh,$get_response[$observation{'id'}]);
	}

	# step 16: clean up
	if ($output_debug>0){
	    print "\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n";
	    print "step 16. clean up\n\n";
	}
	if ($delete_process==1){
	    &clean_up($get_response[$observation{'source'}],$extracted_fits,
		      $working_directory,@original_usable_if_freqs);
	}
    }
}

# all fetched observations processed; close the database connection
$dbh->disconnect;

sub add_processed {
    # Record a newly-created fluxdata entry against its parent observation:
    # increments observation.num_processed and stores the fluxdata id in
    # the next free slot of observation.processed_ids.
    #   $dbh    - open DBI database handle
    #   $obsid  - id of the row in the observation table
    #   $procid - id of the new row in the fluxdata table
    my ($dbh,$obsid,$procid)=@_;

    # figure out how many processed items are already present; use a bind
    # parameter instead of interpolating the id into the SQL text
    my $gen_query="SELECT num_processed FROM observation WHERE id=?";
    if ($output_debug>0){
	print "executing query: $gen_query [id=$obsid]\n";
    }
    my $gen=$dbh->prepare($gen_query);
    $gen->execute($obsid);
    # id is the primary key, so at most one row comes back
    my ($num_processed)=$gen->fetchrow;
    $gen->finish;

    # one more now! (a missing or NULL count safely becomes 1 here)
    $num_processed=($num_processed||0)+1;
    # the array subscript has to be part of the SQL text (it is a value we
    # computed ourselves); the ids are passed as bind parameters
    my $upd_query="UPDATE observation SET (num_processed,processed_ids[".$num_processed."])".
	" = (?,?) WHERE id=?";
    if ($output_debug>0){
	print "executing query: $upd_query\n";
    }
    my $upd=$dbh->prepare($upd_query);
    $upd->execute($num_processed,$procid,$obsid);
    $upd->finish;
}

sub new_fluxdata_entry {
    # Create a new row in the fluxdata table for one IF of a processed
    # observation, attach the compressed MIRIAD dataset as a PostgreSQL
    # large object, and return the id of the new row; returns -1 if the
    # measurements contain bad values or the insert fails.
    #   $dbh               - open DBI database handle
    #   $id                - id of the parent observation row
    #   $source            - source name
    #   $freq,$nchan,$bw   - IF frequency, channel count and bandwidth
    #   $ifnum             - 1-based IF number within the observation
    #   $fluxproc,$bandproc,$phaseproc - 1 if this entry was processed as a
    #                        flux/bandpass/phase calibrator respectively
    #   $fluxid,$bandid    - calibrator row ids, used only when the entry
    #                        was NOT self-calibrated for flux/bandpass
    #   $fluxscale         - flux scale factor, used only when not a flux cal
    #   $working_directory - directory holding the compressed dataset
    #   $measurementsptr   - hashref as returned by make_measurements
    my ($dbh,$id,$source,$freq,$nchan,$bw,$ifnum,$fluxproc,$bandproc,$phaseproc,
	$fluxid,$bandid,$fluxscale,$working_directory,$measurementsptr)=@_;

    # preparation: strip any trailing slash from the working directory
    $working_directory=~s/\/$//;
    my %measurements=%{$measurementsptr};

    # build the INSERT statement column list, then append the values one by
    # one below
    my $dat_query="INSERT INTO fluxdata (num_observations,observation_ids,source,".
	"frequency,nchans,bandwidth,if_num,processed_as_flux,processed_as_bandpass,".
	"processed_as_phase,fluxcal_id,bandpasscal_id,flux_scale_factor,data_freq,".
	"data_bw,tripleamp_value,tripleamp_scatter_measured,tripleamp_scatter_theoretical,".
	"closurephase_measured,closurephase_theoretical,uv_data) VALUES (";
    $dat_query.="1,'{".$id."}','".$source."',".$freq.",".$nchan.",".$bw.",".$ifnum.",";
    if ($fluxproc==1){
	$dat_query.="TRUE";
    } else {
	$dat_query.="FALSE";
    }
    $dat_query.=",";
    if ($bandproc==1){
	$dat_query.="TRUE";
    } else {
	$dat_query.="FALSE";
    }
    $dat_query.=",";
    if ($phaseproc==1){
	$dat_query.="TRUE";
    } else {
	$dat_query.="FALSE";
    }
    $dat_query.=",";
    # a self-calibrated flux calibrator has no separate flux calibrator id
    if ($fluxproc==1){
	$dat_query.="NULL";
    } else {
	$dat_query.=$fluxid;
    }
    $dat_query.=",";
    # likewise for the bandpass calibrator id
    if ($bandproc==1){
	$dat_query.="NULL";
    } else {
	$dat_query.=$bandid;
    }
    $dat_query.=",";
    # and the flux scale factor only applies to non-flux calibrators
    if ($fluxproc==1){
	$dat_query.="NULL";
    } else {
	$dat_query.=$fluxscale;
    }
    $dat_query.=",";
    # the measurement arrays go in as PostgreSQL array literals
    $dat_query.="'".&array_to_text(@{$measurements{'frequencies'}})."','".
	&array_to_text(@{$measurements{'bandwidths'}})."','".
	&array_to_text(@{$measurements{'tripleamp_value'}})."','".
	&array_to_text(@{$measurements{'tripleamp_scatter_actual'}})."','".
	&array_to_text(@{$measurements{'tripleamp_scatter_theoretical'}})."','".
	&array_to_text(@{$measurements{'closure_phases_actual'}})."','".
	&array_to_text(@{$measurements{'closure_phases_theoretical'}})."',";

    # check if there are any bad values
    # (presumably 'nan' or asterisk-filled overflow fields from MIRIAD
    # output -- TODO confirm against the MIRIAD task output formats)
    if (($dat_query=~/\'nan\'/)||($dat_query=~/\*+/)){
	# yep, this query won't work!
	return -1;
    }

    # copy the compressed dataset to the database server so lo_import can
    # read it server-side
    # NOTE(review): the scp/ssh commands are built by string interpolation
    # and run through a shell; source names with shell metacharacters would
    # break this
    my $input_dataset=$working_directory."/".$source.".".$freq.".tar.bz2";
    my $database_server_login="postgres\@localhost";
    my $database_server_port=2222;
    my $database_server_path="/var/lib/postgresql";
    my $copy_command="scp -P ".$database_server_port;
    $copy_command.=" ".$input_dataset;
    $copy_command.=" ".$database_server_login.":.";
    if ($output_debug>0){
	print "executing: $copy_command\n";
    }
    system $copy_command;
    $dat_query.="lo_import('".$database_server_path."/".$source.".".$freq.".tar.bz2')";
    $dat_query.=")";

    if ($output_debug>0){
	print "executing query: $dat_query\n";
    }
    my $dat=$dbh->prepare($dat_query);
    my $rc=$dat->execute;
    if ($rc!=1){
	# we may have gotten an error!
	if ($dat->err!=0){
	    if ($output_debug>0){
		print "error inserting into database!\n";
	    }
	    $dat->finish;
	    return -1;
	}
    }
    $dat->finish;
    # clean up the compressed dataset
    my $remove_command="ssh -p ".$database_server_port;
    $remove_command.=" ".$database_server_login;
    $remove_command.=" rm -f ".$source.".".$freq.".tar.bz2";
    if ($output_debug>0){
	print "executing: $remove_command\n";
    }
    system $remove_command;

    # now determine what id that entry has
    # NOTE(review): racy -- a concurrent insert by another process could
    # return the wrong id; INSERT ... RETURNING id would be safer
    my $gid_query="SELECT id FROM fluxdata ORDER BY id DESC LIMIT 1";
    if ($output_debug>0){
	print "executing query: $gid_query\n";
    }
    my $gid=$dbh->prepare($gid_query);
    $gid->execute;
    my $entry_id;
    while (my ($idresponse)=$gid->fetchrow){
	$entry_id=$idresponse;
    }
    $gid->finish;

    return $entry_id;
}

sub make_measurements {
    # Run the MIRIAD "closure" task over the calibrated dataset for one IF
    # and collect flux/phase stability measurements: closure phases and
    # triple-product amplitudes, each measured over the whole band and over
    # four sub-bands. Returns a reference to a hash with arrayref values:
    #   frequencies, bandwidths, closure_phases_actual,
    #   closure_phases_theoretical, tripleamp_value,
    #   tripleamp_scatter_actual, tripleamp_scatter_theoretical
    # Failed MIRIAD runs are recorded with -999.999 sentinel values.
    my ($source,$working_directory,$if_freq)=@_;

    # some MIRIAD commands
    my $miriad_closure_command="closure";

    # preparation: strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    my %measurements;

    # get the frequencies of each channel
    # (frequency_axis returns a 1-based array, so $#freqs is the channel
    # count -- element 0 is unused)
    my @freqs=&frequency_axis($source,$working_directory,$if_freq);
    my $avchans=$#freqs;
    if ($output_debug>0){
	print "got $avchans channels in averaged dataset, frequencies:\n";
	print &array_to_text(@freqs)."\n";
    }
    my $n_spectral_measurements=4; # make this many measurements across the band
    my $spectral_channels=$avchans/$n_spectral_measurements;

    # work out the channel ranges for each measurement: index 0 covers the
    # whole band, indices 1..n cover successive sub-bands
    my @first_channels;
    my @last_channels;
    my @middle_channels;
    my @middle_freqs;
    my @bandwidths;
    for (my $j=0;$j<=$n_spectral_measurements;$j++){
	if ($j==0){
	    # special case, the whole band
	    $first_channels[$j]=1;
	    $last_channels[$j]=$#freqs;
	} else {
	    $first_channels[$j]=1+($j-1)*$spectral_channels;
	    $last_channels[$j]=$j*$spectral_channels;
	}
	$middle_channels[$j]=($first_channels[$j]+$last_channels[$j])/2.0;
	if ($middle_channels[$j]!=floor($middle_channels[$j])){
	    # between 2 channels: take the mean of the two adjacent channel
	    # frequencies
	    $middle_freqs[$j]=($freqs[floor($middle_channels[$j])]+
			       $freqs[ceil($middle_channels[$j])])/2.0;
	} else {
	    $middle_freqs[$j]=$freqs[$middle_channels[$j]];
	}
	$bandwidths[$j]=$freqs[$last_channels[$j]]-$freqs[$first_channels[$j]];
    }

    $measurements{'frequencies'}=[@middle_freqs];
    $measurements{'bandwidths'}=[@bandwidths];

    # get closure phases by parsing the closure task's text output
    my @closure_phases_actual;
    my @closure_phases_theoretical;
    my $input_dataset=$working_directory."/".$source.".".$if_freq;
    for (my $i=0;$i<=$#first_channels;$i++){
	my $closure_command=$miriad_closure_command;
	$closure_command.=" vis=".$input_dataset;
	$closure_command.=" stokes=i";
	if ($i>0){
	    # restrict to the sub-band's channel range (index 0 is the
	    # whole band, no line selection needed)
	    $closure_command.=" line=chan,$spectral_channels,".$first_channels[$i];
	}
	if ($output_debug>0){
	    print "executing $closure_command\n";
	}
	# fork and exec the MIRIAD task, capturing stdout and stderr
	open(CLOSURE,"-|")||exec $closure_command." 2>&1";
	while(<CLOSURE>){
	    chomp;
	    my $line=$_;
	    if ($output_debug>2){
		print "MIRIAD: $line\n";
	    }
	    my @els=split(/\s+/,$line);
	    if ($line=~/^Actual\sclosure\sphase/){
		push @closure_phases_actual,$els[$#els];
	    } elsif ($line=~/^Theoretical\sclosure\sphase/){
		push @closure_phases_theoretical,$els[$#els];
	    } elsif ($line=~/^\#\#\#\sFatal\sError/){
		# this command has failed; record sentinel values so the
		# output arrays stay aligned with the measurement index
		push @closure_phases_actual,-999.999;
		push @closure_phases_theoretical,-999.999;
	    }
	}
	close(CLOSURE);
    }
    $measurements{'closure_phases_actual'}=[@closure_phases_actual];
    $measurements{'closure_phases_theoretical'}=[@closure_phases_theoretical];

    # get triple amplitudes (same task, options=amplitude)
    my @tripleamp_values;
    my @tripleamp_scatter_actual;
    my @tripleamp_scatter_theoretical;
    for (my $i=0;$i<=$#first_channels;$i++){
	my $closure_command=$miriad_closure_command;
	$closure_command.=" vis=".$input_dataset;
	$closure_command.=" stokes=i";
	$closure_command.=" options=amplitude";
	if ($i>0){
	    $closure_command.=" line=chan,$spectral_channels,".$first_channels[$i];
	}
	if ($output_debug>0){
	    print "executing $closure_command\n";
	}
	open(CLOSURE,"-|")||exec $closure_command." 2>&1";
	while(<CLOSURE>){
	    chomp;
	    my $line=$_;
	    if ($output_debug>2){
		print "MIRIAD: $line\n";
	    }
	    my @els=split(/\s+/,$line);
	    if ($line=~/^Mean\samplitude/){
		push @tripleamp_values,$els[$#els];
	    } elsif ($line=~/^Actual\samplitude\srms/){
		push @tripleamp_scatter_actual,$els[$#els];
	    } elsif ($line=~/^Theoretical\samplitude\srms/){
		push @tripleamp_scatter_theoretical,$els[$#els];
	    } elsif ($line=~/^\#\#\#\sFatal\sError/){
		# this command has failed; record sentinel values
		push @tripleamp_values,-999.999;
		push @tripleamp_scatter_actual,-999.999;
		push @tripleamp_scatter_theoretical,-999.999;
	    }
	}
	close(CLOSURE);
    }
    $measurements{'tripleamp_value'}=[@tripleamp_values];
    $measurements{'tripleamp_scatter_actual'}=[@tripleamp_scatter_actual];
    $measurements{'tripleamp_scatter_theoretical'}=[@tripleamp_scatter_theoretical];

    return (\%measurements);
}

sub frequency_axis {
    # Determine the frequency of every channel in the MIRIAD dataset for
    # one IF, by parsing the output of the uvlist task (options=spectral).
    # Returns an array indexed by channel number; MIRIAD channels count
    # from 1, so element 0 is normally unset.
    my ($source,$working_directory,$if_freq)=@_;

    # strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    my $dataset=$working_directory."/".$source.".".$if_freq;
    my $uvlist_command="uvlist"." vis=".$dataset." options=spectral";
    if ($output_debug>0){
	print "executing: $uvlist_command\n";
    }

    # fork, exec uvlist in the child, and parse its output in the parent
    my ($startchan,$numchan,$startfreq,$freqinterval);
    open(my $uvlist_fh,"-|")||exec $uvlist_command;
    while (my $line=<$uvlist_fh>){
	chomp $line;
	my @fields=split(/\s+/,$line);
	if ($line=~/^Start\schannel/){
	    $startchan=$fields[-1];
	} elsif ($line=~/^Number\sof\schannels/){
	    $numchan=$fields[-1];
	} elsif ($line=~/Start\sfrequency/){
	    $startfreq=$fields[-1];
	} elsif ($line=~/Frequency\sincrement/){
	    $freqinterval=$fields[-1];
	}
    }
    close($uvlist_fh);

    # channel i sits (i - startchan) frequency increments above the start
    # frequency
    my @frequencies;
    for my $chan ($startchan..$numchan){
	$frequencies[$chan]=$startfreq+($chan-$startchan)*$freqinterval;
    }

    return @frequencies;
}

sub compress_dataset {
    # Compress each per-IF MIRIAD dataset into a bzipped tar archive in
    # the working directory, ready for insertion into the database.
    #   $source            - source name (dataset name prefix)
    #   $working_directory - directory holding the datasets
    #   @ifs               - IF frequencies (dataset name suffixes)
    my ($source,$working_directory,@ifs)=@_;

    # preparation: strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    for (my $i=0;$i<=$#ifs;$i++){
	my $input_dataset=$source.".".$ifs[$i];
	my $output_dataset=$working_directory."/".$input_dataset.".tar.bz2";
	# use the list form of system so the arguments go to tar directly,
	# without being re-parsed by a shell (paths with spaces or shell
	# metacharacters are then safe)
	my @compress_command=("tar","-C",$working_directory,"-cjf",
			      $output_dataset,$input_dataset);
	if ($output_debug>0){
	    print "executing: ".join(" ",@compress_command)."\n";
	}
	system @compress_command;
    }
}

sub update_database_metadata {
    # Write the extra per-observation metadata (gathered by get_parameters)
    # back into the observation table row with the given id.
    # The IF lists arrive flattened in @ifs_stuff: $n_ifs frequencies,
    # then $n_ifs bandwidths, then $n_usable usable frequencies.
    my ($dbh,$id,$extradataptr,$n_ifs,$n_usable,@ifs_stuff)=@_;

    # unpack the flattened IF lists back into their three arrays
    my @all_if_freqs;
    my @all_if_bws;
    my @usable_if_freqs;
    for (my $i=0;$i<$n_ifs;$i++){
	push @all_if_freqs,shift @ifs_stuff;
    }
    for (my $i=0;$i<$n_ifs;$i++){
	push @all_if_bws,shift @ifs_stuff;
    }
    for (my $i=0;$i<$n_usable;$i++){
	push @usable_if_freqs,shift @ifs_stuff;
    }

    my %extradata=%{$extradataptr};

    # scalar metadata is taken from the first usable IF
    my $sfx="_if".$usable_if_freqs[0];
    # the standard, always present (hopefully!) variables
    my $met_query_variables="az,el,attenmm,subrefl,corr_cfg,airtemp,chi,evector,jyperk,".
	"pntra,pntdec,relhumid,smonrms,wind,winddir";
    my $met_query_values=$extradata{"antaz$sfx"}.",".$extradata{"antel$sfx"}.",'{".
	&array_to_text(@{$extradata{'attenmm_a'}}).",".
	&array_to_text(@{$extradata{'attenmm_b'}})."}','".
	&array_to_text(@{$extradata{'subrefl'}})."','".$extradata{'corr_cfg'}."',".
	$extradata{"airtemp$sfx"}.",".$extradata{"chi$sfx"}.",".$extradata{"evector$sfx"}.",".
	$extradata{"jyperk$sfx"}.",".$extradata{"pntra$sfx"}.",".$extradata{"pntdec$sfx"}.",".
	$extradata{"relhumid$sfx"}.",".$extradata{"smonrms$sfx"}.",".$extradata{"wind$sfx"}.",".
	$extradata{"winddir$sfx"};
    # variables that may or may not be present; each is stored as a 2-D
    # PostgreSQL array with one row per IF, padded with NULLs for IFs that
    # have no data (non-continuum bands and unusable IFs)
    my @maybe_present=("xtsys","ytsys","xyphase","xcaljy","ycaljy","xgtp","ygtp",
		       "xsdo","ysdo");
    for (my $i=0;$i<=$#maybe_present;$i++){
	if ($extradata{"$maybe_present[$i]$sfx"}){
	    my $tsys_string="'{";
	    for (my $j=0;$j<=$#all_if_freqs;$j++){
		if ($j!=0){
		    $tsys_string.=",";
		}
		if ($all_if_bws[$j]!=2048){
		    # not a 2 GHz continuum band: pad with one NULL per
		    # antenna
		    $tsys_string.="{";
		    for (my $k=0;$k<$extradata{"nants_if$all_if_freqs[$j]"};$k++){
			if ($k!=0){
			    $tsys_string.=",";
			}
			$tsys_string.="NULL";
		    }
		    $tsys_string.="}";
		} else {
		    # continuum band: use the real values if this IF was
		    # usable, otherwise pad with NULLs as above
		    my $added=0;
		    for (my $k=0;$k<=$#usable_if_freqs;$k++){
			if ($usable_if_freqs[$k]==$all_if_freqs[$j]){
			    $tsys_string.=&array_to_text(@{$extradata{"$maybe_present[$i]\_if$all_if_freqs[$j]"}});
			    $added=1;
			}
		    }
		    if ($added==0){
			$tsys_string.="{";
			for (my $k=0;$k<$extradata{"nants_if$all_if_freqs[$j]"};$k++){
			    if ($k!=0){
				$tsys_string.=",";
			    }
			    $tsys_string.="NULL";
			}
			$tsys_string.="}";
		    }
		}
	    }
	    $tsys_string.="}'";
	    $met_query_variables.=",$maybe_present[$i]";
	    $met_query_values.=",".$tsys_string;
	}
    }
    # pointing information is only present when a pointing scan was done
    if ($extradata{'point_time'}){
	$met_query_variables.=",pointcor,point_time,point_source,point_az,point_el";
	$met_query_values.=",'{".&array_to_text(@{$extradata{'pointcor_az'}}).",".
	    &array_to_text(@{$extradata{'pointcor_el'}})."}','".$extradata{'point_time'}."','".
	    $extradata{'point_source'}."',".$extradata{'point_az'}.",".
	    $extradata{'point_el'};
    }
    my $met_query="UPDATE observation SET (".$met_query_variables.")".
	"= (".$met_query_values.") WHERE id=".$id;
    if ($output_debug>0){
	print "executing query: $met_query\n";
    }
    my $met=$dbh->prepare($met_query);
    $met->execute;
    $met->finish;

}

sub average_data {
    # Average each per-IF MIRIAD dataset down to a fixed number of output
    # channels with the uvaver task, replacing the original dataset with
    # the averaged one (the original is deleted, or kept as *.nonavg when
    # $delete_process is off). Returns the list of new channel counts, one
    # per IF. The IF data arrives flattened in @ifs_stuff: $n_ifs
    # frequencies followed by $n_ifs channel counts.
    my ($source,$working_directory,$n_ifs,@ifs_stuff)=@_;
    
    # unpack the flattened lists
    my @if_freqs;
    my @if_nchans;
    for (my $i=0;$i<$n_ifs;$i++){
	push @if_freqs,shift @ifs_stuff;
    }
    for (my $i=0;$i<$n_ifs;$i++){
	push @if_nchans,shift @ifs_stuff;
    }

    # some MIRIAD commands
    my $miriad_uvaver_command="uvaver";

    # preparation: strip any trailing slash from the working directory
    $working_directory=~s/\/$//;
    
    # some parameters
    my $nchans=32; # the number of channels the output dataset should have

    my @new_if_nchans;

    # uvaver is run from inside the working directory so the dataset names
    # can be relative
    # NOTE(review): chdir is not checked for success -- if it fails, the
    # system commands below run in the wrong directory
    my $current_directory=getcwd();
    chdir $working_directory;

    for (my $i=0;$i<=$#if_freqs;$i++){
	my $input_dataset=$source.".".$if_freqs[$i];
	my $output_dataset=$input_dataset.".aver";
	# channels averaged per output channel (the -1 drops the last
	# channel so the count divides evenly)
	my $average_chans=($if_nchans[$i]-1)/$nchans;
	my $uvaver_line_spec="line=chan,".$nchans.",1,".$average_chans;
	my $uvaver_command=$miriad_uvaver_command;
	$uvaver_command.=" vis=".$input_dataset;
	$uvaver_command.=" out=".$output_dataset;
	$uvaver_command.=" ".$uvaver_line_spec;
	$uvaver_command.=" options=nopass,nopol,nocal";
	if ($output_debug>0){
	    print "executing: $uvaver_command\n";
	}
	system $uvaver_command;
	# dispose of the unaveraged dataset, then move the averaged one
	# into its place
	if ($delete_process==1){
	    my $remove_command="rm -rf ".$input_dataset;
	    if ($output_debug>0){
		print "executing: $remove_command\n";
	    }
	    system $remove_command;
	} else {
	    my $rename_command="mv ".$input_dataset." ".$input_dataset.".nonavg";
	    if ($output_debug>0){
		print "executing: $rename_command\n";
	    }
	    system $rename_command;
	}
	my $rename_command="mv ".$output_dataset." ".$input_dataset;
	if ($output_debug>0){
	    print "executing: $rename_command\n";
	}
	system $rename_command;
	push @new_if_nchans,$nchans;
    }

    chdir $current_directory;

    return @new_if_nchans;
}

sub clean_up {
    # Deletes any files we've created during processing: each per-IF
    # MIRIAD dataset and its compressed archive, plus the extracted
    # RPFITS file.
    #   $source            - source name (dataset name prefix)
    #   $rpfits            - path of the extracted RPFITS file
    #   $working_directory - directory holding the datasets
    #   @ifs               - IF frequencies (dataset name suffixes)
    my ($source,$rpfits,$working_directory,@ifs)=@_;

    # preparation: strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    # remove the MIRIAD files and compressed archives; the list form of
    # system passes the paths to rm directly, without shell re-parsing
    for (my $i=0;$i<=$#ifs;$i++){
	my $dataset=$working_directory."/".$source.".".$ifs[$i];
	foreach my $target ($dataset,$dataset.".tar.bz2"){
	    my @remove_command=("rm","-rf",$target);
	    if ($output_debug>0){
		print "executing: ".join(" ",@remove_command)."\n";
	    }
	    system @remove_command;
	}
    }

    # remove the RPFITS file
    my @remove_command=("rm","-rf",$rpfits);
    if ($output_debug>0){
	print "executing: ".join(" ",@remove_command)."\n";
    }
    system @remove_command;
}

sub check_flagging {
    # Run the MIRIAD uvfstats task on the dataset for each IF and return
    # the overall percentage of flagged data per IF, in the same order as
    # the supplied IF list.
    my ($source,$working_directory,@ifs)=@_;

    # some MIRIAD commands
    my $miriad_uvfstats_command="uvfstats";

    # strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    my @flagging_levels;
    for (my $n=0;$n<=$#ifs;$n++){
	my $dataset=$working_directory."/".$source.".".$ifs[$n];
	my $uvfstats_command=$miriad_uvfstats_command.
	    " vis=".$dataset.
	    " \"select=pol(xx,yy)\"".
	    " mode=overall";
	if ($output_debug>0){
	    print "executing: $uvfstats_command\n";
	}
	# fork, exec uvfstats in the child, parse its output in the parent
	open(my $stats_fh,"-|")||exec $uvfstats_command;
	while (my $line=<$stats_fh>){
	    chomp $line;
	    if ($line=~/Overall/){
		# pull the percentage out of the "Overall" summary line
		(my $flevel=$line)=~s/\s*Overall\s+(.*)\%/$1/;
		$flagging_levels[$n]=$flevel;
	    }
	}
	close($stats_fh);
	if ($output_debug>0){
	    print "found $flagging_levels[$n] % flagged data in IF $ifs[$n]\n";
	}
    }

    return @flagging_levels;
}

sub postgresql_to_miriad_time {
    # Converts a PostgreSQL timestamp string ("YYYY-MM-DD HH:MM:SS") into
    # MIRIAD's time format, e.g. "2010-01-02 03:04:05" -> "2010JAN02:03:04:05".
    my ($postgresql_time)=@_;

    # the six fields are separated by dashes, a space and colons
    my ($year,$month,$day,$hour,$minute,$second)=
        split(/[\-\s\:]/,$postgresql_time);

    return sprintf("%4d%3s%02d:%02d:%02d:%02d",
                   $year,&month_number_to_name($month),
                   $day,$hour,$minute,$second);
}

sub month_number_to_name {
    # Maps a 1-based month number (1-12) to its three-letter upper-case
    # English abbreviation (1 -> "JAN", ..., 12 -> "DEC").
    my ($month_number)=@_;

    my @month_names=qw(JAN FEB MAR APR MAY JUN JUL AUG SEP OCT NOV DEC);
    return $month_names[$month_number-1];
}

sub event_flag {
    # Flags correlator-event-affected data out of each IF dataset.
    # Queries the "events" table for events that occurred during the scan;
    # if any are found, flags every IF dataset from the scan start until
    # four cycle times after the last event, using MIRIAD's uvflag.
    # Arguments: DBI handle, source name, scan start time (PostgreSQL
    # timestamp string), scan length in seconds, cycle time in seconds,
    # working directory, list of IF identifiers. Returns nothing.
    my ($dbh,$source,$start_time,$scan_time,$cycle_time,$working_directory,@ifs)=@_;

    # some MIRIAD commands
    my $miriad_uvflag_command="uvflag";

    # preparation: strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    # get the events that occurred during the scan; ordering by event_time
    # DESC with LIMIT 1 returns only the latest such event
    my $evs_query="SELECT * FROM events WHERE event_time>'".$start_time."' AND ".
	"event_time<(TIMESTAMP '".$start_time."' + INTERVAL '".$scan_time." second') ".
	"ORDER BY event_time DESC LIMIT 1";
    if ($output_debug>0){
	print "executing query: $evs_query\n";
    }
    my $evs=$dbh->prepare($evs_query);
    $evs->execute;

    my $needs_flagging=0;
    my $event_time;
    while(my @events=$evs->fetchrow){
	# pretty much any correlator event will make us need to recalibrate, so
	# we flag all the way from the start to the time of the last event (which
	# should be the output of the query), plus four extra cycles to let the
	# event conclude.
	$needs_flagging=1;
	# NOTE(review): assumes event_time is the second column of the
	# events table -- confirm against the schema
	$event_time=$events[1];
    }
    $evs->finish;

    if ($needs_flagging==0){
	# no flagging required
	return;
    }

    my $buffer_time=4*$cycle_time; # in seconds
    # we ask postgresql for the time answers since it knows how to do it :)
    my $gtq_query="SELECT TIMESTAMP '".$event_time."' + INTERVAL '".$buffer_time." second'";
    if ($output_debug>1){
	print "executing query $gtq_query\n";
    }
    my $gtq=$dbh->prepare($gtq_query);
    $gtq->execute;
    
    my $final_time;
    while(my @times=$gtq->fetchrow){
	$final_time=$times[0];
    }
    $gtq->finish;

    # now flag each IF dataset over the affected time range
    for (my $i=0;$i<=$#ifs;$i++){
	my $input_dataset=$working_directory."/".$source.".".$ifs[$i];
	my $uvflag_command=$miriad_uvflag_command;
	$uvflag_command.=" vis=".$input_dataset;
	$uvflag_command.=" flagval=flag";
	$uvflag_command.=" \"select=time(".
	    &postgresql_to_miriad_time($start_time).",".
	    &postgresql_to_miriad_time($final_time).")\"";
	if ($output_debug>0){
	    print "executing $uvflag_command\n";
	}
	system $uvflag_command;
    }
    
}

sub autoflag {
    # Runs MIRIAD's mirflag automatic flagger twice over each IF dataset:
    # first with stokes=xx,xx,yy and then with stokes=yy,xx,yy.
    my ($source,$working_directory,@ifs)=@_;

    # some MIRIAD commands
    my $miriad_mirflag_command="mirflag";

    # strip any trailing slash from the working directory
    $working_directory=~s/\/$//;

    foreach my $if (@ifs){
        my $dataset=$working_directory."/".$source.".".$if;
        foreach my $stokes_spec ("xx,xx,yy","yy,xx,yy"){
            my $mirflag_command=$miriad_mirflag_command;
            $mirflag_command.=" vis=".$dataset;
            $mirflag_command.=" stokes=".$stokes_spec;
            $mirflag_command.=" options=amp,medsed,short";
            if ($output_debug>0){
                print "executing: $mirflag_command\n";
            }
            system $mirflag_command;
        }
    }
}

sub calibrate {
    # Derives calibration for each IF dataset using MIRIAD's mfcal and
    # gpcal. If gpcal fails (non-convergence, NaN antenna solutions, or a
    # fatal error), any partial calibration is deleted and the IF is
    # recalibrated with mfcal alone.
    # Arguments: source name, source type (when it is "f" the gpcal
    # "qusolve" option is not used), working directory, list of IFs.
    # Returns 0 on success, or -1 if mfcal reported a fatal error on any IF.
    my ($source,$source_type,$working_directory,@ifs)=@_;

    # some MIRIAD commands
    my $miriad_mfcal_command="mfcal";
    my $miriad_gpcal_command="gpcal";

    # preparation
    $working_directory=~s/\/$//;
    my $returncode=0;

    for (my $i=0;$i<=$#ifs;$i++){
	my $calibrated=0;
	# gpcal is attempted on the first pass; if it fails, the while loop
	# runs once more with mfcal only
	my $do_gpcal=1;
	while($calibrated==0){
	    my $input_dataset=$working_directory."/".$source.".".$ifs[$i];
	    # start each attempt from a clean slate
	    &delete_calibration($input_dataset);
	    my $mfcal_command=$miriad_mfcal_command;
	    $mfcal_command.=" vis=".$input_dataset;
	    $mfcal_command.=" interval=0.1";
	    if ($output_debug>0){
		print "executing: $mfcal_command\n";
	    }
	    # fork and read mfcal's combined stdout/stderr through a pipe
	    open(MFCAL,"-|")|| exec $mfcal_command." 2>&1";
	    my $mfcal_success=1;
	    while(<MFCAL>){
		chomp;
		my $line=$_;
		if ($output_debug>1){
		    print "MIRIAD: $line\n";
		}
		if ($line=~/^\#\#\#\sFatal\sError\:/){
		    $mfcal_success=0;
		}
	    }
	    close(MFCAL);
	    if ($mfcal_success==0){
		# mfcal failed: give up on this IF (the "last" leaves the
		# retry loop) and report the failure to the caller
		$returncode=-1;
		last;
	    }
	    $calibrated=1;
	    if ($do_gpcal==1){
		my $gpcal_command=$miriad_gpcal_command;
		$gpcal_command.=" vis=".$input_dataset;
		$gpcal_command.=" interval=0.1";
		$gpcal_command.=" options=xyvary";
		if ($source_type ne "f"){
		    $gpcal_command.=",qusolve";
		}
		if ($output_debug>0){
		    print "executing: $gpcal_command\n";
		}
		open(GPCAL,"-|")||exec $gpcal_command." 2>&1";
		while(<GPCAL>){
		    chomp;
		    my $line=$_;
		    if ($output_debug>1){
			print "MIRIAD: $line\n";
		    }
		    # any of the conditions below mean the gpcal solution is
		    # unusable: redo this IF's calibration without gpcal
		    if ($line=~/\#\#\#\sWarning/){
			if ($line=~/Failed\sto\sconverge/){
			    $calibrated=0;
			    $do_gpcal=0;
			}
		    } elsif ($line=~/^\s+Ant\s.*nan.*/){
			$calibrated=0;
			$do_gpcal=0;
		    } elsif ($line=~/^\#\#\#\sFatal\sError\:/){
			$calibrated=0;
			$do_gpcal=0;
		    }
		}
		close(GPCAL);
	    }
	}
    }

    return $returncode;
}

sub delete_calibration {
    # Removes any existing calibration tables (bandpass, gains, leakage)
    # from a MIRIAD dataset so that a fresh calibration can be derived.
    # Arguments: path of the MIRIAD dataset directory.
    my ($dataset)=@_;

    # preparation: strip any trailing slash
    $dataset=~s/\/$//;

    my @delete_elements=("bandpass","gains","leakage");
    foreach my $element (@delete_elements){
	my $delete_target=$dataset."/".$element;
	if ($output_debug>0){
	    print "removing: $delete_target\n";
	}
	# use unlink instead of shelling out to "rm -f": no fork per file,
	# and safe for paths containing spaces or shell metacharacters;
	# like "rm -f", a missing file is silently ignored
	unlink $delete_target;
    }

}

sub array_to_text {
    # turns an array into a postgresql compliant array literal, e.g.
    # (1,2,3) -> "{1,2,3}" and ("a","b") -> "{'a','b'}".
    # Undefined elements are replaced with a sentinel: 'NA' in quoted
    # (string) arrays, 999 in numeric arrays.
    my (@array)=@_;

    # check for letters to decide whether quoting is needed, but we don't
    # look for 'e'/'E' which may be used in exponential numbers
    # (the previous pattern [a-zA-DF-Z] still matched lowercase 'e')
    my $needsquotes=0;
    for (my $i=0;$i<=$#array;$i++){
	if ((defined $array[$i])&&($array[$i]=~/[a-df-zA-DF-Z]/)){
	    $needsquotes=1;
	}
    }
    my $string="{";
    for (my $i=0;$i<=$#array;$i++){
	if ($i>0){
	    $string.=",";
	}
	if ($needsquotes==1){
	    $string.="'";
	}
	# only truly undefined elements get the sentinel; 0 and "" are
	# legitimate values and are passed through unchanged (the old
	# truthiness test turned a valid 0 into 999)
	if (!defined $array[$i]){
	    if ($needsquotes==1){
		$string.="NA";
	    } else {
		$string.="999";
	    }
	} else {
	    $string.=$array[$i];
	}
	if ($needsquotes==1){
	    $string.="'";
	}
    }
    $string.="}";
    return $string;
}

sub get_parameters {
    # this routine handles the getting of all the nice metadata we want to
    # stick into the database: it parses the output of MIRIAD's uvlist for
    # each IF dataset, then the RPFITS header (via rpfhdr) for attenuator,
    # subreflector, pointing and correlator configuration information.
    # Arguments: source name, RPFITS file, working directory, list of IFs.
    # Returns: a reference to a hash mapping parameter names (suffixed
    # "_if<n>" for per-IF values) to scalars or array references.
    my ($source,$rpfits_file,$working_directory,@ifs)=@_;

    # we get what we can from MIRIAD's uvlist
    my $miriad_uvlist_command="uvlist";
    my $rpfits_rpfhdr_command="rpfhdr";

    # the hash we will return to the caller
    my %parameters;

    # preparation
    $working_directory=~s/\/$//;

    for (my $i=0;$i<=$#ifs;$i++){
	my $input_dataset=$working_directory."/".$source.".".$ifs[$i];
	my $logfile="/tmp/uvlist.log";
	my $uvlist_command=$miriad_uvlist_command;
	$uvlist_command.=" vis=".$input_dataset;
	$uvlist_command.=" options=var,full";
	$uvlist_command.=" log=".$logfile;
	if ($output_debug>0){
	    print "executing: $uvlist_command\n";
	}
	system $uvlist_command;
	# check that the logfile has been created; we cannot continue
	# without it (the exit used to sit inside the debug-only branch,
	# so the failure was silently ignored when debugging was off)
	if (!-e $logfile){
	    if ($output_debug>0){
		print "unable to get output of uvlist!\n";
	    }
	    exit;
	}
	open(LOGFILE,$logfile);
	my $started=0;          # true once the header variables section begins
	my $left_to_get=0;      # per-antenna values still expected
	my $getting_parameter;  # name of the multi-valued parameter being read
	my @paramsget;          # values collected so far for that parameter
	while(<LOGFILE>){
	    chomp;
	    my $line=$_;
	    if ($line=~/^Header\svariables/){
		$started=1;
	    } elsif ($started==1){
		# tokenize the line: colons become spaces, and a digit glued
		# to a following minus sign is separated so adjacent negative
		# values are split correctly
		$line=~s/\:/ /g;
		$line=~s/(\d)(\-)/$1 $2/g;
		my @els=split(/\s+/,$line);
		for (my $j=0;$j<=$#els;$j++){
		    if (($els[$j] eq "airtemp")||($els[$j] eq "antaz")||
			($els[$j] eq "antel")||($els[$j] eq "chi")||
			($els[$j] eq "evector")||($els[$j] eq "jyperk")||
			($els[$j] eq "pntdec")||($els[$j] eq "pntra")||
			($els[$j] eq "relhumid")||($els[$j] eq "smonrms")||
			($els[$j] eq "wind")||($els[$j] eq "winddir")||
			($els[$j] eq "nants")){
			# single-valued parameter: the value follows the name
			my $this_parameter=$els[$j]."_if".$ifs[$i];
			$j++;
			my $this_value=$els[$j];
			$parameters{$this_parameter}=$this_value;
			if ($output_debug>1){
			    print "got $this_parameter : $parameters{$this_parameter}\n";
			}
		    } elsif ($left_to_get>0){
			# in the middle of collecting a per-antenna parameter
			if ($els[$j] eq $getting_parameter){
			    # the parameter name repeats on continuation lines
			    next;
			}
			push @paramsget,$els[$j];
			$left_to_get--;
			if ($left_to_get==0){
			    $getting_parameter.="_if".$ifs[$i];
			    $parameters{$getting_parameter}=[@paramsget];
			}
		    } elsif (($els[$j] eq "xtsys")||
			     ($els[$j] eq "xyphase")||($els[$j] eq "ytsys")||
			     ($els[$j] eq "xcaljy")||($els[$j] eq "ycaljy")||
			     ($els[$j] eq "xgtp")||($els[$j] eq "ygtp")||
			     ($els[$j] eq "xsdo")||($els[$j] eq "ysdo")){
			# start of a per-antenna parameter: expect one value
			# per antenna (relies on nants having been seen)
			if ($left_to_get==0){
			    $getting_parameter=$els[$j];
			    $left_to_get=$parameters{"nants_if$ifs[$i]"};
			    if ($output_debug>1){
				print "starting parameter get for $getting_parameter, $left_to_get values to get\n";
			    }
			    undef @paramsget;
			}
		    }
		}
	    }
	}
	close(LOGFILE);
    }
    
    # we get more parameters from the RPFITS header
    my $rpfhdr_command=$rpfits_rpfhdr_command." ".$rpfits_file;
    if ($output_debug>0){
	print "executing: $rpfhdr_command\n";
    }
    my @attenmm_a;
    my @attenmm_b;
    my @subrefl;
    my @pointcor_az;
    my @pointcor_el;
    open(RPFHDR,"-|")||exec $rpfhdr_command;
    while(<RPFHDR>){
	chomp;
	my $line=$_;
	# each capture-extraction below is guarded by its own match, so a
	# non-conforming line cannot pick up stale $1/$2/... values from an
	# earlier successful match
	if ($line=~/^ATTEN/){
	    # mm attenuators
	    if ($line=~/^ATTEN\s+CA0(.)\sMM\=(\d*)\s(\d*)/){
		my $antenna=$1;
		my $mmatten_a=$2;
		my $mmatten_b=$3;
		$attenmm_a[$antenna-1]=$mmatten_a;
		$attenmm_b[$antenna-1]=$mmatten_b;
	    }
	} elsif ($line=~/^SUBREFL/){
	    # subreflector position
	    if ($line=~/^SUBREFL\s+CA0(.)\sPOS\=\s*(.*)\sTILT.*/){
		my $antenna=$1;
		my $position=$2;
		$subrefl[$antenna-1]=$position;
	    }
	} elsif ($line=~/^POINTCOR/){
	    # pointing correction
	    if ($line=~/^POINTCOR\s+CA0(.)\sAz\=\s*(.*)\sEl\=\s*(.*)\s.*\/.*/){
		my $antenna=$1;
		my $azcor=$2;
		my $elcor=$3;
		$pointcor_az[$antenna-1]=$azcor;
		$pointcor_el[$antenna-1]=$elcor;
	    }
	} elsif ($line=~/^POINTINF/){
	    # last pointing information; the original pattern contained a
	    # stray "\E" (the end-of-quoting regex metacharacter) before
	    # "L=", so it could never match "EL=" -- fixed to a literal E
	    if ($line=~/^POINTINF\s*(.*)\sSOURCE\=\s*(.*)\s*AZ\=\s(.*)\sEL\=\s(.*)/){
		$parameters{'point_time'}=$1;
		$parameters{'point_source'}=$2;
		$parameters{'point_az'}=$3;
		$parameters{'point_el'}=$4;
	    }
	} elsif ($line=~/^CORR_CFG/){
	    # correlator configuration
	    if ($line=~/^CORR_CFG\=\s*(.*)$/){
		$parameters{'corr_cfg'}=$1;
	    }
	}
    }
    close(RPFHDR);
    $parameters{'attenmm_a'}=[@attenmm_a];
    $parameters{'attenmm_b'}=[@attenmm_b];
    $parameters{'subrefl'}=[@subrefl];
    # do we need to add the pointing arrays
    if ($parameters{'point_time'}){
	$parameters{'pointcor_az'}=[@pointcor_az];
	$parameters{'pointcor_el'}=[@pointcor_el];
    }
    
    return (\%parameters);
}

sub minmax {
    # Returns the smallest and largest (numeric) values in the given list,
    # as the two-element list (min, max).
    my @values=@_;

    my ($minval,$maxval)=($values[0],$values[0]);
    foreach my $value (@values){
        $minval=$value if ($value<$minval);
        $maxval=$value if ($value>$maxval);
    }
    return ($minval,$maxval);
}

sub observation_set_unusable {
    # Marks an observation as unusable by setting its num_processed
    # column to -1.
    # Arguments: DBI database handle, observation id.
    my ($dbh,$id)=@_;
    
    # use a bind placeholder rather than interpolating $id into the SQL,
    # so malformed id values cannot corrupt the statement
    my $set_query="UPDATE observation SET num_processed=-1 WHERE id=?";
    if ($output_debug>0){
	print "query: $set_query [id=$id]\n";
    }
    my $set=$dbh->prepare($set_query);
    $set->execute($id);
    $set->finish;
}

sub fits_to_miriad {
    # Loads an RPFITS file into MIRIAD format with atlod (one pass per
    # requested IF) and splits each result into single-source datasets
    # with uvsplit. Works inside $work_directory and restores the caller's
    # directory before returning.
    # Returns 0 if at least one IF was loaded successfully, -1 otherwise.
    # (Previously the success path fell off the end of the sub and
    # returned an empty value; existing "==-1" checks still behave the
    # same with the explicit 0.)
    my ($rpfits_chunk,$source,$work_directory,@ifs)=@_;

    # some commands
    my $miriad_atlod_command="atlod";
    my $miriad_uvsplit_command="uvsplit";

    # ATLOD into MIRIAD format
    $work_directory=~s/\/$//;
    # strip any leading path components from the chunk name
    $rpfits_chunk=~s/^.*\/(.*)$/$1/;
    my $current_directory=getcwd();
    chdir $work_directory;
    my $input_file=$rpfits_chunk;
    my $num_success=0;
    for (my $i=0;$i<=$#ifs;$i++){
	# NOTE(review): atlod's ifsel appears to be 1-based relative to the
	# supplied IF numbers -- confirm against the callers
	my $this_if=$ifs[$i]+1;
	my $output_file=$input_file."_if".$this_if.".uv";
	my $miriad_options="birdie,rfiflag,noauto,nocacal,xycorr,opcorr";
	my $atlod_command=$miriad_atlod_command;
	$atlod_command.=" in=".$input_file;
	$atlod_command.=" out=".$output_file;
	$atlod_command.=" options=".$miriad_options;
	$atlod_command.=" ifsel=".$this_if;
	if ($output_debug>0){
	    print "executing command: $atlod_command\n";
	}
	my $rc=&run_miriad_command($atlod_command);
	if ($rc==0){
	    # split the loaded data into single-source datasets
	    my $uvsplit_command=$miriad_uvsplit_command;
	    $uvsplit_command.=" vis=".$output_file;
	    $uvsplit_command.=" options=clobber";
	    if ($output_debug>0){
		print "executing command: $uvsplit_command\n";
	    }
	    $rc=&run_miriad_command($uvsplit_command);
	    # the intermediate atlod output is no longer needed
	    my $remove_command="rm -rf ".$output_file;
	    if ($output_debug>0){
		print "executing command: $remove_command\n";
	    }
	    system $remove_command;
	}
	if ($rc==0){
	    $num_success++;
	}
    }

    chdir $current_directory;

    if ($num_success==0){
	# something went wrong
	return -1;
    }
    return 0;
}

sub run_miriad_command {
    # Executes a MIRIAD command, capturing its combined stdout/stderr.
    # Each output line is echoed when debugging verbosely. Returns 0
    # normally, or -1 if the output contained a "### Fatal Error" line.
    my ($miriad_command)=@_;

    my $returncode=0;
    open(my $miriad_fh,"-|",$miriad_command." 2>&1");
    while(my $line=<$miriad_fh>){
        chomp $line;
        print "MIRIAD: $line\n" if ($output_debug>1);
        $returncode=-1 if ($line=~/^\#\#\#\sFatal\sError/);
    }
    close($miriad_fh);

    return $returncode;
}

sub postgresql_array_get {
    # turns a postgresql array string (e.g. "{1,2,3}") into a perl array
    my ($arraystring)=@_;

    if ($output_debug>2){
	print "attempting to turn $arraystring into array\n";
    }
    # strip the surrounding braces, then split on the commas
    $arraystring=~tr/{}//d;
    return split(/\,/,$arraystring);
}

sub extract_rpfits {
    # Locates the RPFITS file for an observation and extracts the single
    # scan (header number) of interest into the working directory using
    # the external rpfex tool. The local archive is searched first, then
    # the ATOA archive is tried via rsync.
    # Arguments: RPFITS file name, header number to extract, source name,
    # scan date/time string, working directory.
    # Returns the path of the extracted file, or "" if the RPFITS file
    # could not be found anywhere.
    my ($rpfits_file,$headernum,$source,$datetime,$work_directory)=@_;

    # some commands
    # NOTE(review): hard-coded user-specific tool path -- consider making
    # this configurable
    my $rpfits_extract_command="/n/ste616/bin/rpfex";

    # try to find the RPFITS file
    my @search_paths=("/DATA/ARCHIVE_1/ATCA/archive");
    my $found_file="";
    # strip any leading path components from the file name
    $rpfits_file=~s/^.*\/(.*)$/$1/;

    for (my $i=0;$i<=$#search_paths;$i++){
	$search_paths[$i]=~s/\/$//;
	my $search_file=$search_paths[$i]."/".$rpfits_file;
	if (-e $search_file){
	    $found_file=$search_file;
	    last;
	}
    }
    if ($found_file eq ""){
	# couldn't find this file!
	print "unable to find RPFITS file $rpfits_file from local repository!\n";

	# try to get this file from the ATOA; the file name is assumed to
	# begin with the observation date as "yyyy-mm-dd_"
	$rpfits_file=~/(\d{4})\-(\d{2})\-(\d{2})\_(.*)/;
	my $rpfits_year=$1;
	my $rpfits_month=$2;
	my $rpfits_archive_path="/data/ATOA_1/ATCA";
	# NOTE(review): this condition selects ATOA_2 only for months
	# Aug-Dec of any year >= 2009; if everything after Jul 2009 lives
	# in ATOA_2, the test would need to be
	# (year>2009 || (year==2009 && month>7)) -- confirm intent
	if (($rpfits_year>=2009)&&($rpfits_month>7)){
	    $rpfits_archive_path="/data/ATOA_2/ATCA";
	}
	$rpfits_archive_path.="/".$rpfits_year."-".$rpfits_month;

	my $copy_command="rsync -av ste616\@horus:".$rpfits_archive_path."/";
	$copy_command.=$rpfits_file;
	$copy_command.=" ".$work_directory."/";
	if ($output_debug>0){
	    print "executing: $copy_command\n";
	}
	system $copy_command;
	
	$found_file=$work_directory."/".$rpfits_file;
	if (!-e $found_file){
	    $found_file="";
	}

    }

    if ($found_file eq ""){
	# still couldn't find the file
	print "unable to find RPFITS file $rpfits_file from any repository!\n";
	
	return "";
    }

    # extract out the data
    $datetime=~s/\s/_/g;
    my $output_file=$source."_".$datetime.".fits";
    $work_directory=~s/\/$//;
    my $output_full=$work_directory."/".$output_file;
    my $extract_command=$rpfits_extract_command;
    # rpfex's -xN:N option extracts only header number N
    $extract_command.=" -x".$headernum.":".$headernum;
    $extract_command.=" -o ".$output_full;
    $extract_command.=" ".$found_file;

    if ($output_debug>0){
	print "extracting source RPFITS headers with command:\n";
	print $extract_command."\n";
    }
    if (!-e $output_full){
	system $extract_command;
    } # otherwise the file already exists and has already been extracted

    # return the location of the file we just extracted
    return $output_full;
    
}
