######################################################################
# Time-stamp: <2005-09-09 09:58:58 ostolop>
#
# Package: EP::Core::Data::Parser::exp::arrayexpress
#
# Expression Data Parser Classes: Special Parser for Exporting Data from ArrayExpress
######################################################################
package EP::Core::Data::Parser::exp::arrayexpress;

use strict;
use base qw ( EP::Core::Data::Parser );

use EP::Config;

use EP::Common;
use EP::Common::General;
use EP::Common::Error qw ( :try );

use PDL;
use PDL::NiceSlice;
use PDL::IO::FlexRaw;
use PDL::IO::Misc;
use PDL::NetCDF;

use File::Temp;
use File::Spec;

use Tie::IxHash;

# use lib qw ( /ebi/services/ep/local/lib/perl );

# sub: new
# Constructor: opens a connection to the ArrayExpress database (settings
# taken from $EP::Config::h) and prepares the statement handles used by
# parse_data().  Dies if the database connection cannot be established.
sub new {
  my $proto = shift;
  my $class = ref ( $proto ) || $proto;

  my $self = $class->SUPER::new(@_);

  my $h = $EP::Config::h;

  # Die (rather than warn, as before) on a failed connect: every prepare()
  # below and all of parse_data() need a live handle, so carrying on with
  # an undef $dbh would only crash later with a far less helpful
  # "can't call method on undefined value" error.
  # NOTE(review): DBI is used here but no "use DBI;" is visible in this
  # file -- presumably loaded by one of the EP::* modules; confirm.
  my $dbh = DBI->connect ( $h->{AE_DB}, $h->{AE_DBUSER}, $h->{AE_DBPASSWORD}, $h->{AE_DBATTR} )
    or die "Couldn't open connection to the database: $DBI::errstr\n";
  $self->{AE_DBH} = $dbh;

  # BioDataCube lookup for a bioassaydatagroup.  ora_auto_lob => 0 keeps
  # the LOB columns (netcdf, translationtable) as locators so parse_data
  # can stream them chunk-by-chunk via ora_lob_read.
  my $sql = <<__SQL__;
SELECT i.id, i.identifier, i.name, bdc.netcdf, bdc.order_, bdc.translationtable, bad.designelementdimension_id, bad.quantitationtypedimension_id
FROM TT_bioAssayDat_bioAssayDat badbad, tt_bioassaydata bad, tt_poly_biodatavalues bdvp, tt_biodatacube bdc, tt_identifiable i
WHERE badbad.bioassaydatagroups_id = ?
AND badbad.bioassaydatas_id = bad.id
AND bdvp.id = bad.biodatavalues_id
AND bdc.id = bdvp.t_biodatacube_id
AND i.id = bad.id
ORDER BY bad.id
__SQL__

  $self->{AE_BDC_SH} = $dbh->prepare ( $sql, { ora_auto_lob => 0 } );
  # Quantitation types of a quantitationtypedimension, in dimension order.
  $self->{AE_QTD_SH} = $dbh->prepare ( "SELECT qt.id, i.identifier, i.name FROM tt_quantitationtype qt, tt_identifiable i, tt_quantitatio_t_quantitat qtq WHERE qt.id = qtq.quantitationtypes_id AND qtq.t_quantitationtypedimension_id = ? AND i.id = qt.id ORDER BY qtq.num" );
  # Design element dimension row; elementlist is a LOB locator (streamed later).
  $self->{AE_DED_SH} = $dbh->prepare ( "SELECT * FROM tt_designelementdimension ded where ded.id = ?", { ora_auto_lob => 0 } );
  # Bioassays belonging to a bioassaydatagroup.
  $self->{AE_BA_SH}  = $dbh->prepare ( "SELECT i.id, i.identifier, i.name FROM tt_bioassays_bioassaydat bb, tt_identifiable i WHERE bb.bioassaydatagroups_id = ? AND i.id = bb.bioassays_id ORDER BY i.id" );
  # Scrapes the species name out of the generated experiment description,
  # i.e. the text between "species [" and the following "]".
  $self->{AE_SPECIES_SH} = $dbh->prepare ( <<__SQL2__ );
select SUBSTR ( d.text,
		9 + INSTR ( d.text, 'species ['),
	        INSTR ( SUBSTR ( d.text, 9 + INSTR ( d.text, 'species [') ), ']' ) - 1
) as species
from tt_experiment e, tt_description d, tt_bioassaydatagroup badg
where e.id=d.t_describable_id
and text like '(Generated%'
and badg.id = ?
and badg.experiment_id= e.id
__SQL2__

  bless $self, $class;   # re-bless is a no-op if SUPER::new already blessed into $class
  return $self;
}

# sub: DESTROY
# Destructor: releases the prepared statement handles and closes the
# database connection.  Each handle is guarded with a definedness check
# so that a partially-constructed object (e.g. new() died after a failed
# connect) does not throw again during global destruction.
sub DESTROY {
  my $self = shift;

  for my $key ( qw( AE_BDC_SH AE_DED_SH AE_QTD_SH AE_BA_SH AE_SPECIES_SH ) ) {
    $self->{$key}->finish if $self->{$key};
  }

  $self->{AE_DBH}->disconnect if $self->{AE_DBH};
}

# sub: parse_data
# Exports NetCDF data cubes for the bioassay data group selected in the
# CGI parameter "arrayexpress_data_export", assembles them into a single
# PDL matrix (columns = bioassay x quantitation-type, rows = design
# elements) with row/column annotations, and persists the result as a new
# expression dataset in the user's current folder.  Dies on any fatal
# problem; returns the created dataset object on success.
sub parse_data {
  my $self = shift;

  my $badg_id = $self->{query}->param ( "arrayexpress_data_export" );
  # NOTE(review): grammar slip in the message ("did not selected") --
  # left unchanged here since this edit touches comments only.
  die "You did not selected a data group!" if not $badg_id;

  warn "Looking for badg $badg_id";

  # Accumulates everything written out later: {column}, {row}, {matrix}.
  my $data;

  # TODO: obtain species from datagroup (e.g. from generated description as in the browse ui)
  # Map the scraped scientific name onto one of EP's configured species
  # keys; stays "unknown" when nothing matches.
  my $species = "unknown";
  my $species_sh = $self->{AE_SPECIES_SH};
  $species_sh->execute ( $badg_id );
  # NOTE(review): $DBI::errstr is a global and may still carry an error
  # from an *earlier* statement; checking $species_sh->err right after
  # execute would be more precise.  Same applies to the later checks.
  die "Failed to retrieve species info" if sql_error ( $DBI::errstr ) == 1;
  my ($ae_species) = $species_sh->fetchrow_array();

  foreach my $species_key ( keys %{$EP::Config::h->{SPECIES_METADATA}} ) {
    $species = $species_key if ( exists $EP::Config::h->{SPECIES_METADATA}->{$species_key} and $ae_species =~ /$EP::Config::h->{SPECIES_METADATA}->{$species_key}->{scientificName}/i );    
  }

  warn "Retrieved experiment for species: $species  -- $ae_species";
  
  #   1. Get bioassaydatacube info and others
  my $bdc_sh = $self->{AE_BDC_SH};
  $bdc_sh->execute ( $badg_id );
  die "Failed to retrieve BioDataCube info" if sql_error ( $DBI::errstr ) == 1;

  my @bads;
  # Oracle LOBs are streamed in chunks of this many bytes (ora_lob_read
  # offsets are 1-based).
  my $chunk_size = 32767;
  while ( my $r = $bdc_sh->fetchrow_hashref () ) {
    # Stream each NetCDF LOB into a temp file; the File::Temp object is
    # kept on $r (as netcdf_nc) and later stringified to get the
    # filename.  The file itself lives until the object is destroyed
    # (File::Temp unlinks on destruction by default -- TODO confirm).
    my $tmp_nc_fh = new File::Temp;
    my $offset = 1;
    while ( my $data = $self->{AE_DBH}->func ( $r->{netcdf}, $offset, $chunk_size, 'ora_lob_read' ) ) {
      print $tmp_nc_fh $data;
      $offset += $chunk_size;
    }
    close($tmp_nc_fh);
    $r->{netcdf_nc} = $tmp_nc_fh;

    # translation tables
#    my $transtable_data;
#    $offset = 1;
#    while ( my $data = $self->{AE_DBH}->func ( $r->{translationtable}, $offset, $chunk_size, 'ora_lob_read' ) ) {
#      $transtable_data .= $data;
#      $offset += $chunk_size;
#    }
#   $r->{transtable_data} = $transtable_data;

    # Shell out to ncdump to read the DE (design element) dimension size
    # from the NetCDF header.  $tmp_nc_fh interpolates as the temp file
    # path (File::Temp stringification).
    my ( $DE ) = `$EP::Config::h->{NCDUMP_CMD} -h $tmp_nc_fh | grep DE` =~ /DE = (\d+)/;
    warn "Found $DE design elements in $tmp_nc_fh, $r->{id}";
    # Hard cap: EP:NG cannot handle cubes beyond 300k rows.
    if ( $DE > 300000 ) {
      die "One of the datasets in this data group had more than 300000 rows - EP:NG doesn't handle data that big at this moment. This often happens if you try to load raw probe-level Affymetrix data - you may wish to either load in the original CEL files - EP:NG can do that via the Affymetrix tab in this component - or use data groups labelled as 'experimental conditions', not as 'hybridizations or scans'.";
    }

    push @bads, $r;
  }

  #   2. Read annotations (DED)
  # All BADs are assumed to share the design element dimension of the
  # first one -- TODO confirm that assumption holds for every data group.
  my $ded_id = $bads[0]->{designelementdimension_id};
  my $ded_sh = $self->{AE_DED_SH};
  $ded_sh->execute ( $ded_id );
  die "Failed to retrieve DesignElementDimension info" if sql_error ( $DBI::errstr ) == 1;

  my $ded_r = $ded_sh->fetchrow_hashref();
  my $offset = 1;   # Offsets start at 1, not 0

  # Stream the element list LOB: newline-separated records, each
  # "name<TAB>dbid" (parsed further down when building rows).
  my $ded_data;
  while ( my $data = $self->{AE_DBH}->func ( $ded_r->{elementlist}, $offset, $chunk_size, 'ora_lob_read' ) ) {
    $ded_data .= $data;
    $offset += $chunk_size;
  }

  # NOTE(review): in all three branches below $s1/$s2 are prepared but
  # never executed and go out of scope immediately -- dead code,
  # presumably groundwork for the SpecializedQuery.java TODOs further
  # down.  Kept untouched here.
  if ( $ded_r->{t_featuredimension_id} ) {
    warn "Fetching all features";
    my $s1 = $self->{AE_DBH}->prepare ( "SELECT CONTAINEDFEATURES_ID AS ID FROM TT_CONTAINEDFE_T_FEATUREDI WHERE T_FEATUREDIMENSION_ID = ? ORDER BY NUM" );
    my $s2 = $self->{AE_DBH}->prepare ( "SELECT TT_DEPROPERTYVALUE.VALUE, TT_FEATURE.ID, TT_DEPROPERTYVALUE.DEPROPERTY_ID
FROM TT_DEPROPERTYVALUE, TT_CONTAINEDFE_T_FEATUREDI, TT_FEATURE
WHERE (TT_DEPROPERTYVALUE.DESIGNELEMENT_ID = TT_FEATURE.ID OR
TT_DEPROPERTYVALUE.DESIGNELEMENT_ID = TT_FEATURE.COMPOSITESEQUENCE_ID)
AND TT_FEATURE.ID = TT_CONTAINEDFE_T_FEATUREDI.CONTAINEDFEATURES_ID
AND TT_CONTAINEDFE_T_FEATUREDI.T_FEATUREDIMENSION_ID = ?
AND TT_DEPROPERTYVALUE.DEPROPERTY_ID IN (?)
ORDER BY TT_CONTAINEDFE_T_FEATUREDI.NUM, DEPROPERTY_ID" );
  } elsif ( $ded_r->{t_reporterdimension_id}) {
    warn "Fetching all reporters";
    my $s1 = $self->{AE_DBH}->prepare ( "SELECT REPORTERS_ID AS ID FROM TT_REPORTERS_T_REPORTERD WHERE T_REPORTERDIMENSION_ID = ? ORDER BY NUM" );
    my $s2 = $self->{AE_DBH}->prepare ( "select /*+ FIRST_ROWS */ TT_DEPROPERTYVALUE.VALUE, TT_REPORTER.ID, TT_DEPROPERTYVALUE.DEPROPERTY_ID
FROM TT_DEPROPERTYVALUE, TT_REPORTERS_T_REPORTERD, TT_REPORTER
WHERE (TT_DEPROPERTYVALUE.DESIGNELEMENT_ID = TT_REPORTER.ID OR
TT_DEPROPERTYVALUE.DESIGNELEMENT_ID = TT_REPORTER.COMPOSITESEQUENCE_ID)
AND TT_REPORTER.ID = TT_REPORTERS_T_REPORTERD.REPORTERS_ID
AND TT_REPORTERS_T_REPORTERD.T_REPORTERDIMENSION_ID = ?
AND TT_DEPROPERTYVALUE.DEPROPERTY_ID IN (?)
ORDER BY TT_REPORTERS_T_REPORTERD.NUM, DEPROPERTY_ID" );
  } else {
    warn "Fetching all composite seqs";
    my $s1 = $self->{AE_DBH}->prepare ( "SELECT COMPOSITESEQUENCES_ID AS ID FROM TT_COMPOSITESE_T_COMPOSIT1 WHERE T_COMPOSITESEQUENCEDIMENSIO_ID = ? ORDER BY NUM" );
    my $s2 = $self->{AE_DBH}->prepare ( "SELECT /*+ FIRST_ROWS */ TT_DEPROPERTYVALUE.VALUE, TT_DEPROPERTYVALUE.DESIGNELEMENT_ID AS ID, TT_DEPROPERTYVALUE.DEPROPERTY_ID
FROM TT_DEPROPERTYVALUE, TT_COMPOSITESE_T_COMPOSIT1
WHERE TT_DEPROPERTYVALUE.DESIGNELEMENT_ID = TT_COMPOSITESE_T_COMPOSIT1.COMPOSITESEQUENCES_ID
AND TT_COMPOSITESE_T_COMPOSIT1.T_COMPOSITESEQUENCEDIMENSIO_ID = ?
AND TT_DEPROPERTYVALUE.DEPROPERTY_ID IN (?)
ORDER BY TT_COMPOSITESE_T_COMPOSIT1.NUM, DEPROPERTY_ID" );
  }

  # 4. Read Quantitation Types
  # Again taken from the first BAD; names fall back to identifiers.
  my $qtd_id = $bads[0]->{quantitationtypedimension_id};
  my $qtd_sh = $self->{AE_QTD_SH};
  $qtd_sh->execute ( $qtd_id );
  die "Failed to retrieve QuantitationTypeDimension info" if sql_error ( $DBI::errstr ) == 1;

  my @qts;
  while ( my $r = $qtd_sh->fetchrow_hashref() ) {
    if ( $r->{name} ) {
      push @qts, $r->{name}
    } else {
      push @qts, $r->{identifier}
    }
  }

  # 5. Factor Values
  # TODO (See SpecializedQuery.java)

  # 6. Gene Annotations
  # TODO (See SpecializedQuery.java)

  # PREPARE OUR OBJECTS
  # columns: one column per (bioassay, quantitation type) pair, labelled
  # "<bioassay name>/<qt name>".
  my $col = 0;
  my $ba_sh = $self->{AE_BA_SH};
  $ba_sh->execute ( $badg_id );
  my @bas;
  while ( my $bad = $ba_sh->fetchrow_hashref() ) {
    my $baname = $bad->{name} ? $bad->{name} : $bad->{identifier};
    foreach ( @qts ) {
      $data->{column}{$col}->{full_annot} = new Tie::IxHash if not exists $data->{column}{$col}->{full_annot};
      $data->{column}{$col}->{full_annot}->Push ( 'column_id', $baname . "/" . $_ );
      $col++;
    }
    push @bas, $bad;
  }

  # rows: one per design element, annotated with its name (the DB id in
  # the second field is currently ignored -- see commented line below).
  my $row = 0;
  my @deds = split /\n/, $ded_data;
  foreach ( @deds ) {
    my ( $name, $dbid ) = split /\t/, $_;
    $data->{row}{$row}->{full_annot} = new Tie::IxHash if not exists $data->{row}{$row}->{full_annot};
    $data->{row}{$row}->{full_annot}->Push ( 'Name', $name );
#    $data->{row}{$row}->{full_annot}->{DBIdentifier} = $dbid;
    $row++;
  }

  # data: slice each cube down to per-bioassay 2-D matrices and append
  # them into one big matrix.  The first NetCDF file is also passed to
  # subsequent PDL::NetCDF constructors (second argument).
  my $matrix;
  my $ctr = 1;
  my $first_nc = new PDL::NetCDF ( "$bads[0]->{netcdf_nc}" );

  foreach my $bad ( @bads ) {
    my $nc;
    $nc = $first_nc if $ctr == 1;
    $nc = new PDL::NetCDF ( "$bad->{netcdf_nc}", $first_nc ) if $ctr > 1;
    my $bdc = $nc->get ( "BDC" );
    my $baids = $nc->get ( "BAIds" );
    # order_ is a 3-letter permutation of B(ioassay), D(esign element),
    # Q(uantitation type) describing the cube's axis layout.
    my $order = $bad->{order_};

    # index of B in the BDC - note reversed indexing (a PDL::NetCDF feature?)
    my $b_ind;
    $b_ind = 2 if $order =~ /B../;
    $b_ind = 1 if $order =~ /.B./;
    $b_ind = 0 if $order =~ /..B/;

    my $b_size = $bdc->dim($b_ind);

    warn "Looking for " . join ( ", ", map { $_->{id} } @bas ) . " in netcdf's baids $baids for cube " . $bdc->info . " of order $order " . "(size $b_size)";

    # Translate the DB's bioassay ids into positions along the cube's
    # B axis via the BAIds variable.
    my @baorder;
    foreach my $ba ( @bas ) {
      my $ba_o = which ( $baids == $ba->{id} );
      push @baorder, $ba_o->sclr if not $ba_o->isempty;
    }
    warn "Didn't find any of " . join ( ", ", map { $_->{id} } @bas ) . " in netcdf's baids $baids for cube " . $bdc->info . " of order $order " . "(size $b_size)"
      if scalar @baorder == 0;

    if ( scalar @baorder == 0 ) {
      if ( $b_size == 1 ) {
	warn "Assuming the one B is the right one *and* (NB!) the order of BADs returned by the DB corresponds magically to the order of the BAs (baids was $baids)";
	@baorder = (0);
      } else {
	next;
      }
    }

    # Slice out each bioassay's 2-D (design element x quantitation type)
    # plane; which PDL axis to fix depends on where B sits in $order
    # (remember: PDL axes are reversed relative to the order string).
    foreach my $b ( @baorder ) {
      my $m;
      $m = $bdc->(:,:,$b)->reshape ( $bdc->dim(0), $bdc->dim(1) ) if $order =~ /BDQ|BQD/;
      $m = $bdc->(:,$b,:)->reshape ( $bdc->dim(0), $bdc->dim(2) ) if $order =~ /DBQ|QBD/;
      $m = $bdc->($b,:,:)->reshape ( $bdc->dim(1), $bdc->dim(2) ) if $order =~ /QDB|DQB/;

      warn "$b: " . $m->info;

      if ( not defined $matrix ) {
	$matrix = $m;
      } else {
	$matrix = $matrix->append ( $m );
      }
    }

    $self->{epc}->{session}->status ( "Loading data ($ctr out of " . scalar @bads . ")" );
    $nc->close;
    $ctr++;
  }

  die "Failed to read any data from ArrayExpress!" if not defined $matrix;

  # Sentinel values outside this range are ArrayExpress missing-value
  # markers -- turn them into PDL bad values.
  $matrix = $matrix->setbadif ( $matrix <= -1000000 );
  $matrix = $matrix->setbadif ( $matrix >= 10000000 );

  $data->{matrix} = $matrix;

  my $cur_folder = $self->{epc}->{session}->current_folder;
  # NOTE(review): mktemp() is only exported by File::Temp under the
  # :mktemp tag, but this file has a plain "use File::Temp;" -- confirm
  # mktemp is actually in scope here.
  my $new_filename = mktemp ( File::Spec->catfile ( $EP::Config::h->{EPNG_USRDATA_PATH}, $cur_folder->location, "data.$$.XXXXX" ) );

  # NOTE(review): $data->{data} is never assigned anywhere in this sub,
  # so this -z test runs against an undef/empty path -- verify intent
  # (copied from a sibling parser?).
  throw EP::Common::Error ( -value => CUD_NO_DATA_SUPPLIED ) if -z "$data->{data}";

  if ( $self->_process_uploaded_data ( $data ) ) {
    # Create the dataset record and write its files inside a single
    # transaction; either both happen or neither does.
    my ($dataset) = EP::Common::DBI->do_transaction (
     sub {
       my ($self) = @_;

       my $dataset = EP::Common::DBI::Dataset->create ({ name         => substr($self->{query}->param ( "dataset_name" ), 0, 72) . " : " . localtime,
							 description  => $self->{query}->param ( "dataset_description" ) . "",
							 type         => "exp",
							 species      => $species,
							 folder       => $cur_folder->id,
							 ep_session      => $self->{epc}->{current_user}->session_only ? $self->{epc}->{session}->id : undef,
							 owner        => $self->{epc}->{current_user},
							 group_write  => 0,
							 group_read   => 1,
							 other_read   => 0
						       });

       $self->_write_dataset_data ( $dataset, $data ) or die "Failed to write the dataset data!";
       return $dataset;
     }, $self );

    return $dataset;
  }

  die "Failed to process uploaded data.";
}

# sub: _process_uploaded_data
# Post-load hook: pushes a status message to the user's session and logs
# the shape of the loaded matrix.  Always reports success (returns OK).
sub _process_uploaded_data {
  my ( $self, $data ) = @_;

  $self->{epc}->{session}->status ( "Data loaded. Calculating statistics..." );
  warn "Loaded in " . $data->{matrix}->info;

  return OK;
}

# sub: _write_dataset_data
# Writes the data files for an expression dataset to the disk:
#   <filename>.rows / <filename>.columns -- tab-separated annotation
#     tables ("ID" plus the Tie::IxHash annotation keys of the first
#     entry as header, then one line per row/column, 0-based index
#     first), and
#   <filename>.bin / <filename>.transpose.bin -- the matrix (and its
#     transpose) as FlexRaw float data with dimension headers.
# Returns 1 on success; dies on any I/O error.
sub _write_dataset_data {
  my $self = shift;
  my ( $dataset, $data ) = @_;

  my $folder   = $EP::Config::h->{EPNG_USRDATA_PATH} . "/" . $dataset->folder->location;
  my $filename = $dataset->filename;

  # Three-arg open with lexical handles (originals were bareword,
  # two-arg interpolated opens).
  open ( my $columns_fh, '>', "$folder/$filename.columns" ) or die "Couldn't open $folder/$filename.columns, $!";
  open ( my $rows_fh,    '>', "$folder/$filename.rows" )    or die "Couldn't open $folder/$filename.rows, $!";

  # Header lines: annotation key names are taken from the first entry --
  # all entries are assumed to share the same keys.
  my @row_keys = keys %{$data->{row}};
  my @row_annot_names = $data->{row}{$row_keys[0]}{full_annot}->Keys;
  print $rows_fh "ID\t", join ( "\t", @row_annot_names ), "\n";

  my @col_keys = keys %{$data->{column}};
  my @col_annot_names = $data->{column}{$col_keys[0]}{full_annot}->Keys;
  print $columns_fh "ID\t", join ( "\t", @col_annot_names ), "\n";

  # Columns are written once, up front.  (The original wrote them inside
  # the row loop guarded by a %seen hash -- identical output, but
  # O(rows * cols) loop iterations for nothing.)
  my $col = 0;
  foreach my $col_id ( sort { $a <=> $b } keys %{ $data->{column} } ) {
    print $columns_fh "$col\t", join ( "\t", $data->{column}{$col_id}{full_annot}->Values ), "\n";
    $col++;
  }

  my $row = 0;
  foreach my $row_id ( sort { $a <=> $b } keys %{ $data->{row} } ) {
    print $rows_fh "$row\t", join ( "\t", $data->{row}{$row_id}{full_annot}->Values ), "\n";
    $row++;
  }

  # Buffered write errors only surface at close -- check it.
  close $columns_fh or die "Couldn't close $folder/$filename.columns, $!";
  close $rows_fh    or die "Couldn't close $folder/$filename.rows, $!";

  # Matrix as FlexRaw: two longs (dims) followed by the float data, with
  # a separate .hdr file describing the layout.
  my $hdr = writeflex ( "$folder/$filename.bin",
		       long ( $data->{matrix}->dim(1) ),
		       long ( $data->{matrix}->dim(0) ),
		       float ( $data->{matrix} ) );
  writeflexhdr ( "$folder/$filename.bin", $hdr );

  # And the transpose, so either orientation can be streamed back fast.
  $hdr = writeflex ( "$folder/$filename.transpose.bin",
		     long ( $data->{matrix}->dim(0) ),
		     long ( $data->{matrix}->dim(1) ),
		     float ( transpose ( $data->{matrix} ) ) );
  writeflexhdr ( "$folder/$filename.transpose.bin", $hdr );

  warn "Successfully written expression dataset $filename to folder $folder" if DEBUG;

  return 1;
}

# sub: _read_dataset
# reads in the expression data from the disk and loads it in memory for use with the Dataset object
# Populates $dataset->{row}, {column}, {matrix} plus the derived
# row_count/column_count/stats fields from the files written by
# _write_dataset_data.  NB: plain function, not a method -- takes only
# the dataset object.
sub _read_dataset {
  my ($dataset) = @_;

  my $folder   = $EP::Config::h->{EPNG_USRDATA_PATH} . "/" . $dataset->folder->location;
  my $filename = $dataset->filename;

  print STDERR "[EP_Core] No folder name defined\n" and return if $folder =~ /^\s*$/;
  print STDERR "[EP_Core] No dataset filename defined\n" and return if $filename =~ /^\s*$/;

  # --- row identifiers and annotations --------------------------------
  # Three-arg open with lexical handles (originals were bareword two-arg
  # opens whose failure path printed to STDERR then died without a message).
  open my $rows_fh, '<', "$folder/$filename.rows"
    or die "Couldn't open $filename.rows, $!\n";

  chomp (my $row_annot_header = <$rows_fh>);
  my @row_annot_names = split /\t/, $row_annot_header;
  shift @row_annot_names;	# shift because the first item there is "ID"

  while ( my $line = <$rows_fh> ) {
    chomp $line;
    my ($row_id, @row_annot) = split /\t/, $line;

    # Full annotation keyed by the header names.
    my $row_annotation = {};
    $row_annotation->{$row_annot_names[$_]} = $row_annot[$_] foreach 0 .. $#row_annot_names;

    # NB: the "mappped_row_header" key spelling (triple p) is kept as-is;
    # downstream consumers may rely on it.
    $dataset->{row}{$row_id} = { header => $row_id,
				 mappped_row_header => "",
				 annot => \@row_annot,
				 full_annot => $row_annotation
			       };
  }
  close $rows_fh;

  # --- column identifiers and annotations -----------------------------
  open my $columns_fh, '<', "$folder/$filename.columns"
    or die "Couldn't open $filename.columns, $!\n";

  chomp (my $col_annot_header = <$columns_fh>);
  my @col_annot_names = split /\t/, $col_annot_header;
  shift @col_annot_names;	# shift because the first item there is "ID"

  while ( my $line = <$columns_fh> ) {
    chomp $line;
    my ($col_id, @col_annot) = split /\t/, $line;

    my $col_annotation = {};
    $col_annotation->{$col_annot_names[$_]} = $col_annot[$_] foreach 0 .. $#col_annot_names;

    $dataset->{column}{$col_id} = { header => $col_id,
				    annot => \@col_annot,
				    full_annot => $col_annotation
				  };
  }
  close $columns_fh;

  # --- the matrix itself ----------------------------------------------
  # FlexRaw layout (see _write_dataset_data): rows, cols, float matrix.
  my ($rows, $cols);
  ( $rows, $cols, $dataset->{matrix} ) = readflex ( "$folder/$filename.bin" );

  # Sanity check the stored dims against the piddle's; note PDL returns
  # dims as (ncols, nrows), hence the crossed comparison.
  my @dims = $dataset->{matrix}->dims();
  die "Problem with reading in PDL matrix from $folder/$filename.bin" if $rows != $dims[1] or $cols != $dims[0];

  warn "Read " . $dataset->{matrix}->info() if DEBUG;
  # NaNs in the stored file become PDL bad values.
  $dataset->{matrix}->inplace->setnantobad;
  $dataset->{matrix}->badflag(1);

  #add some extra stats to object
  $dataset->{binary} = "$filename.bin" if -f "$folder/$filename.bin";
  $dataset->{row_count} = scalar( keys %{ $dataset->{row} } );
  $dataset->{column_count} = scalar( keys %{ $dataset->{column} } );
  $dataset->{stats} = [$dataset->{matrix}->stats()];
}

# sub: sql_error
# Helper for the post-execute checks in parse_data: given a DBI error
# string, logs it via warn and returns 1; returns 0 when the string is
# empty/undef (i.e. no error).
sub sql_error {
  my ($err) = @_;

  return 0 unless $err;

  warn "[DB ERROR] $err";
  return 1;
}

1;
