//========================================================================================
// HashSamplingOverlapper.cc
//
// ----------------------------------------------------------------------------
// $Id: $
//------------------------------------------------------------------------------
// $Log: $
//
//
// KJD: Sometimes interval hits indicate span greater than source size. 
// Why does this happen, and what should be done about it?  For now, I truncate. 
//
//========================================================================================
//.......1.........2.........3.........4.........5.........6.........7.........8.........9
//

#include "HashSamplingOverlapper.hh"

extern LogStream ls,ls2,ls3;
extern bool gDEBUG;

//---------------------------------------------------
// * ComputeGlobalRepeatStatistics
//---------------------------------------------------
// Pick some random samples and search them against the search engine
// to determine the distribution of hits for randomly chosen 
// samples.  Samples that hit significantly above the mean 
// can later be filtered as being repeats. 
//
//
void
HashSamplingOverlapper::ComputeGlobalRepeatStatistics(SequenceVector &Queries,
                                                      HashSampleSearchEngine &HSS)
{
  cout<<"\tCompute repeat statistics..."<<flush;

  RandomNumberGenerator RNG;
  SeqSampler SS;
  SequenceVector Samples;

  int numQueries = Queries.size();
  vector<int> HitsBySample;

  const int NumStatQueries = 2000; // Number of queries to use for repeat stats.
  Samples.resize(NumStatQueries);
  HitsBySample.resize(NumStatQueries);

  // Generate a bunch of random samples from the queries.
  for(int i = 0; i < NumStatQueries; i++){
    int qNum = (int)RNG.Uniform(0,numQueries-1);

    // DEBUG: look for zero-length queries.
    for(int q = 0; q < Queries.size(); q++){
      if (Queries[q].size() == 0){
        cout<<"HEY.  Queries[q].size=0 q="<<q<<endl;
        // Print the neighbors for context, but stay inside the vector.
        // (BUG FIX: the original unconditionally printed Queries[q-1] and
        // Queries[q+1], which reads out of range when the bad query is
        // first or last.)
        if (q > 0) Queries[q-1].Print();
        Queries[q].Print();
        if (q+1 < Queries.size()) Queries[q+1].Print();
        exit(1);
      }
    }

    // DEBUG: testing for less than sample size since we're about to grab one.
    if (Queries[qNum].size() < mSampleSize){
      cout<<"!!HEY!!! Queries[qNum].size() < mSampleSize"<<endl;
      cout<<"Queries[qNum].size()="<<Queries[qNum].size()<<endl;
      cout<<"mSampleSize="<<mSampleSize<<endl;
      cout<<"qNum="<<qNum<<" numQueries="<<numQueries<<endl;
      cout<<"Queries.size()="<<Queries.size()<<endl;
      cout<<"NumStatQueries="<<NumStatQueries<<endl;
      for(int q = 0; q < Queries.size(); q++){
        cout<<"Queries["<<q<<"].size="<<Queries[q].size()<<endl;
      }
      // Same bounds guards as above (BUG FIX).
      if (qNum > 0) Queries[qNum-1].Print();
      Queries[qNum].Print();
      if (qNum+1 < numQueries) Queries[qNum+1].Print();
      exit(1);
    }
    SS.GetRandomSample(mSampleSize,Queries[qNum],Samples[i]);
    if (Queries[qNum].size() == 0) cout<<"ERROR Query size = 0"<<endl;
    if (Samples[i].size() == 0) cout<<"ERROR Samples[i] = 0"<<endl;

    HitsBySample[i] = 0;
  }

  // Search the samples against the sources encoded in the search engine.
  SampleSearchResultVec Results;
  HSS.SearchWithSamples(Samples,Results);

  // Add up the number of hits for each sample.
  long TotalHits = 0;
  for(int r = 0; r < Results.size(); r++){
    HitsBySample[Results[r].mSampleIdx]++;
    TotalHits++;
  }
  mMeanHitsPerSample = (float)TotalHits/(float)NumStatQueries;

  // Sample standard deviation of hits-per-sample.  (Accumulate in double:
  // the original summed squared deviations into a float, losing precision.)
  double totalDeviation = 0;
  for(int i = 0; i < NumStatQueries; i++){
    double diff = HitsBySample[i] - mMeanHitsPerSample;
    totalDeviation += diff*diff;
  }
  double variance = (1.0/(NumStatQueries-1.0))*totalDeviation;
  mStDevHitsPerSample = sqrt(variance);

  // Actual cutoff for repeat samples: samples hitting more often than
  // mean + cutoff*stdev are treated as repeats.
  mRepeatThreshold = (int)((mStDevRepeatCutoff*mStDevHitsPerSample)+
                           mMeanHitsPerSample);

  cout<<"done."<<endl;

  cout<<"\tMeanHitsPerSample="<<mMeanHitsPerSample;
  cout<<" StDevHitsPerSample="<<mStDevHitsPerSample;
  cout<<" RepeatThreshold = "<<mRepeatThreshold;
  cout<<endl<<endl;
}

//---------------------------------------------------
// * ComputeSourceGroupSizes
//---------------------------------------------------
//
void
HashSamplingOverlapper::ComputeSourceGroupSizes()
{
  
  // figure out how much RAM we have, average size of a source, 
  // and come up with an average number of sources in each ST. 
  // This should accomodate worst cases. 
  float temp =((float)mpSources->size()/(float)mSourcesInGroup);

  int itemp = (int)temp;

  if (temp != itemp){
	 mNumSourceGroups = itemp+1;
  }else{
	 mNumSourceGroups = itemp;
  }
}

//---------------------------------------------------
// * AddToGraph
//---------------------------------------------------
// NOTE: OverlapGraph is assumed to already have space for 
// all of the nodes!
// 
void
HashSamplingOverlapper::AddToGraph(int QIdx,vector<float> &Overlaps,
											  AdjacencyListGraph & OverlapGraph)
{
  // Name the node with the query name
  OverlapGraph[QIdx].mName = (*mpQueries)[QIdx].mName;
  
  // For each overlap, if the overlap is large enough, put it in 
  // the OverlapGraph.  
  for(int i = 0;i< Overlaps.size();i++){
	 if (Overlaps[i] >= mMinOverlapForGraph){
		OverlapGraph[QIdx].AddEdge((*mpSources)[i].mName,Overlaps[i]);
	 }
  }
}

//---------------------------------------------------
// * ComputeHitsAndIntervalHitsOnSources
//---------------------------------------------------
// !! Must be called after ComputeSampleHitStats
//
void
HashSamplingOverlapper::ComputeHitsAndIntervalHitsOnSources(SampleSearchResultVec &SearchResults,
                                                            SampleHitStats &Stats)
{
  // Nothing to tally for an empty result set.
  if (SearchResults.size() == 0) return;

  // Total hits per sample; consumed below for repeat filtering.
  for(int r = 0; r < SearchResults.size(); r++){
    Stats.mTotalHitsBySample[SearchResults[r].mSampleIdx]++;
  }

  // Order hits by location within a source, then by source index, so that
  // consecutive results on the same source can be compared pairwise.
  SearchResults.SortBySourceLoc();
  SearchResults.SortBySourceIdx();

  // ('register' dropped throughout: deprecated in C++11, removed in C++17.)
  long LastSampleIdx = SearchResults[0].mSampleIdx;
  long LastSourceLoc = SearchResults[0].mSourceLoc;

  // KJD r=0 correct start point for interval??
  // NOTE(review): at r==0 the interval is computed against itself (diff 0),
  // which always records an interval hit for the first result -- confirm
  // this is intended.
  for(int r = 0; r < SearchResults.size(); r++){

    // Search engine has been changed so that SourceIdx is relative to the
    // global source idx, not to the local results.
    long SourceIdx = SearchResults[r].mSourceIdx;
    long SampleIdx = SearchResults[r].mSampleIdx;
    long SourceLoc = SearchResults[r].mSourceLoc;

    // Samples which hit more than the derived repeat threshold are not counted.
    if (Stats.mTotalHitsBySample[SampleIdx] < mRepeatThreshold){
      Stats.mTotalHitsOnSource[SourceIdx]++;
      Stats.mLastHitOnSource[SourceIdx] = SourceLoc;
    }else{
      Stats.mRepeatFilterCount++;
    }

    // Observed distance between this hit and the previous one, vs. the
    // distance expected from the sample indices and sample spacing.
    long interval = SourceLoc - LastSourceLoc;
    long targetInterval = (SampleIdx - LastSampleIdx)*mSampleSpacing;

    // Need something to check that samples are monotonic.

    // If the observed and expected intervals agree to within one sample
    // length, record an interval hit and accumulate the spanned bases.
    long intervalDiff = abs(abs(interval)-abs(targetInterval));
    if (intervalDiff < mSampleSize){
      Stats.mIntervalHitsOnSource[SourceIdx]++;
      Stats.mIntervalSpanOnSource[SourceIdx]+= abs(targetInterval);
    }
    LastSampleIdx = SampleIdx;
    LastSourceLoc = SourceLoc;
  }
}



//---------------------------------------------------
// * FilterInteriorSingletons
//---------------------------------------------------
// !! Must be called after ComputeHitsOnSource or ComputeHitsAndIntervalHits
// 
// Removes singleton hits that are not on the edges of reads. 
// 
void 
HashSamplingOverlapper::FilterInteriorSingletons(SampleHitStats &Stats)
{
  // Remove singleton hits that land in the interior of a source; a lone
  // hit away from either end is treated as noise.  Not efficient, but hey...
  for(int s = 0; s < mpSources->size(); s++){
    // Only sources that collected exactly one hit are candidates.
    if (Stats.mTotalHitsOnSource[s] == 1){
      int idx = Stats.mLastHitOnSource[s];
      // BUG FIX: was 'mpSources[s].size()'.  mpSources is a pointer, so
      // that expression indexes the pointer itself as an array of
      // SequenceVectors -- reading past the object for any s > 0, which
      // is almost certainly the out-of-range behavior noted in the old
      // "KJD BUG" comment.  The s-th source sequence is (*mpSources)[s].
      if ((idx > 50) && (idx < ((*mpSources)[s].size()-50))){
        Stats.mTotalHitsOnSource[s] = 0;
      }
    }
  }
}

// Ad-hoc timing scratch globals; reset per source group in
// ComputeOverlapGraph and reported in its per-group summary output.
long bugStartTime;        // start tick for the current timing interval
double bugCumulativeTime; // accumulated search time (seconds)
double bugElapsedTime;    // most recently measured interval (seconds)

//---------------------------------------------------
// * ComputeOverlapGraph
//---------------------------------------------------
// Computes the complete (sparse) overlap graph for the given 
// set of sources and queries. 
//
//
void
HashSamplingOverlapper::ComputeOverlapGraph(AdjacencyListGraph &OverlapGraph)
{
  // Repeat statistics only need to be computed once, against the first
  // source group; the flag below tracks that first pass.
  bool bFirstTime = true;
  FwdStats.mRepeatFilterCount = 0;
  RevStats.mRepeatFilterCount = 0;

  // Add nodes for all of the queries.
  OverlapGraph.resize(mpQueries->size());

  // Partition the sources into groups (one suffix tree / hash of reads
  // per group), sized to fit in memory.
  ComputeSourceGroupSizes();
  mSampleSpacing = (int)(1000.0/(float)mSamplesPerKbp);
  for(int SourceGroup = 0; SourceGroup < mNumSourceGroups; SourceGroup++){

    bugStartTime = 0;
    bugCumulativeTime = 0;
    bugElapsedTime = 0;

    cout<<"SourceGroup#: "<<SourceGroup<<" "<<flush;

    // Create a sample-hash search engine for the sources in this group.
    HashSampleSearchEngine *pHSS = new HashSampleSearchEngine();
    pHSS->mSampleSize = mSampleSize;
    pHSS->mSampleDensity = mSamplesPerKbp;
    pHSS->mTotalSamples = 0;

    // Inclusive index range of the sources handled by this group.
    mSourceStart = (SourceGroup*mSourcesInGroup);
    mSourceEnd = (SourceGroup+1)*mSourcesInGroup-1;
    if (mSourceEnd >= mpSources->size()) mSourceEnd = mpSources->size()-1;
    int NumSources = mSourceEnd - mSourceStart + 1;

    cout<<"  (SourcesInGroup: "<<NumSources;
    cout<<"  Source Start: "<<mSourceStart<<" SourceEnd: "<<mSourceEnd<<")"<<endl;

    long StartTime;
    StartTiming(StartTime);
    cout<<"\tCreate source hash.."<<flush;
    pHSS->Create(*mpSources,mSourceStart,mSourceEnd);
    float elapsedTime = EndTiming(StartTime);
    cout<<"done. "<<elapsedTime<<" sec."<<endl<<endl;

    // Compute statistics about the distribution of repeat samples (for
    // filtering).  BUG FIX: bFirstTime was never cleared, so these
    // statistics were silently recomputed for every source group.
    if (bFirstTime){
      ComputeGlobalRepeatStatistics(*mpQueries,*pHSS);
      bFirstTime = false;
    }

    StartTiming(StartTime);
    cout<<"\tCompute overlaps "<<mpQueries->size();
    cout<<" queries vs. "<<NumSources<<" sources.."<<flush;
    // Compare every query to the sources in this group.
    for(int QueryIdx = 0; QueryIdx < (*mpQueries).size(); QueryIdx++){
      if (!(QueryIdx%1000)) cout<<"."<<flush;  // progress dots
      ComputeOverlapsForQuery(QueryIdx,*pHSS,(*mpQueries),OverlapGraph);
    }
    float ElapsedTime = EndTiming(StartTime);
    cout<<"done. "<<endl;
    cout<<"\tQuery Samples: "<<pHSS->mTotalSamples;
    cout<<"   TotalTime: "<<ElapsedTime<<" seconds.";
    cout<<"   SearchTime: "<<bugCumulativeTime<<" seconds."<<endl;
    cout<<endl;
    delete pHSS;
  }
}


//---------------------------------------------------
// * EstimateOverlaps
//---------------------------------------------------
//
void
HashSamplingOverlapper::EstimateOverlaps(vector<float> &Overlaps,
                                         SampleHitStats &FwdStats,
                                         SampleHitStats &RevStats)
{
  // For each source in the current group, estimate the overlap (in bp) as
  // the larger of the forward-strand and reverse-strand interval spans.
  // ('register' dropped: deprecated in C++11, removed in C++17; stale
  // commented-out estimator variants referencing an out-of-scope variable
  // were removed.)
  for(int s = mSourceStart; s <= mSourceEnd; s++){
    long fwdISpan = FwdStats.mIntervalSpanOnSource[s];
    long revISpan = RevStats.mIntervalSpanOnSource[s];
    Overlaps[s] = (fwdISpan > revISpan) ? fwdISpan : revISpan;
  }
}

//---------------------------------------------------
// * ConvertOverlapsToFraction
//---------------------------------------------------
//
void
HashSamplingOverlapper::ConvertOverlapsToFraction(vector<float> &Overlaps,
                                                  SequenceVector &Seqs)
{
  // Convert each overlap from base pairs into an integer percentage of the
  // corresponding source's length, clamped to [0,100].
  for(int s = mSourceStart; s <= mSourceEnd; s++){
    double SourceSize = Seqs[s].size();

    // BUG FIX: guard against zero-length sources (their existence is
    // explicitly checked for elsewhere in this file); dividing by zero
    // here would poison the overlap value.
    if (SourceSize <= 0){
      Overlaps[s] = 0;
      continue;
    }

    double fraction = Overlaps[s]/SourceSize;
    int fractionI = int(fraction*100);
    // Interval hits can indicate a span greater than the source size
    // (see the KJD note at the top of this file); truncate to 100%.
    if (fractionI > 100){
      fractionI = 100;
    }

    Overlaps[s] = fractionI;
  }
}



//---------------------------------------------------
// * ComputeOverlapsForQuery
//---------------------------------------------------
// Computes the overlaps for a single query against all of the
// sequences in the CST.   
//
void 
HashSamplingOverlapper::ComputeOverlapsForQuery(int QueryIdx,
                                       HashSampleSearchEngine &CST,
                                       SequenceVector &Queries,
                                       AdjacencyListGraph &OverlapGraph)
{
  // Clear the stats from the previous read. 
  FwdStats.Clear();
  RevStats.Clear();

  // Compare query samples to current sources, forward and reverse strands.
  SampleSearchResultVec FwdSearchResults,RevSearchResults;
  //StartTiming(bugStartTime);
  long FwdNumSamples = CST.Search(Queries[QueryIdx],FwdSearchResults);
  long RevNumSamples = CST.SearchRev(Queries[QueryIdx],RevSearchResults);
  //bugElapsedTime = EndTiming(bugStartTime);
  //bugCumulativeTime+=bugElapsedTime;

  // KJD: Shouldn't do this before call ComputeOverlapsForQuery?
  // Return if sequence was too small to sample. 
  // NOTE(review): if only ONE direction yields samples, the other
  // direction's results are discarded as well -- confirm that is intended.
  if ((FwdNumSamples <= 0) || (RevNumSamples <=0)) return;

  // Look at FORWARD samples ------------------
  FwdStats.InitSampleStats(FwdNumSamples);

  // Sometimes search returns no results, so there is nothing to do. 
  if (FwdSearchResults.size()>0){
	 ComputeHitsAndIntervalHitsOnSources(FwdSearchResults,FwdStats);
	 if (mbFilterInteriorSingletons) FilterInteriorSingletons(FwdStats); 
  }

  // Look at REVERSE samples ------------------
  RevStats.InitSampleStats(RevNumSamples);

  // Sometimes search returns no results, so there is nothing to do. 
  if (RevSearchResults.size()>0){	
	 ComputeHitsAndIntervalHitsOnSources(RevSearchResults,RevStats);
	 if (mbFilterInteriorSingletons) FilterInteriorSingletons(RevStats);
  }
  
  // Use those statistics to estimate overlap amounts, choosing best overlap from 
  // Fwd or Rev samples.  
  vector<float> Overlaps(mpSources->size(),0.0);
  EstimateOverlaps(Overlaps,FwdStats,RevStats);

  // Overlaps are initially in bp; this converts them to a percentage of
  // each source's length.
  ConvertOverlapsToFraction(Overlaps,*mpSources);

  // Add this group of overlaps to the growing OverlapGraph
  AddToGraph(QueryIdx,Overlaps,OverlapGraph);
}

























































//==========================================================================================
//==========================================================================================
//==========================================================================================

#if 0


//---------------------------------------------------
// * PlotSampleHits
//---------------------------------------------------
// Output a little diagram that shows, for each source, 
// where the samples hit.   
//
// Query: r5  (540)     Source: r9  (340)  #Samp=19 
// Fwd Hits: .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
//
// Query: r5  (540)     Source: r8  (430)  #Samp=19 
// Fwd Hits: .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
//
// Query: r5  (540)     Source: r7  (502)  #Samp=19 
// Fwd Hits: .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
//
void
HashSamplingOverlapper::PlotSampleHits(ostream &fout,int NumSamples,string &QueryName,
                                       SequenceVector &Sources, SampleSearchResultVec &Results)
{
  // For each source, print a row of two-character bins showing where the
  // query's samples hit: '..' = empty, '++' = multiple hits, otherwise the
  // zero-padded sample index.
  const int NoEntry = -1;
  const int MultipleHits = -100;

  for(int sIdx = 0; sIdx < Sources.size(); sIdx++){
    bool bAllBinsZero = true;
    long SourceSize = Sources[sIdx].size();
    int NumBins = (int)((float)SourceSize/(float)mSampleSpacing);

    int *SourceBins = new int[NumBins];
    // This is a printing function, so efficiency is not an issue...
    for(int i = 0; i < NumBins; i++) SourceBins[i] = NoEntry;
    for(int r = 0; r < Results.size(); r++){
      // Found a result for this Source....
      if (Results[r].mSourceIdx == sIdx){
        int bin = (Results[r].mSourceLoc/mSampleSpacing);
        if (SourceBins[bin] == NoEntry){
          // Remove repeat samples
          if (mTotalHitsBySample[Results[r].mQueryIdx] < mRepeatThreshold){
            SourceBins[bin] = Results[r].mQueryIdx;
            bAllBinsZero = false;
          }
        }else{
          // Multiple hits in a bin... save special marker
          SourceBins[bin] = MultipleHits;
        }
      }
    }

    // Only want to print out sources with some non-zero bin.
    if (!bAllBinsZero){
      fout<<"S: "<<Sources[sIdx].mName<<" ("<<SourceSize<<")"<<flush;
      fout<<"Bins="<<NumBins<<" Q:"<<QueryName<<"\t";

      // Print out the hits for this source.
      for(int b = 0; b < NumBins; b++){
        if (SourceBins[b] == MultipleHits){
          fout<<"++ "<<flush;
        }else if (SourceBins[b] == NoEntry){
          fout<<".. "<<flush;
        }else{
          if (SourceBins[b] < 10){
            fout<<"0"<<SourceBins[b]<<" "<<flush;
          }else{
            fout<<SourceBins[b]<<" "<<flush;
          }
        }
      }
      fout<<endl;
    }
    // BUG FIX: the buffer was allocated with new[] but released with plain
    // 'delete', which is undefined behavior; it must be delete[].
    delete [] SourceBins;
  }
}

//---------------------------------------------------
// * PlotSampleHits2
//---------------------------------------------------
// Output a little diagram that shows, for each source, 
// where the samples hit, indicating true overlaps.  
//
// Query: r5  (540)     Source: r9  (340)  #Samp=19 
// Fwd Hits: .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
//
// Query: r5  (540)     Source: r8  (430)  #Samp=19 
// Fwd Hits: .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
//
// Query: r5  (540)     Source: r7  (502)  #Samp=19 
// Fwd Hits: .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. 
//
void
HashSamplingOverlapper::PlotSampleHits2(ostream &fout,int NumSamples,int QIdx,
                                        SequenceVector &Queries,
                                        SequenceVector &Sources, SampleSearchResultVec &Results,
                                        Table<int> &TrueOverlaps,vector<float> &PredictedOverlaps)
{
  // Like PlotSampleHits, but annotates each row with interval-hit counts,
  // the predicted overlap, and the known true overlap for scoring.
  const int NoEntry = -1;
  const int MultipleHits = -100;

  fout<<"QIdx="<<QIdx<<" MeanHitsPerSample="<<mMeanHitsPerSample<<"\t";
  for(int i = 0; i < mTotalHitsBySample.size(); i++){
    fout<<mTotalHitsBySample[i]<<" ";
  }
  fout<<endl;

  for(int sIdx = 0; sIdx < Sources.size(); sIdx++){
    int TrueOverlap = 0;
    bool bAllBinsZero = true;
    long SourceSize = Sources[sIdx].size();
    int NumBins = (int)((float)SourceSize/(float)mSampleSpacing);

    // BUG FIX: was a variable-length array (int SourceBins[NumBins]),
    // which is not standard C++; a vector also cleans up automatically.
    // (The original's redundant zero-then-NoEntry double initialization
    // is collapsed into the constructor fill.)
    vector<int> SourceBins(NumBins,NoEntry);

    for(int r = 0; r < Results.size(); r++){
      // Found a result for this Source....
      if (Results[r].mSourceIdx == sIdx){
        int bin = (Results[r].mSourceLoc/mSampleSpacing);
        if (SourceBins[bin] == NoEntry){
          // Remove repeat samples
          if (mTotalHitsBySample[Results[r].mSampleIdx] < mRepeatThreshold){
            SourceBins[bin] = Results[r].mSampleIdx;
            bAllBinsZero = false;
          }
        }else{
          // Multiple hits in a bin... save special marker
          SourceBins[bin] = MultipleHits;
        }
        // Record whether this query/source pair is a known true overlap.
        TrueOverlap = TrueOverlaps[QIdx][sIdx];
      }
    }

    // Only want to print out sources with some non-zero bin or a true overlap.
    if ((!bAllBinsZero) || (TrueOverlap > 0)){
      int offset = 0;
      string sDesc = GetNextWSDelimitedString(Sources[sIdx].mName,0,offset);
      string qDesc = GetNextWSDelimitedString(Queries[QIdx].mName,0,offset);
      int IntervalHitsOnSource = mIntervalHitsOnSource[sIdx];
      PrintSpacePaddedNum(fout,4,IntervalHitsOnSource);
      int POverlap = (int) PredictedOverlaps[sIdx];
      PrintSpacePaddedNum(fout,4,POverlap);
      fout<<" ";
      PrintSpacePaddedNum(fout,4,TrueOverlap);
      fout<<" ";
      fout<<"S: "<<sDesc<<" ("<<SourceSize<<")"<<flush;
      fout<<"Bins="<<NumBins<<" Q:"<<qDesc<<"\t ";

      // Print out the hits for this source.
      for(int b = 0; b < NumBins; b++){
        if (SourceBins[b] == MultipleHits){
          fout<<"++ "<<flush;
        }else if (SourceBins[b] == NoEntry){
          fout<<".. "<<flush;
        }else{
          if (SourceBins[b] < 10){
            fout<<"0"<<SourceBins[b]<<" "<<flush;
          }else{
            fout<<SourceBins[b]<<" "<<flush;
          }
        }
      }
      fout<<endl;
    }
  }
}



//---------------------------------------------------
// * ComputeOverlapsForQueryNoRev
//---------------------------------------------------
// Computes the overlaps for a single query against all of the
// sequences in the CST.   
//
void 
HashSamplingOverlapper::ComputeOverlapsForQueryNoRev(int QueryIdx,
                                       HashSampleSearchEngine &CST,
                                       SequenceVector &Queries,
                                       AdjacencyListGraph &OverlapGraph)
{
  // Forward-strand-only variant of ComputeOverlapsForQuery (no SearchRev).
  if (!(QueryIdx%500)) cout<<"."<<flush;  // progress dots

  SampleSearchResultVec SearchResults;
  long NumSamples = CST.Search(Queries[QueryIdx],SearchResults);

  // Occasionally, sequences are too small to be sampled. 
  if (NumSamples > 0){
	 mTotalHitsBySample.resize(NumSamples);
	 for(int i = 0;i<NumSamples;i++) mTotalHitsBySample[i]=0;

	 vector<float> Overlaps(mpSources->size(),0.0);

	 // Ensure that previous statistics are cleared.  Note that later may want
	 // to keep cumulative statistics. 
	 ClearSourceStatVectors();

	 // Sometimes search returns no results, so there is nothing to do. 
	 if (SearchResults.size()>0){
		// Compute some statistics 
		ComputeSampleHitStats(SearchResults);
		ComputeHitsAndIntervalHitsOnSources(SearchResults);
		if (mbFilterInteriorSingletons) FilterInteriorSingletons();

		// Use those statistics to estimate overlap amounts. 
		EstimateOverlaps(Overlaps);

		// Overlaps start in bp; convert to percent of the source covered.
		ConvertOverlapsToFraction(Overlaps,*mpSources);

		// Add edges to the graph corresponding to the non-zero overlaps. 
		AddToGraph(QueryIdx,Overlaps,OverlapGraph);
	 }

	 if (gDEBUG){

		// **** DEBUG OUTPUT ****
		// ls.verbose() sets the ls stream to actually produce output only when 
		// global logging level is set to verbose or greater. 
		ls.verbose()<<"QuerySamples.size()="<<NumSamples<<endl;
		ls2.verbose()<<endl;
		PlotSampleHits2(ls2.verbose(),NumSamples,QueryIdx,*mpQueries,
							 *mpSources,SearchResults,*mpTrueOverlaps,Overlaps);
		// **** DEBUG OUTPUT ****
	 }
  }
}


//---------------------------------------------------
// * ComputeUnfilteredHitsAndIntervalHitsOnSources
//---------------------------------------------------
void
HashSamplingOverlapper::ComputeUnfilteredHitsAndIntervalHitsOnSources(SampleSearchResultVec 
                                                            &SearchResults)
{
 if (SearchResults.size() == 0) return;

  // Order hits by location within a source, then by source index.
  SearchResults.SortBySourceLoc();
  SearchResults.SortBySourceIdx();
  
  long LastSampleIdx = SearchResults[0].mQueryIdx;
  long LastSourceLoc = SearchResults[0].mSourceLoc;
  
  // Note:  The "queries" in SearchResults are the query Samples
  for(int r = 0;r< SearchResults.size();r++){		
	 // Note: mSourceIdx is relative to the subset of Sources the current Stree 
	 // is built for, must add mSourceStart to make relative to whole set of Sources
	 long SourceIdx = SearchResults[r].mSourceIdx+mSourceStart;// KJD Scrutinize!! 
	 long SampleIdx = SearchResults[r].mQueryIdx;
	 long SourceLoc = SearchResults[r].mSourceLoc;
		
	 // "Unfiltered": the repeat-threshold test is deliberately commented out,
	 // so the two indented statements below execute unconditionally and
	 // every hit is counted.
	 //if (mTotalHitsBySample[SampleIdx] < mRepeatThreshold){
		mTotalHitsOnSource[SourceIdx]++;
		mLastHitOnSource[SourceIdx] = SourceLoc;  
		
	 // The interval that these two samples are apart
	 long interval = SourceLoc - LastSourceLoc;
	 long targetInterval = (SampleIdx - LastSampleIdx)*mSampleSpacing;
		
	 // Need something to check that samples are monotonic

	 // If the intervals are close enough, record an interval hit
	 long intervalDiff = abs(abs(interval)-abs(targetInterval));
	 if (intervalDiff < mSampleSize){
		mIntervalHitsOnSource[SourceIdx]++;
		mIntervalSpanOnSource[SourceIdx]+= abs(targetInterval);
	 }
	 LastSampleIdx = SampleIdx;
	 LastSourceLoc = SourceLoc;	
  }
}

//---------------------------------------------------
// * ComputePerfectSensitivyOverlapsForQuery
//---------------------------------------------------
// Computes the overlaps for a single query against all of the
// sequences in the CST.   
//
void 
HashSamplingOverlapper::ComputePerfectSensitivityOverlapsForQuery(int QueryIdx,
                              HashSampleSearchEngine &CST,
                                   SequenceVector &QuerySamples,
                                   AdjacencyListGraph &OverlapGraph)
{
  // Testing-only overlap computation: hits are counted without repeat
  // filtering, and the raw hit count per source is used as the overlap.
  int NumSamples = QuerySamples.size();
  if (!(QueryIdx%100)) cout<<"."<<flush;  // progress dots

  // Occasionally, sequences are too small to be sampled. 
  if (NumSamples > 0){
	 mTotalHitsBySample.resize(NumSamples);
	 for(int i = 0;i<NumSamples;i++) mTotalHitsBySample[i]=0;

	 SampleSearchResultVec SearchResults;
	 CST.Search(QuerySamples,SearchResults);

	 vector<float> Overlaps(mpSources->size(),0.0);

	 // Ensure that previous statistics are cleared.  Note that later may want
	 // to keep cumulative statistics. 
	 ClearSourceStatVectors();

	 if (SearchResults.size() >0){
		ComputeSampleHitStats(SearchResults);
		
		ComputeUnfilteredHitsAndIntervalHitsOnSources(SearchResults);	  
		
		// Simple estimator, for now: raw hit count per source.
		// NOTE(review): groupSize is computed but never used.
		long groupSize = mIntervalSpanOnSource.size();
		for(int s = mSourceStart;s<=mSourceEnd;s++){
		  Overlaps[s] = mTotalHitsOnSource[s];
		}

		// Add edges to the graph corresponding to the non-zero overlaps. 
		mMinOverlapForGraph = 1;// KJD BUG: clobbers the configured threshold
		AddToGraph(QueryIdx,Overlaps,OverlapGraph);
	 }
  }
}

//---------------------------------------------------
// * ComputePerfectSensitivityGraph
//---------------------------------------------------
// Computes the complete (sparse) overlap graph for the given 
// set of sources and queries.  This version should yield
// perfect sensitivity.  It is used for testing only. 
//
void
HashSamplingOverlapper::ComputePerfectSensitivityGraph(AdjacencyListGraph &OverlapGraph)
{
  // Testing-only driver: builds the overlap graph via the unfiltered
  // per-query computation so other runs can be scored against it.

  // Initialize stats vectors. 
  InitSourceStatVectors(mpSources->size()); 
  ComputeSourceGroupSizes();
  mSampleSpacing = (int)(1000.0/(float)mSamplesPerKbp);

  // Add nodes for all of the queries 
  OverlapGraph.resize(mpQueries->size());
  cout<<"NumSources="<<mpSources->size()<<endl;
  cout<<"NumSourceGroups="<<mNumSourceGroups<<endl;

  // Pre-sample every query once; the samples are reused for each group.
  vector<SequenceVector> AllQuerySamples;
  CreateQuerySamples(AllQuerySamples);

  // For each source group (i.e. suffix tree of reads)
  for(int SourceGroup = 0;SourceGroup < mNumSourceGroups;SourceGroup++){

	 cout<<"SourceGroup "<<SourceGroup<<" "<<flush;

	 // Create a suffix tree for the sources in this group
	 HashSampleSearchEngine *pHSS = new HashSampleSearchEngine();
	 mSourceStart = (SourceGroup*mSourcesInGroup);
	 mSourceEnd = (SourceGroup+1)*mSourcesInGroup-1;
	 if (mSourceEnd >= mpSources->size()) mSourceEnd = mpSources->size()-1;
	 int NumSources = mSourceEnd - mSourceStart + 1;

	 cout<<"NumSources="<<NumSources<<"Source Start=";
	 cout<<mSourceStart<<" SourceEnd="<<mSourceEnd<<endl;
	 
	 long StartTime;
	 StartTiming(StartTime);
	 cout<<"Create.."<<flush;
	 pHSS->Create(*mpSources,mSourceStart,mSourceEnd);
	 float elapsedTime = EndTiming(StartTime);
	 cout<<elapsedTime<<" sec done."<<endl;

	 long TotalQueries = 0;
	 // Compare every query to the sources in CST
	 for(int QueryIdx = 0;QueryIdx < (*mpQueries).size();QueryIdx++){
		ComputePerfectSensitivityOverlapsForQuery(QueryIdx,
																*pHSS,
																AllQuerySamples[QueryIdx],
																OverlapGraph);
		TotalQueries+=AllQuerySamples[QueryIdx].size();
	 }
	 delete pHSS;
  }
}


//---------------------------------------------------
// * ComputeHitsAndIntervalHitsOnSources
//---------------------------------------------------
// !! Must be called after ComputeSampleHitStats
//
void
HashSamplingOverlapper::ComputeHitsAndIntervalHitsOnSources2(SampleSearchResultVec 
                                                             &SearchResults)
{
  // Variant of the hit/interval tally that skips repeat-filtered results
  // (marked with a negative SourceLoc) and requires monotonically
  // increasing hit locations before counting an interval hit.
  SearchResults.SortBySourceLoc();
  SearchResults.SortBySourceIdx();

  // ('register' dropped: deprecated in C++11, removed in C++17.)
  long LastSampleIdx = SearchResults[0].mQueryIdx;
  long LastSourceLoc = SearchResults[0].mSourceLoc;

  // Note: the "queries" in SearchResults are the query Samples.
  // KJD r=0 correct start point for interval??
  for(int r = 0; r < SearchResults.size(); r++){

    // Compute stats for countable hits (repeat filtered hits will have -1 for Loc)
    if (SearchResults[r].mSourceLoc >= 0){

      // Note: mSourceIdx is relative to the subset of Sources the current
      // Stree is built for.
      long SourceIdx = SearchResults[r].mSourceIdx;
      long SampleIdx = SearchResults[r].mQueryIdx;
      long SourceLoc = SearchResults[r].mSourceLoc;

      // Count total hits on source.
      // Samples which hit more than the derived repeat threshold are not counted.
      if (mTotalHitsBySample[SampleIdx] < mRepeatThreshold){
        mTotalHitsOnSource[SourceIdx]++;
        mLastHitOnSource[SourceIdx] = SourceLoc;
      }else{
        mRepeatFilterCount++;
      }

      // Observed distance between consecutive hits vs. the distance
      // expected from the sample spacing.
      long interval = SourceLoc - LastSourceLoc;
      long targetInterval = (SampleIdx - LastSampleIdx)*mSampleSpacing;

      // Require the hit locations to be monotonically increasing.
      bool samplesIncreasing = (interval >= 0);

      // If the intervals agree to within twice the sample size, record an
      // interval hit.  (An unused scratch variable was removed here.)
      long intervalDiff = abs(abs(interval)-abs(targetInterval));
      long intervalCutoff = mSampleSize*2;
      if ((samplesIncreasing) && (intervalDiff < intervalCutoff)){
        mIntervalHitsOnSource[SourceIdx]++;
        mIntervalSpanOnSource[SourceIdx]+= abs(targetInterval);
      }
      LastSampleIdx = SampleIdx;
      LastSourceLoc = SourceLoc;
    }
  }
}

//---------------------------------------------------
// * ComputeHitsAndIntervalHitsOnSources
//---------------------------------------------------
// !! Must be called after ComputeSampleHitStats
//
void
HashSamplingOverlapper::ComputeHitsAndIntervalHitsOnSources3(SampleSearchResultVec 
                                                             &SearchResults)
{
  // Order hits by location within a source, then by source index.
  SearchResults.SortBySourceLoc();
  SearchResults.SortBySourceIdx();

  long prevSampleIdx = SearchResults[0].mQueryIdx;
  long prevSourceLoc = SearchResults[0].mSourceLoc;

  // Note: the "queries" in SearchResults are the query Samples.
  // KJD r=0 correct start point for interval??
  for(int res = 0; res < SearchResults.size(); res++){

    // Repeat-filtered hits carry -1 for Loc; only countable hits are tallied.
    if (SearchResults[res].mSourceLoc < 0) continue;

    // mSourceIdx is relative to the subset of Sources the current Stree
    // is built for.
    long srcIdx = SearchResults[res].mSourceIdx;
    long sampIdx = SearchResults[res].mQueryIdx;
    long srcLoc = SearchResults[res].mSourceLoc;

    // Samples which hit more than the derived repeat threshold are not counted.
    if (mTotalHitsBySample[sampIdx] >= mRepeatThreshold){
      mRepeatFilterCount++;
    }else{
      mTotalHitsOnSource[srcIdx]++;
      mLastHitOnSource[srcIdx] = srcLoc;
    }

    // Observed distance between this hit and the previous one, vs. the
    // distance expected from the sample spacing.
    long observed = srcLoc - prevSourceLoc;
    long expected = (sampIdx - prevSampleIdx)*mSampleSpacing;

    // Close enough (within one sample length)?  Record an interval hit.
    if (abs(abs(observed)-abs(expected)) < mSampleSize){
      mIntervalHitsOnSource[srcIdx]++;
      mIntervalSpanOnSource[srcIdx] += abs(expected);
    }
    prevSampleIdx = sampIdx;
    prevSourceLoc = srcLoc;
  }
}

//---------------------------------------------------
// * PrintHitDistribution
//---------------------------------------------------
// Writes a histogram of Values (one "Bin k <count>" line per value,
// from 0 to the maximum observed value) followed by the supplied mean
// and standard deviation.
//
void
HashSamplingOverlapper::PrintHitDistribution(ostream &fout,vector<int> &Values,
												 float mean,float stDev)
{
  fout<<endl;

  // Find the largest value so we know how many bins are needed.
  int VMax = 0;
  for(int v = 0;v< Values.size();v++){
	 if (Values[v] > VMax){
		VMax = Values[v];
	 }
  }

  // resize() on a fresh vector value-initializes every element to zero,
  // so no explicit zeroing loop is needed.  (The old loop was also
  // off by one: it stopped short of ValueBins[VMax].)
  vector<int> ValueBins;
  ValueBins.resize(VMax+1); // plus one to accommodate zero.

  // Histogram the values.
  for(int j = 0;j< Values.size();j++){
	 ValueBins[Values[j]]++;
  }

  for(int k = 0;k< ValueBins.size();k++){
	 fout<<"Bin "<<k<<"\t"<<ValueBins[k]<<endl;
  }
  fout<<"\nMean= "<<mean<<" StDev="<<stDev<<endl; 
}

//---------------------------------------------------
// * AddReverseCompliment
//---------------------------------------------------
// Appends a reverse-complemented copy of Seq to SeqVec, with "(-)"
// appended to its name to mark the strand.
// KJD This should be somewhere else, that's for sure. 
//
void
HashSamplingOverlapper::AddReverseCompliment(Sequence &Seq,SequenceVector &SeqVec)
{
  Sequence revSeq(Seq);
  revSeq.ReverseCompliment();
  revSeq.mName += "(-)";
  SeqVec.push_back(revSeq);
}

//---------------------------------------------------
// * SampleQuery
//---------------------------------------------------
// Cuts QueryIn into samples of length mSampleSize whose start points
// are mSampleSpacing apart, appending each to SamplesOut.
//
void
HashSamplingOverlapper::SampleQuery(Sequence &QueryIn,SequenceVector &SamplesOut)
{
  // Number of whole sample positions that fit in the query.
  int sampleCount = (int)((float)QueryIn.size()/mSampleSpacing);

  for(long s = 0; s < sampleCount; ++s){
	 long first = (long)(s*mSampleSpacing);
	 long last  = first + mSampleSize - 1;
	 SamplesOut.AddSubSequence(QueryIn, first, last);
  }
}


//---------------------------------------------------
// * CreateQuerySamples
//---------------------------------------------------
// Builds one SequenceVector of samples per query in *mpQueries.
// Each query is lower-cased in place before sampling.
//
void
HashSamplingOverlapper::CreateQuerySamples(vector<SequenceVector> &QuerySamples)
{
  QuerySamples.clear();
  QuerySamples.resize(mpQueries->size());

  for(int idx = 0; idx < mpQueries->size(); ++idx){
	 (*mpQueries)[idx].ToLowerCase();
	 SampleQuery((*mpQueries)[idx], QuerySamples[idx]);
  }

  // Reverse compliments are currently not added here (disabled):
  //for(int q = 0;q< mpQueries->size();q++){
  // AddReverseCompliment(QuerySamples[q],QuerySamples);
  //}
}

//---------------------------------------------------
// * ComputeHitsAndIntervalHitsOnSourcesNR
//---------------------------------------------------
// !! Must be called after ComputeSampleHitStats
//
// NR for no reverse samples.  Old version, will be 
// removed when new is verified. 
//
// Unlike the "3" variant, this does NOT skip results with a negative
// mSourceLoc, and it reads mSampleIdx rather than mQueryIdx.
//
// NOTE: the 'register' storage class has been dropped throughout --
// it was deprecated in C++11 and removed in C++17, and is a no-op
// hint on modern compilers.
//
void
HashSamplingOverlapper::ComputeHitsAndIntervalHitsOnSourcesNR(SampleSearchResultVec &SearchResults)
{
  if (SearchResults.size() == 0) return;

  SearchResults.SortBySourceLoc();
  SearchResults.SortBySourceIdx();
  
  long LastSampleIdx = SearchResults[0].mSampleIdx;
  long LastSourceLoc = SearchResults[0].mSourceLoc;

  // KJD r=0 correct start point for interval??  
  for(int r = 0;r< SearchResults.size();r++){		

	 // Search engine has been changed so that SourceIdx is relative to global source
	 // idx, not to the local results. 
	 long SourceIdx = SearchResults[r].mSourceIdx;
	 long SampleIdx = SearchResults[r].mSampleIdx;
	 long SourceLoc = SearchResults[r].mSourceLoc;
		
	 // Samples which hit more than the derived repeat threshold are not counted
	 if (mTotalHitsBySample[SampleIdx] < mRepeatThreshold){
		mTotalHitsOnSource[SourceIdx]++;
		mLastHitOnSource[SourceIdx] = SourceLoc;  
	 }else{
		mRepeatFilterCount++;
	 }
		
	 // Observed gap on the source vs. the gap expected from the sample spacing.
	 long interval = SourceLoc - LastSourceLoc;
	 long targetInterval = (SampleIdx - LastSampleIdx)*mSampleSpacing;
	 
	 // Need something to check that samples are monotonic
	 
	 // If the intervals are close enough, record an interval hit
	 long intervalDiff = abs(abs(interval)-abs(targetInterval));
	 if (intervalDiff < mSampleSize){
		mIntervalHitsOnSource[SourceIdx]++;
		mIntervalSpanOnSource[SourceIdx]+= abs(targetInterval);
	 }
	 LastSampleIdx = SampleIdx;
	 LastSourceLoc = SourceLoc;	
  }
}


//---------------------------------------------------
// * ComputeHitsOnSources
//---------------------------------------------------
// !! Must be called after ComputeSampleHitStats
//
// Counts hits per source, skipping results that were repeat-filtered
// upstream and samples at/above the derived repeat threshold.
//
void
HashSamplingOverlapper::ComputeHitsOnSources(SampleSearchResultVec &SearchResults)
{
  for(int r = 0; r < SearchResults.size(); r++){
	 // Repeat-filtered hits carry -1 for SourceLoc; skip them.
	 if (SearchResults[r].mSourceLoc < 0) continue;

	 // Note: mSourceIdx is relative to the subset of Sources the current
	 // Stree is built for.
	 long srcIdx = SearchResults[r].mSourceIdx;
	 long smpIdx = SearchResults[r].mQueryIdx;
	 long srcLoc = SearchResults[r].mSourceLoc;

	 // Samples which hit more than the derived repeat threshold are not counted
	 if (mTotalHitsBySample[smpIdx] < mRepeatThreshold){
		mTotalHitsOnSource[srcIdx]++;
		mLastHitOnSource[srcIdx] = srcLoc;
	 }else{
		mRepeatFilterCount++;
	 }
  }
}
  if (gDEBUG){
	 ls3.verbose();
	 ls3<<"mStDevRepeatCutoff="<<mStDevRepeatCutoff<<endl;
	 ls3<<"mStDevHitsPerSample="<<mStDevHitsPerSample<<endl;
	 ls3<<"mRepeatThreshold="<<mRepeatThreshold<<endl;
	 ls3<<"TotalHits="<<TotalHits<<endl;
	 ls3<<"NumStatQueries="<<NumStatQueries<<endl;

	 ls3<<"HitsBySample:"<<endl;
	 for(int i = 0;i<HitsBySample.size();i++){
		ls3<<"Sample "<<i<<" "<<HitsBySample[i]<<endl;
	 }

	 // Write out the distribution of hits. 
	 PrintHitDistribution(ls3.verbose(),HitsBySample,mMeanHitsPerSample,
	 							 mStDevHitsPerSample);
  }

//---------------------------------------------------
// * InitSourceStatVectors
//---------------------------------------------------
// (Re)allocates and zeroes the per-sample and per-source statistic
// arrays in Stats.  Arrays are reallocated only when the requested
// size differs from the current one; otherwise they are reused and
// simply re-zeroed.  Stats.mSize/mSSize track the current capacities.
//
void
HashSamplingOverlapper::InitSourceStatVectors(int NumSources,
															 int NumSamples,
															 SampleHitStats &Stats)
{
  // --- Per-sample hit counter ---
  if (Stats.mSSize != NumSamples){
	 if (Stats.mSSize != 0){
		// BUGFIX: was scalar 'delete' -- memory from new[] must be
		// released with delete[], otherwise behavior is undefined.
		delete [] Stats.mTotalHitsBySample;
	 }
	 Stats.mTotalHitsBySample = new int[NumSamples];
  }

  for(int i = 0;i< NumSamples;i++){
	 Stats.mTotalHitsBySample[i] = 0;
  }

  // --- Per-source stat arrays ---
  // KJD Worry.  Assuming here, obviously, that they are all in sync
  // (the four arrays are always allocated and freed together below).
  if (Stats.mSize != NumSources){
	 // Don't want to delete if this is first time. 
	 if (Stats.mSize != 0){
		cout<<"delete.  NumSource="<<NumSources<<" mSize="<<Stats.mSize<<endl;
		// BUGFIX: delete[] for array deallocation (was scalar delete).
		delete [] Stats.mTotalHitsOnSource;
		delete [] Stats.mLastHitOnSource;
		delete [] Stats.mIntervalHitsOnSource;
		delete [] Stats.mIntervalSpanOnSource;
	 }
	 Stats.mTotalHitsOnSource = new int[NumSources];
	 Stats.mLastHitOnSource = new int[NumSources];
	 Stats.mIntervalHitsOnSource = new int[NumSources];
	 Stats.mIntervalSpanOnSource = new int[NumSources];
  }

  // Explicitly zero everything -- new int[n] does NOT zero-initialize.
  for(int i = 0;i< NumSources; i++){
	 Stats.mTotalHitsOnSource[i]=0;
	 Stats.mIntervalHitsOnSource[i]=0;
	 Stats.mIntervalSpanOnSource[i]=0;
	 Stats.mLastHitOnSource[i] = -1;  // -1 == no hit recorded yet
  }

  Stats.mSize = NumSources;
  Stats.mSSize = NumSamples;
}

//---------------------------------------------------
// * ClearSourceStatVectors
//---------------------------------------------------
// Zeroes the per-source counters in place.
// NOTE(review): unlike InitSourceStatVectors, this does not reset
// mLastHitOnSource -- presumably intentional, but worth confirming.
//
void
HashSamplingOverlapper::ClearSourceStatVectors(SampleHitStats &Stats)
{
  int idx = 0;
  while (idx < Stats.mSize){
	 Stats.mTotalHitsOnSource[idx] = 0;
	 Stats.mIntervalHitsOnSource[idx] = 0;
	 Stats.mIntervalSpanOnSource[idx] = 0;
	 ++idx;
  }
}


#endif





