#include "stdafx.h"
#include "memes.h"

extern THash<TChA, TInt> StatsH;
// Global switches and configuration

//TIntH TMPConnCntH;
//TIntH	LinkTriadH, CoMTriadH;
//TIntFltH MixInDegH, MixOutDegH, MixExtInDegH, MixExtOutDegH;
//TIntPrFltH InCompH, OutCompH;
//THash<TInt, TFlt> DomTmH;	
	
//#define TEST
//
//#define		HiThresh							240
//#define		LoThresh							80
//
//#define		IsDeDuplicate					1
//#define		IsUseOnlyMediaDom			0
//#define		IsUseOnlyBlogDom			0
//
//#define		UseTotVol							0
//#define		IsDomGraphReady				1
//#define		IsPreSTATSReady				0
//#define		IsNormalizeNode				1
//
//#define		IsQtVarPrReady				0
//#define		IsAggregateBlog				1
//#define		IsTotQtVolHReady			1
//
//int		VMinEdgeWeight				=		1;	//
//int		VMinBlogMention				=		10; //
//int		VSameCount						=		1;
//double	VMinMultiple				=		3;
//double VMaxDifference				=		0.3;
//double VCoMThresh						=		0.75;
//
//
//#define		UsePROJ								1
//#define		UseNONNET							1
//#define		UseCOM								1
//#define		UseDEG								1
//#define		UseCONN								1
//#define		UseEXT								1
//#define		UseMIX								1
//#define		UseTIME								1

/////////////////////////////////////////////////
// Quote Statistics
// Total number of occurrences of the quote: sum of per-url counts.
int TQuote::GetFq() const {
  int TotalCnt = 0;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    TotalCnt += TmUrlCntV[n].Cnt();
  }
  return TotalCnt;
}

int TQuote::GetFq(const TSecTm& BegTm, const TSecTm& EndTm) const {
  int fq=0;
  for(int i=0; i<TmUrlCntV.Len(); i++) {
    if (TmUrlCntV[i].Tm() >= BegTm && TmUrlCntV[i].Tm() < EndTm) {
      fq += TmUrlCntV[i].Cnt(); }
  }
  return fq;
}

int TQuote::GetDoms(const TQuoteBs& QtBs) const {
  THashSet<TChA> DomSet;
  for (int u = 0; u < TmUrlCntV.Len(); u++) {
    DomSet.AddKey(TStrUtil::GetDomNm(QtBs.GetStr(TmUrlCntV[u].UrlId())));
  }
  return DomSet.Len();
}

// Number of occurrences restricted to urls of type UrlTy
// (utUndef counts occurrences at urls of every type).
int TQuote::GetFq(const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  int TotalCnt = 0;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    const bool TyOk = (UrlTy == utUndef) || (QtBs.GetUrlTy(TmUrlCntV[n].UrlId()) == UrlTy);
    if (TyOk) {
      TotalCnt += TmUrlCntV[n].Cnt();
    }
  }
  return TotalCnt;
}

// Number of occurrences in the time interval [BegTm, EndTm], restricted to
// urls of type UrlTy (utUndef counts all url types).
// NOTE(review): this overload uses a CLOSED upper bound (Tm <= EndTm) while
// the two-argument GetFq and GetUrls use a half-open one (Tm < EndTm) --
// confirm the asymmetry is intentional.
int TQuote::GetFq(const TSecTm& BegTm, const TSecTm& EndTm, const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  int fq=0;
  for(int i=0; i<TmUrlCntV.Len(); i++) {
    if (TmUrlCntV[i].Tm() >= BegTm && TmUrlCntV[i].Tm() <= EndTm && (UrlTy==utUndef || QtBs.GetUrlTy(TmUrlCntV[i].UrlId())==UrlTy)) {
      fq += TmUrlCntV[i].Cnt(); }
  }
  return fq;
}

int TQuote::GetUrls(const TSecTm& BegTm, const TSecTm& EndTm, const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  int urls=0;
  for(int i=0; i<TmUrlCntV.Len(); i++) {
    if (TmUrlCntV[i].Tm() >= BegTm && TmUrlCntV[i].Tm() < EndTm &&
     (UrlTy==utUndef || QtBs.GetUrlTy(TmUrlCntV[i].UrlId())==UrlTy)) {
      urls+=1; }
  }
  return urls;
}

// Convenience overload: peak time only, discarding the frequency at the peak.
TSecTm TQuote::GetPeakTm(const TTmUnit& TmUnit, const TSecTm& AfterTm) const {
  TInt PeakFq; // filled by the three-argument overload, not needed here
  return GetPeakTm(TmUnit, AfterTm, PeakFq);
}

// Time (rounded to TmUnit) at which the quote's frequency peaks, considering
// only occurrences at/after AfterTm. FreqAtPeak receives the peak frequency.
// If nothing occurs after AfterTm, retries over the full history.
TSecTm TQuote::GetPeakTm(const TTmUnit& TmUnit, const TSecTm& AfterTm, TInt& FreqAtPeak) const {
  // BUG FIX: with an empty TmUrlCntV the fallback call below recursed forever
  // (GetPeakTm(TmUnit, TSecTm(1)) kept finding an empty histogram). Guard it.
  if (TmUrlCntV.Empty()) {
    FreqAtPeak = 0;
    return TSecTm(); // undefined time: no occurrences at all
  }
  const TSecTm After = AfterTm.Round(TmUnit);
  THash<TSecTm, TInt> TmFqH;
  for(int i=0; i<TmUrlCntV.Len(); i++) {
    if (TmUrlCntV[i].Tm().Round(TmUnit) >= After) {
      TmFqH.AddDat(TmUrlCntV[i].Tm().Round(TmUnit)) += TmUrlCntV[i].Cnt(); }
  }
  if (TmFqH.Empty()) {
    FreqAtPeak = 0;
    return GetPeakTm(TmUnit, TSecTm(1)); // no occurrences after AfterTm: use full history
  }
  TmFqH.SortByDat(false); // descending by frequency: peak first
  FreqAtPeak = TmFqH[0];
  return TmFqH.GetKey(0);
}

// Peak time considering only occurrences at/after AfterTm at urls of type
// UrlTy (utUndef = all types). Falls back to the unfiltered full history
// when nothing matches.
TSecTm TQuote::GetPeakTm(const TTmUnit& TmUnit, const TSecTm& AfterTm, const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  // BUG FIX: with an empty TmUrlCntV the fallback below could recurse forever
  // through the unfiltered overloads. Guard the degenerate case up front.
  if (TmUrlCntV.Empty()) { return TSecTm(); }
  const TSecTm After = AfterTm.Round(TmUnit);
  THash<TSecTm, TInt> TmFqH;
  for(int i=0; i<TmUrlCntV.Len(); i++) {
    if (TmUrlCntV[i].Tm().Round(TmUnit) >= After && (UrlTy==utUndef || QtBs.GetUrlTy(TmUrlCntV[i].UrlId())==UrlTy)) {
      TmFqH.AddDat(TmUrlCntV[i].Tm().Round(TmUnit)) += TmUrlCntV[i].Cnt(); }
  }
  if (TmFqH.Empty()) {
    // nothing matches the filter: fall back to the unfiltered full history
    return GetPeakTm(TmUnit, TSecTm(1)); }
  TmFqH.SortByDat(false); // descending by frequency: peak first
  return TmFqH.GetKey(0);
}

// Mean occurrence time (weighted by per-url counts, rounded to TmUnit) of
// occurrences at/after AfterTm at urls of type UrlTy (utUndef = all types).
// Returns an undefined TSecTm() when no occurrence matches.
TSecTm TQuote::GetMeanTm(const TTmUnit& TmUnit, const TUrlTy& UrlTy, const TQuoteBs& QtBs, const TSecTm& AfterTm) const {
  const TSecTm After = AfterTm.Round(TmUnit);
  double MeanTm = 0;
  int Cnt = 0;
  for(int i=0; i<TmUrlCntV.Len(); i++) {
    if (TmUrlCntV[i].Tm().Round(TmUnit) >= After && (UrlTy==utUndef || QtBs.GetUrlTy(TmUrlCntV[i].UrlId())==UrlTy)) {
      MeanTm += TmUrlCntV[i].Tm().Round(TmUnit) * TmUrlCntV[i].Cnt();
      Cnt += TmUrlCntV[i].Cnt();
    }
  }
  // BUG FIX: previously divided by zero (undefined double->uint cast) when
  // no occurrence matched the filter.
  if (Cnt == 0) { return TSecTm(); }
  return TSecTm(uint(MeanTm/double(Cnt))).Round(TmUnit);
}

// Weighted median of the (rounded) occurrence times at/after AfterTm,
// restricted to urls of type UrlTy (utUndef = all types).
TSecTm TQuote::GetMedianTm(const TTmUnit& TmUnit, const TUrlTy& UrlTy, const TQuoteBs& QtBs, const TSecTm& AfterTm) const {
  const TSecTm MinTm = AfterTm.Round(TmUnit);
  TMom TmMom;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    const TSecTm Tm = TmUrlCntV[n].Tm().Round(TmUnit);
    if (! (Tm >= MinTm)) { continue; }
    if (UrlTy != utUndef && QtBs.GetUrlTy(TmUrlCntV[n].UrlId()) != UrlTy) { continue; }
    TmMom.Add(Tm.GetAbsSecs(), TmUrlCntV[n].Cnt()); // weight by occurrence count
  }
  TmMom.Def();
  return TSecTm(uint(TmMom.GetMedian())).Round(TmUnit);
}

// Heuristically decide whether the quote's smoothed frequency-over-time curve
// has a single dominant peak. The quote fails if (a) the contiguous region
// around the global maximum where the curve stays above PeakThresh (40%) of
// the peak is wider than 100 samples, or (b) the curve rises above that
// threshold anywhere outside this region (i.e. there is a secondary peak).
bool TQuote::IsSinglePeak(const TTmUnit& TmUnit, const TSecTm& AfterTm, const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  const double PeakThresh = 0.4;
  TTmFltPrV FqOtV, SmoothV;
  GetFqOt(FqOtV, TmUnit, UrlTy, QtBs);
  TQuote::GetSmoothFqOt(SmoothV, FqOtV, TmUnit, 48, 1.2, AfterTm); // smooth the Fq vector
  SmoothV.Swap(FqOtV);
  // find the global maximum and its index
  double MaxVal=0;
  int maxI=0, maxL=0, maxR=0;
  for (int i = 0; i < FqOtV.Len(); i++) {
    if (MaxVal < FqOtV[i].Val2) {
      MaxVal = FqOtV[i].Val2;  maxI=i; }
  }
  MaxVal *= PeakThresh; // from here on MaxVal is the 40%-of-peak threshold
  // expand left/right from the maximum while the curve stays above threshold
  for (maxL = maxI; maxL>0 && FqOtV[maxL].Val2 > MaxVal; maxL--) { } //printf ("  %d\t%f\n", maxL, FqOtV[maxL].Val2);}
  for (maxR = maxI; maxR<FqOtV.Len() && FqOtV[maxR].Val2 > MaxVal; maxR++) { } //printf ("    %d\t%f\n", maxR, FqOtV[maxR].Val2);}
  if (maxR-maxL > 100) { return false; } // peak region too wide
  for (int i = maxR; i < FqOtV.Len(); i++) {
    // right of the peak region: has to stay below 40% (PeakThresh) of max
    if (FqOtV[i].Val2 > MaxVal) { return false; }
  }
  // left of the peak region: has to stay below 40% (PeakThresh) of max
  for (int i = maxL; i >= 0; i--) {
    if (FqOtV[i].Val2 > MaxVal) { return false; }
  }
  return true;
}

// Raw frequency-over-time histogram: occurrence counts bucketed by time
// rounded to TmUnit, returned as a time-sorted (time, count) vector.
void TQuote::GetFqOt(TTmFltPrV& RawFqOtV, const TTmUnit& TmUnit) const {
  THash<TSecTm, TFlt> BucketCntH;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    const TSecTm Bucket = TmUrlCntV[n].Tm().Round(TmUnit);
    BucketCntH.AddDat(Bucket) += TmUrlCntV[n].Cnt();
  }
  BucketCntH.SortByKey();
  BucketCntH.GetKeyDatPrV(RawFqOtV);
}

// Frequency-over-time histogram restricted to urls of type UrlTy
// (utUndef = all types). Output is sorted by time.
void TQuote::GetFqOt(TTmFltPrV& RawFqOtV, const TTmUnit& TmUnit, const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  THash<TSecTm, TFlt> BucketCntH;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    if (UrlTy != utUndef && QtBs.GetUrlTy(TmUrlCntV[n].UrlId()) != UrlTy) { continue; }
    const TSecTm Bucket = TmUrlCntV[n].Tm().Round(TmUnit);
    BucketCntH.AddDat(Bucket) += TmUrlCntV[n].Cnt();
  }
  BucketCntH.SortByKey();
  BucketCntH.GetKeyDatPrV(RawFqOtV);
}

// Frequency-over-time histogram limited to rounded times in [BegTm, EndTm]
// (bounds are rounded to TmUnit first). Output is sorted by time.
void TQuote::GetFqOt(TTmFltPrV& RawFqOtV, const TTmUnit& TmUnit, const TSecTm& BegTm, const TSecTm& EndTm) const {
  const TSecTm LoTm = BegTm.Round(TmUnit);
  const TSecTm HiTm = EndTm.Round(TmUnit);
  THash<TSecTm, TFlt> BucketCntH;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    const TSecTm Bucket = TmUrlCntV[n].Tm().Round(TmUnit);
    if (Bucket >= LoTm && Bucket <= HiTm) {
      BucketCntH.AddDat(Bucket) += TmUrlCntV[n].Cnt();
    }
  }
  BucketCntH.SortByKey();
  BucketCntH.GetKeyDatPrV(RawFqOtV);
}

// Frequency-over-time histogram limited to rounded times in [BegTm, EndTm]
// and to urls of type UrlTy (utUndef = all types). Output is sorted by time.
void TQuote::GetFqOt(TTmFltPrV& RawFqOtV, const TTmUnit& TmUnit, const TSecTm& BegTm, const TSecTm& EndTm, const TUrlTy& UrlTy, const TQuoteBs& QtBs) const {
  const TSecTm LoTm = BegTm.Round(TmUnit);
  const TSecTm HiTm = EndTm.Round(TmUnit);
  THash<TSecTm, TFlt> BucketCntH;
  for (int n = 0; n < TmUrlCntV.Len(); n++) {
    const TSecTm Bucket = TmUrlCntV[n].Tm().Round(TmUnit);
    if (! (Bucket >= LoTm && Bucket <= HiTm)) { continue; }
    if (UrlTy != utUndef && QtBs.GetUrlTy(TmUrlCntV[n].UrlId()) != UrlTy) { continue; }
    BucketCntH.AddDat(Bucket) += TmUrlCntV[n].Cnt();
  }
  BucketCntH.SortByKey();
  BucketCntH.GetKeyDatPrV(RawFqOtV);
}

// Convenience wrapper: build the raw histogram over [BegTm, EndTm] and
// smooth it with the static GetSmoothFqOt.
void TQuote::GetSmoothFqOt(TTmFltPrV& FqOtV, const TTmUnit& TmUnit, const int& WndSz, const double& Smooth, const TSecTm& BegTm, const TSecTm& EndTm) const {
  TTmFltPrV RawV;
  GetFqOt(RawV, TmUnit, BegTm, EndTm);
  GetSmoothFqOt(FqOtV, RawV, TmUnit, WndSz, Smooth, BegTm, EndTm);
}

// Smooth a raw (time, count) histogram with a two-sided exponential kernel:
// each raw value is spread over a window of WndSz time units, with weight
// Smooth^-j at offset j, and normalized so the spread weights sum to the
// original value. Windows that would extend past [BegTm, EndTm] are clipped
// and renormalized so no mass is lost at the boundaries.
void TQuote::GetSmoothFqOt(TTmFltPrV& SmoothFqOtV, const TTmFltPrV& RawFqOtV, const TTmUnit& TmUnit, const int& WndSz, const double& Smooth, const TSecTm& BegTm, const TSecTm& EndTm) {
  const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
  const int HalfWndSecs = (WndSz/2)*TmUnitSecs;
  // normalizing constant of a full (unclipped) window: 1 + 2*sum_j Smooth^-j
  double FullNormConst = 1;
  for (int j = 1; j <= WndSz/2; j++) {
    FullNormConst += 2*pow(Smooth, -j); }
  THash<TSecTm, TFlt> TmCntH;
  for (int i = 0; i < RawFqOtV.Len(); i++) {
    const TSecTm Tm = RawFqOtV[i].Val1;
    double NormConst = FullNormConst;
    // calculate new normalizing constant if the window does not fit
    if (Tm < BegTm+HalfWndSecs || Tm+HalfWndSecs > EndTm) {
      NormConst = 1;
      for (int j = 1; j <= WndSz/2; j++) {
        if (Tm >= BegTm+j*TmUnitSecs) { NormConst += pow(Smooth, -j);  }
        if (Tm+j*TmUnitSecs <= EndTm) { NormConst += pow(Smooth, -j);  }
    } }
    const double NormFq = RawFqOtV[i].Val2 / NormConst;
    // spread NormFq to both sides of Tm with exponentially decaying weights
    for (int j = 1; j <= WndSz/2; j++) {
      const int Off = j*TmUnitSecs;
      if (Tm+Off <= EndTm) {
        TmCntH.AddDat(TSecTm(Tm+Off).Round(TmUnit)) += NormFq * pow(Smooth, -j); }
      if (Tm >= BegTm + Off) {
        TmCntH.AddDat(TSecTm(Tm-Off).Round(TmUnit)) += NormFq * pow(Smooth, -j); }
    }
    TmCntH.AddDat(Tm.Round(TmUnit)) += NormFq; // center of the kernel
  }
  TmCntH.SortByKey();
  TmCntH.GetKeyDatPrV(SmoothFqOtV);
}

// One-line summary: total frequency, number of urls, and the quote text.
TStr TQuote::GetDesc() const {
  const int Fq = GetFq();
  const int Urls = GetUrls();
  return TStr::Fmt("#:%d U:%d  %s", Fq, Urls, QtStr.CStr());
}

// Plot the quote's frequency over time (6-hour buckets, x-axis in days since
// the first occurrence). Writes a gnuplot plot to OutFNm; no-op when the
// quote has no occurrences. Note: sorts TmUrlCntV as a side effect.
void TQuote::PlotOverTm(const TStr& OutFNm) {
  TmUrlCntV.Sort();
  if (TmUrlCntV.Empty()) { return; }
  TFltFltH HrCntH;
  for (int i = 0; i < TmUrlCntV.Len(); i++) {
    // offset from the first occurrence, rounded to 6 hours, expressed in days
    const double Hr = TSecTm(TmUrlCntV[i].Tm()-TmUrlCntV[0].Tm()).Round(tmu6Hour)/(24*3600.0);
    HrCntH.AddDat(Hr) += TmUrlCntV[i].Cnt();
  }
  HrCntH.SortByKey();
  // fixed typo in the plot title: "occurences" -> "occurrences"
  TGnuPlot::PlotValCntH(HrCntH, OutFNm, TStr::Fmt("%d occurrences, %d urls: %s", GetFq(), GetUrls(), QtStr.CStr()),
    TStr::Fmt("Time [days] from %s", TmUrlCntV[0].Tm().GetYmdTmStr().CStr()), "Frequency");
}

void TQuote::LoadQtV(const TStr& InFNm, TVec<TQuote>& QtV) {
  PSIn SIn = TZipIn::IsZipFNm(InFNm) ? TZipIn::New(InFNm) : TFIn::New(InFNm);
  QtV.Clr(false);
  while (! SIn->Eof()) {
    QtV.Add();
    QtV.Last().Load(*SIn);
  }
}

// Compute the weighted median occurrence time (TmMed) and the mean absolute
// deviation from it in days (TmDev). Median is used instead of the mean as
// it is robust to outliers. Sorts TmUrlCntV by time as a side effect.
void TQuote::ComputeTmStats() {
  TmUrlCntV.Sort(true);
  double TotCnt = 0;
  for (int i = 0; i < TmUrlCntV.Len(); i++) {
    TotCnt += TmUrlCntV[i].Cnt(); }
  // BUG FIX: previously divided by zero below when the quote had no occurrences
  if (TotCnt <= 0) { TmDev = 0; return; }
  // median time: first entry where the cumulative count reaches half the total
  double CumCnt = 0;
  for (int i = 0; i < TmUrlCntV.Len(); i++) {
    CumCnt += TmUrlCntV[i].Cnt();
    if (CumCnt >= (TotCnt-0.5)/2) {
      TmMed = TmUrlCntV[i].Tm();
      break;
    }
  }
  // Calculate norm-1 (mean absolute) distance to TmMed, in days
  double SumNorm1 = 0;
  for (int i = 0; i < TmUrlCntV.Len(); i++) {
    SumNorm1 += TmUrlCntV[i].Cnt() * fabs(double(TmUrlCntV[i].Tm().GetAbsSecs()) - double(TmMed.GetAbsSecs())) / 86400.0;
  }
  TmDev = SumNorm1 / TotCnt;  // Use norm-1 distance as the measure of time deviation
}

/////////////////////////////////////////////////
// Quote Base
// Deserializing constructor. The member load order must match TQuoteBs::Save.
// ClustQtIdVH is loaded only if present (older files omit it). After loading,
// each cluster's quote-id vector is de-duplicated and sorted, and the min/max
// time span of the base is recomputed.
TQuoteBs::TQuoteBs(TSIn& SIn) : StrQtIdH(SIn), QuoteH(SIn), UrlInDegH(SIn), UrlLkH(SIn), UrlTyH(SIn) {
  if (! SIn.Eof()) { ClustQtIdVH.Load(SIn); } // optional trailing section
  TIntSet CIdSet;
  for (int q = 0; q < ClustQtIdVH.Len(); q++) {
    CIdSet.AddKey(ClustQtIdVH.GetKey(q)); // the cluster root is always a member
    for (int c = 0; c < ClustQtIdVH[q].Len(); c++) {
      CIdSet.AddKey(ClustQtIdVH[q][c]); } // make sure there are no duplicates
    CIdSet.GetKeyV(ClustQtIdVH[q]);
    ClustQtIdVH[q].Sort();
    CIdSet.Clr(false); // keep capacity, reuse for the next cluster
  }
	ComputeMnMxTm();
  Dump();
}

// Compute the time range of the quotes 
void TQuoteBs::ComputeMnMxTm() {
  // set min and max quote base time
  MnTm = TSecTm(TInt::Mx-1);
  MxTm = TSecTm(1);
  for (int i = 0; i < QuoteH.Len(); i++) {
	  const TQuote& Q = QuoteH[i];
	  for (int j = 0; j < Q.TmUrlCntV.Len(); j++) {
      if (Q.TmUrlCntV[j].Tm() > MxTm) { MxTm = Q.TmUrlCntV[j].Tm(); }
      if (Q.TmUrlCntV[j].Tm() < MnTm) { MnTm = Q.TmUrlCntV[j].Tm(); }
	  }
  }
  printf("Min Time = %s, Max Time = %s\n", MnTm.GetStr().CStr(), MxTm.GetStr().CStr());
}

// Serialize the quote base. The save order here must stay in sync with the
// member load order in the TQuoteBs(TSIn&) constructor.
void TQuoteBs::Save(TSOut& SOut) const {
  StrQtIdH.Save(SOut);   // quote string -> quote id
  QuoteH.Save(SOut);     // quote id -> quote
  UrlInDegH.Save(SOut);  // url in-link counts
  UrlLkH.Save(SOut);     // post url -> linked urls
  UrlTyH.Save(SOut);     // url type labels
  ClustQtIdVH.Save(SOut); // cluster id -> member quote ids (optional on load)
}

// Factory: create an empty, reference-counted quote base.
PQuoteBs TQuoteBs::New() {
  TQuoteBs* QtBs = new TQuoteBs();
  return PQuoteBs(QtBs);
}

// Factory: deserialize a quote base from the given input stream.
PQuoteBs TQuoteBs::Load(TSIn& SIn) {
  printf("loading %s...", SIn.GetSNm().CStr());
  TQuoteBs* QtBs = new TQuoteBs(SIn);
  return PQuoteBs(QtBs);
}

// Generate a sample quote base by filtering out quote with frequency lower than a given threshold 
void TQuoteBs::GetSampleQtBs(int MinFq, TStr Pref) {
	printf("Generating a sample of the current quote base, minimum frequency = %d\n", MinFq);
	PQuoteBs QtBs = TQuoteBs::New();
	QtBs->MnTm = MnTm; QtBs->MxTm = MxTm;
	for (int i = 0; i < QuoteH.Len(); i++) {
		if (QuoteH[i].GetUrls() < MinFq) continue;
		QtBs->AddQuote(QuoteH[i], *this);
	}
	QtBs->SetUrlTy(TStr("news_media.txt"), utMedia);
	TStr OutFNm = TStr::Fmt("%s-w4mfq%d.QtBs", Pref.CStr(), MinFq);
	TFOut FOut(OutFNm); 
	QtBs->Save(FOut);
	printf("Sample quote base successfully generated\n");
}

// Write a copy of this quote base to disk containing exactly the quotes
// whose internal indices are listed in QtV. Output file: <Pref>.QtBs
void TQuoteBs::GetSampleQtBs(TIntV& QtV, TStr Pref) {
  printf("Generating a sample of current quote base, QtV Len = %d\n", QtV.Len());
  PQuoteBs SampleBs = TQuoteBs::New();
  SampleBs->MnTm = MnTm;
  SampleBs->MxTm = MxTm;
  for (int n = 0; n < QtV.Len(); n++) {
    SampleBs->AddQuote(QuoteH[QtV[n]], *this);
  }
  SampleBs->SetUrlTy(TStr("news_media.txt"), utMedia);
  const TStr OutFNm = TStr::Fmt("%s.QtBs", Pref.CStr());
  TFOut FOut(OutFNm);
  SampleBs->Save(FOut);
  printf("Sample quote base successfully generated\n");
}

// Collect the ids of all quotes in the base into QtIdV (cleared first).
void TQuoteBs::GetQtIdV(TIntV& QtIdV) const {
  QtIdV.Clr();
  for (int n = 0; n < QuoteH.Len(); n++) {
    QtIdV.Add(QuoteH.GetKey(n));
  }
}

// Type label of the given url. Urls without an explicit label are treated
// as blogs by default.
TUrlTy TQuoteBs::GetUrlTy(const int& UrlId) const {
  if (! UrlTyH.IsKey(UrlId)) { return utBlog; }
  return (TUrlTy) UrlTyH.GetDat(UrlId).Val;
}

// Label urls with type SetTy. InFNm lists domain substrings (one per line);
// every url whose domain contains one of the listed strings (substring match
// via strstr, not exact match) gets SetTy in UrlTyH. Existing labels for
// other urls are kept (UrlTyH is not cleared).
void TQuoteBs::SetUrlTy(const TStr& InFNm, const TUrlTy& SetTy) {
  printf("Set url type\n");
  //UrlTyH.Clr(); //!!!
  TStrHash<TIntV> DomUrlV; // urls from each domain
  // group all url ids in the base by their domain name
  for (int q = 0; q < Len(); q++) {
    const TQuote::TTmUrlCntV& V = GetQtN(q).TmUrlCntV;
    for (int u = 0; u < V.Len(); u++) {
      const TChA Url = GetStr(V[u].UrlId());
      DomUrlV.AddDat(TStrUtil::GetDomNm(Url).CStr()).Add(V[u].UrlId());
    }
  }
  printf("  %d domains\n", DomUrlV.Len());
  TStrV TyUrlV; TStr Ln;
  if (! TFile::Exists(InFNm)) {
    printf("!!! %s does not exist\n", InFNm.CStr());
    return; // no label file: leave UrlTyH unchanged
  }
  for (TFIn FIn(InFNm); FIn.GetNextLn(Ln); ) { TyUrlV.Add(Ln.GetTrunc()); }
  printf("  %d domains with type label loaded\n", TyUrlV.Len());
  int NDomSet=0, NUrlSet=0;
  TExeTm ExeTm;
  // O(domains * labels) substring scan; label all urls of a matching domain
  for (int d = 0; d < DomUrlV.Len(); d++) {
    for (int u = 0; u < TyUrlV.Len(); u++) {
      if (strstr(DomUrlV.GetKey(d), TyUrlV[u].CStr()) != NULL) {
        const TIntV& urlV = DomUrlV[d];
        for (int i = 0; i < urlV.Len(); i++) {
          UrlTyH.AddDat(urlV[i], SetTy); }
        NDomSet++;  NUrlSet+=urlV.Len();
        break; // first matching label wins for this domain
      }

    }
    if (d % 1000 == 0) { printf("  %d/%d: labeled %d doms, %d urls [%s]\n", d, DomUrlV.Len(), NDomSet, NUrlSet, ExeTm.GetStr()); }
  }
  printf("  labeled %d doms, %d urls [%s]\n", NDomSet, NUrlSet, ExeTm.GetStr());
  printf("  %d total labeled urls\n", UrlTyH.Len());
}

// True iff any of the quote's url ids is a member of UrlSet.
bool AppearsAt(const TIntSet& UrlSet, const TQuote& Q) {
  const int Urls = Q.GetUrls();
  for (int n = 0; n < Urls; n++) {
    if (UrlSet.IsKey(Q.GetUrlId(n))) { return true; }
  }
  return false;
}

// Also merges quotes that are in clusters into a single quote
// Return (in QtIdV) the ids of quotes passing all filters, ordered by
// decreasing frequency in [BegTm, EndTm]. Filters: minimum word length,
// minimum frequency, an optional required substring (HasWord), an optional
// url substring the quote must appear at (AppearsAtUrl), and an optional
// url-type restriction for frequency counting. With OnlyClustRoots only
// cluster roots are considered (using cluster-wide frequency); when a root
// is selected, its cluster members are suppressed from the output.
void TQuoteBs::GetQtIdVByFq(TIntV& QtIdV, const int& MinWrdLen, const int& MinQtFq, const bool& OnlyClustRoots, const TStr& HasWord, const TStr& AppearsAtUrl, const TUrlTy& OnlyCountTy, const TSecTm& BegTm, const TSecTm& EndTm) const {
  printf("Get top quotes from %d quotes\n", QuoteH.Len());
  printf("  %s -- %s\n", BegTm.GetYmdTmStr().CStr(), EndTm.GetYmdTmStr().CStr());
  TIntPrV FqQtIdV; // (frequency, quote id) pairs, sorted at the end
  int words=0, minFq=0, hasWord=0; // skip counters for the summary printout
  TIntSet CSet, QSet; // sanity sets: each cluster/quote id seen at most once

  TIntSet UrlSet;
  if (! AppearsAtUrl.Empty()) {
    // restrict each quote's occurrence list to urls containing AppearsAtUrl
    for (int q = 0; q < QuoteH.Len(); q++) {
      const TQuote& Q = GetQtN(q);
      TQuote::TTmUrlCntV TmUrlCntV(Q.GetUrls(), 0);
      for (int u = 0; u < Q.GetUrls(); u++) {
        if (UrlSet.IsKey(Q.GetUrlId(u))) {
          TmUrlCntV.Add(Q.TmUrlCntV[u]);
          continue; }
        if (strstr(GetStr(Q.GetUrlId(u)), AppearsAtUrl.CStr())!=NULL) {
          TmUrlCntV.Add(Q.TmUrlCntV[u]);
          UrlSet.AddKey(Q.GetUrlId(u)); } // add all urls
      }
      // NOTE(review): casts away const and mutates QuoteH inside a const
      // member function -- this PERMANENTLY drops non-matching occurrences
      // from the quote base; confirm this destructive filtering is intended.
      TQuote* QPt = (TQuote*) &(QuoteH[q]);
      QPt->TmUrlCntV = TmUrlCntV; // keep only scientific site appearances
    }
  }
  printf("done.\n");
  //for (TStr Line; FIn.GetNextLn(Line); ) { Line.ToTrunc(); BlackListH.AddKey(Line.GetLc()); }
  for (int q = 0; q < QuoteH.Len(); q++) {
    const TQuote& Qt = GetQtN(q);
    //if (BlackListH.IsKey(Qt.QtStr)) { continue; } // skip black list
    if (TStrUtil::CountWords(Qt.QtStr.CStr()) < MinWrdLen) { words++;  continue; }
    if ((! HasWord.Empty()) && Qt.QtStr.SearchStr(HasWord)==-1) { hasWord++;  continue; }
    int Fq = 0;
    if (OnlyClustRoots) {
		  if (Qt.GetTy() == qtRoot) {
		    IAssert(Qt.GetCId() == GetQtId(q));
		    IAssert(! CSet.IsKey(Qt.GetCId()));  CSet.AddKey(Qt.GetCId());
		    if ((! HasWord.Empty()) && GetQt(GetCentrQtId(Qt.GetCId())).GetStr().SearchStr(HasWord)==-1) { hasWord++; continue; } // the quote we display has to have a word
		    Fq = GetClustFq(Qt.GetCId(), OnlyCountTy); // get frequency of all quotes in the cluster
		    TQuote CentrQt;  GetMergedClustQt(Qt.GetCId(), CentrQt, false); // get frequency only  after the centroid appears Was true originally
		    if (! UrlSet.Empty() && ! AppearsAt(UrlSet, CentrQt)) { continue; }
		    if (Fq < MinQtFq) { minFq++; continue; }
		    const int Doms = CentrQt.GetDoms(*this);
        // remove quotes that appeared at less than 3 domains and 5*domains < Urls
		    if (Doms < 3 || 5*Doms < CentrQt.GetUrls()) { continue; } 
		  } else {
			  continue; } // non-root quote while OnlyClustRoots is set: skip
	  } else {
      if (! UrlSet.Empty() && ! AppearsAt(UrlSet, Qt)) { continue; }
      Fq = Qt.GetFq(BegTm, EndTm, OnlyCountTy, *this);
      if (Fq < MinQtFq) { minFq++; continue; }
      const int Doms = Qt.GetDoms(*this);
      if (Doms < 3 || 4*Doms < Qt.GetUrls()) { continue; } // NOTE: 4x here vs 5x for roots above
    }
    IAssert(! QSet.IsKey(GetQtId(q))); QSet.AddKey(GetQtId(q));
    FqQtIdV.Add(TIntPr(Fq, GetQtId(q)));
  }
  printf("  skip %d : word len < %d\n", words, MinWrdLen);
  printf("  skip %d : qt fq < %d\n", minFq, MinQtFq);
  if (! HasWord.Empty()) { printf("  skip %d : not containing '%s'\n", hasWord, HasWord.CStr()); }
  printf("  remaining %d quotes\n", FqQtIdV.Len());
  FqQtIdV.Sort(false); // descending by frequency
  QtIdV.Clr(false);
  TIntSet SeenSet;
  for (int i = 0; i < FqQtIdV.Len(); i++) {
    if (i < 100) { printf(" fq:%d", FqQtIdV[i].Val1()); }
    const int qid = FqQtIdV[i].Val2;
    if (! SeenSet.IsKey(qid)) {
      QtIdV.Add(qid);
      SeenSet.AddKey(qid);
      // if we get a root, don't also get the children
      if (GetQt(qid).GetTy()==qtRoot || GetQt(qid).GetTy()==qtCentr) {
        const TIntV& ClustV = GetClust(GetQt(qid).GetCId());
        for (int c = 0; c < ClustV.Len(); c++) { SeenSet.AddKey(ClustV[c]); }
      }
    }
  }
  printf("  return %d quotes\n", QtIdV.Len());
}

// Count how many of the quote's urls belong to a domain in GoodDom.
// UrlDomH must map every url id of the quote to its domain id.
int AppearsAtDom(const TQuote& CentrQt, const TIntSet& GoodDom, const TIntH& UrlDomH) {
  int GoodCnt = 0;
  const int Urls = CentrQt.GetUrls();
  for (int n = 0; n < Urls; n++) {
    const int UrlId = CentrQt.GetUrlId(n);
    IAssert(UrlDomH.IsKey(UrlId));
    if (GoodDom.IsKey(UrlDomH.GetDat(UrlId))) {
      GoodCnt++;
    }
  }
  return GoodCnt;
}

// Return (in QtIdV) quote ids ordered by decreasing total frequency,
// restricted to quotes that appear at no fewer than MinDoms of the domains
// whose names contain one of the FromDomains substrings. Cluster roots are
// scored via their merged cluster quote; non-root quotes are considered only
// when OnlyClustRoots is false. Selected roots suppress their cluster
// members from the output.
// NOTE(review): a quote that is neither a root nor passes the non-root
// branch (when OnlyClustRoots is true) falls through with Fq==0 and is still
// added to FqQtIdV -- confirm this is intended.
void TQuoteBs::GetQtIdVByFq(TIntV& QtIdV, const int& MinWrdLen, const int& MinQtFq, const TStrV& FromDomains,
                            const bool& OnlyClustRoots, const TStr& HasWord, int MinDoms) const {
  printf("Get top quotes from %d quotes appearing at %d domains\n", QuoteH.Len(), FromDomains.Len());
  TIntPrV FqQtIdV; // (frequency, quote id), sorted descending at the end
  TIntH UrlDomH;   // url id -> domain id
  TStrSet DomainSet; // domain name -> domain id
  TIntSet QSet; // sanity: each quote id added at most once
  // map every url in the base to a domain id
  for (int q = 0; q < QuoteH.Len(); q++) {
    const TQuote& Q = GetQtN(q);
    for (int u = 0; u < Q.GetUrls(); u++) {
      const int U = Q.GetUrlId(u);
      const int D = DomainSet.AddKey(TStrUtil::GetDomNm2(GetStr(U)));
      UrlDomH.AddDat(U, D);
    }
  }
  // mark domains whose name contains any of the FromDomains substrings
  TIntSet GoodDom;
  for (int d = 0; d < DomainSet.Len(); d++) {
    for (int f = 0; f < FromDomains.Len(); f++) {
      if (DomainSet[d].SearchStr(FromDomains[f]) != -1) {
        GoodDom.AddKey(d); break; }
    }
  }
  printf("%d total domains\n", DomainSet.Len());
  printf("%d from domains\n", FromDomains.Len());
  printf("%d good domains\n", GoodDom.Len());
  printf("done.\n\n");
  const TSecTm BegTm(1), EndTm(TSecTm::GetCurTm()); // effectively unbounded range
  for (int q = 0; q < QuoteH.Len(); q++) {
    const TQuote& Qt = GetQtN(q);
    if (TStrUtil::CountWords(Qt.QtStr.CStr()) < MinWrdLen) { continue; }
    if ((! HasWord.Empty()) && Qt.QtStr.SearchStr(HasWord)==-1) { continue; }
    int Fq = 0;
    if (Qt.GetTy() == qtRoot) {
      TQuote CentrQt;
      GetMergedClustQt(Qt.GetCId(), CentrQt, true); // get frequency only after the centroid appears
      if (AppearsAtDom(CentrQt, GoodDom, UrlDomH) < MinDoms) { continue; }
      Fq = CentrQt.GetFq(BegTm, EndTm, utUndef, *this);
      if (Fq < MinQtFq) { continue; }
      const int Doms = CentrQt.GetDoms(*this);
      if (Doms < 3 || 5*Doms < CentrQt.GetUrls()) { continue; } // too few / too spammy domains
    }
    else if (! OnlyClustRoots) {
      if (AppearsAtDom(Qt, GoodDom, UrlDomH) < MinDoms) { continue; }
      Fq = Qt.GetFq(BegTm, EndTm, utUndef, *this);
      if (Fq < MinQtFq) { continue; }
      const int Doms = Qt.GetDoms(*this);
      if (Doms < 3 || 4*Doms < Qt.GetUrls()) { continue; } // NOTE: 4x here vs 5x for roots
    }
    IAssert(! QSet.IsKey(GetQtId(q))); QSet.AddKey(GetQtId(q));
    FqQtIdV.Add(TIntPr(Fq, GetQtId(q)));
  }
  printf("  remaining %d quotes\n", FqQtIdV.Len());
  FqQtIdV.Sort(false); // descending by frequency
  QtIdV.Clr(false);
  TIntSet SeenSet;
  for (int i = 0; i < FqQtIdV.Len(); i++) {
    const int qid = FqQtIdV[i].Val2;
    if (! SeenSet.IsKey(qid)) {
      QtIdV.Add(qid);
      SeenSet.AddKey(qid);
      // if we get a root, don't also get the children
      if (GetQt(qid).GetTy()==qtRoot || GetQt(qid).GetTy()==qtCentr) {
        const TIntV& ClustV = GetClust(GetQt(qid).GetCId());
        for (int c = 0; c < ClustV.Len(); c++) { SeenSet.AddKey(ClustV[c]); }
      }
    }
  }
  printf("  return %d quotes\n", QtIdV.Len());
}

// Slide a window over the quote base's time span and, for each step, write
// the top (up to 3 new) cluster-root quotes of that window to
// "top_qts_per_time_unit.txt". Quotes already reported in an earlier step
// are skipped. Results go only to the file; nothing is returned.
// NOTE(review): WndSzHr and TakePerStep are accepted but never used (the
// window equals the step and the per-step cap is hard-coded to 3); the
// "week of:" header is printed regardless of StepHr -- confirm intended.
void TQuoteBs::GetQtIdVByTm(const int& WndSzHr, const int& StepHr, const int& MinWrdLen, const int& MinQtFq, const int& TakePerStep) const {
  const TTmUnit TmUnit = tmu4Hour;
  TSecTm MinTm, MaxTm;  GetMinMaxTm(MinTm, MaxTm);
  MinTm=MinTm.Round(TmUnit); MaxTm=MaxTm.Round(TmUnit);
  printf("Dataset span %s -- %s\n", MinTm.GetStr().CStr(), MaxTm.GetStr().CStr());
  printf("  time window %dh, step size %dh, min wrd len %d, take top %d per step\n", WndSzHr, StepHr, MinWrdLen, TakePerStep);
  TIntSet TopQtIdSet; // quote ids already reported in a previous step
  TIntV TopQtIdV;
  int cnt=0;
  FILE *F = fopen("top_qts_per_time_unit.txt", "wt");
  for (TSecTm Tm=MinTm; Tm <= MaxTm; Tm += StepHr*3600, cnt++) {
    // top cluster roots by frequency inside [Tm, Tm+StepHr)
    GetQtIdVByFq(TopQtIdV, MinWrdLen, MinQtFq, true, "", "", utUndef, TSecTm(Tm), TSecTm(Tm+StepHr*3600));
    fprintf(F,"week of: %s\n", Tm.GetYmdTmStr().CStr());
    for (int q = 0, j=0; q < TopQtIdV.Len() && j<3; q++) {
      if (! TopQtIdSet.IsKey(TopQtIdV[q])) {
        fprintf(F, "%s\t%d\n", GetQt(GetCentrQtId(TopQtIdV[q])).GetStr().CStr(), GetClustFq(TopQtIdV[q]));
        j++; }
      TopQtIdSet.AddKey(TopQtIdV[q]);
    }
    fprintf(F, "\n");
  }
  fclose(F);
  printf("  done %d quotes\n", TopQtIdSet.Len());
  //TIntV QtIdV;
  //TopQtIdSet.GetKeyV(QtIdV);
  //return QtIdV;
  //printf("  return %d quotes\n", QtIdV.Len());
}

// Return (in CIdV) the ids of clusters whose merged quote has frequency at
// least MinClFq in [BegTm, EndTm] (counting only urls of type OnlyCountTy;
// utUndef = all types), ordered by decreasing frequency. When RootHasWord is
// non-empty the cluster's representative quote must contain it; with
// OnlyAfterRoot only occurrences after the centroid appears are counted.
void TQuoteBs::GetCIdVByFq(TIntV& CIdV, const int& MinClFq, const TStr& RootHasWord, const TUrlTy& OnlyCountTy, const bool& OnlyAfterRoot, const TSecTm& BegTm, const TSecTm& EndTm) const {
  printf("Get top clusters from %d clusters\n", GetClusts());
  TIntPrV FqCIdV; // (frequency, cluster id), sorted descending below
  for (int c = 0; c < GetClusts(); c++) {
    const int CId = GetCId(c);
    if (! IsQtId(CId)) { printf("!!! %d:%d\n", c, CId); continue; } // dangling cluster id
    IAssert(GetQt(CId).GetTy()==qtRoot);
    const int CentrQId = GetCentrQtId(CId);
    if (CentrQId == -1) { continue; } // cluster has no representative quote
    if ((! RootHasWord.Empty()) && GetQt(CentrQId).GetStr().SearchStr(RootHasWord)==-1) { continue; } // the quote we display has to have a word
    TQuote MergedQt;
    GetMergedClustQt(CId, MergedQt, OnlyAfterRoot); // get frequency only after the centroid appears
    const int ClFq = MergedQt.GetFq(BegTm, EndTm, OnlyCountTy, *this);
    if (ClFq >= MinClFq) {
      FqCIdV.Add(TIntPr(ClFq, CId));
    }
  }
  printf("  remaining %d clusters\n", FqCIdV.Len());
  FqCIdV.Sort(false);
  CIdV.Clr(false);
  for (int n = 0; n < FqCIdV.Len(); n++) {
    CIdV.Add(FqCIdV[n].Val2);
  }
}

// Return the precomputed time span of the quote base (set by ComputeMnMxTm).
void TQuoteBs::GetMinMaxTm(TSecTm& MinTm, TSecTm& MaxTm) const {
  MinTm = MnTm;
  MaxTm = MxTm;
}

void TQuoteBs::GetMinMaxTm(const TIntV& QtIdV, TSecTm& MinTm, TSecTm&MaxTm) const {
  MinTm = MaxTm = TSecTm();
  for (int q = 0; q < QtIdV.Len(); q++) {
    const TQuote::TTmUrlCntV& V = GetQt(QtIdV[q]).TmUrlCntV;
    if (V.Empty()) { continue; }
    if (! MinTm.IsDef() || MinTm > V[0].Tm()) {
      MinTm = V[0].Tm(); }
    if (! MaxTm.IsDef() || MaxTm < V[0].Tm()) {
      MaxTm = V[0].Tm(); }
  }
}

// For each quote in QtIdV get the most representative url containing the quote
void TQuoteBs::GetQtPageUrl(const TIntV& QtIdV, TIntH& QtUrlIdH) const {
  THash<TChA, TInt> DomQtCntH;
  for (int q = 0; q < Len(); q++) {
    const TQuote& Q = GetQtN(q);
    for (int u = 0; u < Q.GetUrls(); u++) {
      DomQtCntH.AddDat(TStrUtil::GetDomNm(GetStr(Q.TmUrlCntV[u].UrlId()))) += Q.TmUrlCntV[u].Cnt(); }
  }
  // stop-list
  const TStr StopList = "blog.myspace.com|www.newsmeat.com|us.rd.yahoo.com|www.startribune.com|"
    "news.originalsignal.com|uk.news.yahoo.com|ap.google.com|www.cnn.com|www.opednews.com";
  TStrV StopListV; StopList.SplitOnAllCh('|', StopListV);
  for (int s = 0; s < StopListV.Len(); s++) {
    DomQtCntH.AddDat(StopListV[s]) = 1; }
  DomQtCntH.SortByDat(false);
  QtUrlIdH.Clr(false);
  // find url from best domain for each quote
  for (int q = 0; q < QtIdV.Len(); q++) {
    const TQuote& Q = GetQt(QtIdV[q]);
    int DomFq=0, BestUrlId=0;
    for (int u = 0; u < Q.TmUrlCntV.Len(); u++) {
      const TChA Dom = TStrUtil::GetDomNm(GetStr(Q.TmUrlCntV[u].UrlId()));
      if (DomFq < DomQtCntH.GetDat(Dom)) {
        DomFq = DomQtCntH.GetDat(Dom);
        BestUrlId = Q.TmUrlCntV[u].UrlId();
      }
    }
    QtUrlIdH.AddDat(QtIdV[q], BestUrlId);
  }
  //FILE *F = fopen("quotes_per_domain.tab", "wt");
  //for (int d = 0; d < DomQtCntH.Len(); d++) {
  //fprintf(F, "%s\t%d\n", DomQtCntH.GetKey(d).CStr(), DomQtCntH[d]); }
  //fclose(F);
}

void TQuoteBs::AddQuote(const TQuote& Quote, const TQuoteBs& CurQtBs) {
  //const int QtId = CurQtBs.GetQtId(Quote.GetStr().CStr());
	const int QtId = QuoteH.Len();
  TQuote& Qt = QuoteH.AddDat(QtId);
  StrQtIdH.AddDat(Quote.GetStr().CStr(), QtId);
  Qt.QtCIdTy = TQuote::TQtIdTy(QtId, qtQuote);
  Qt.QtStr = Quote.QtStr;
  Qt.TmUrlCntV = Quote.TmUrlCntV;
  for (int u = 0; u < Qt.TmUrlCntV.Len(); u++) {
    Qt.TmUrlCntV[u].SetUrlId(AddStr(CurQtBs.GetStr(Quote.TmUrlCntV[u].UrlId())));
  }
}

// Register one post: add its quotes (creating new quote entries as needed,
// counting repeated quotes within the post), record one (time, url, count)
// occurrence per distinct quote, and update in-link counts for the post's
// outgoing cross-domain links. Quotes shorter than MinQtWrdLen are skipped.
void TQuoteBs::AddQuote(const TVec<TChA>& QuoteV, const TVec<TChA>& LinkV, const TChA& PostUrlStr, const TSecTm& PubTm, const int& MinQtWrdLen) {
  TIntH QtCntH; // qid --> cnt
  for (int q = 0; q < QuoteV.Len(); q++) {
    if (TStrUtil::CountWords(QuoteV[q].CStr()) < MinQtWrdLen) { continue; } // skip if too short
    int QtId = GetQtId(QuoteV[q].CStr());
    if (QtId == -1) { // new quote
      IAssert(! IsStr(QuoteV[q].CStr()));
      QtId = QuoteH.Len(); // quote ids are assigned sequentially
      TQuote& Qt = QuoteH.AddDat(QtId);
      Qt.QtCIdTy = TQuote::TQtIdTy(QtId, qtQuote);
      Qt.QtStr = QuoteV[q];
      StrQtIdH.AddDat(QuoteV[q].CStr(), QtId);
    } else { IAssert(IsStr(QuoteV[q].CStr())); }
    IAssert(IsQtId(QtId));
    QtCntH.AddDat(QtId) += 1; // count appearances of each quote
  }
  if (QtCntH.Len() <= 0) 
	  return; // post contains no usable quote: don't record the url or links

  // one occurrence entry per distinct quote, weighted by its in-post count
  const int PostUrlId = AddStr(PostUrlStr);
  for (int i = 0; i < QtCntH.Len(); i++) {
		QuoteH.GetDat(QtCntH.GetKey(i)).TmUrlCntV.Add(TQuote::TTmUrlCnt(PubTm, PostUrlId, QtCntH[i]));
  }

  // build in-link counts
  const TChA PostDomain = TStrUtil::GetDomNm2(PostUrlStr);
  for (int l = 0; l < LinkV.Len(); l++) {
    const TChA& Url = LinkV[l];
    if (TStrUtil::GetDomNm2(Url) == PostDomain) { continue; } // link inside same domain
		const int LinkStrId = AddStr(Url.CStr());
    UrlInDegH.AddDat(LinkStrId) += 1; // one more in-link for the target url
		UrlLkH.AddDat(TInt(PostUrlId)).AddKey(TInt(LinkStrId)); // post -> target edge
  }
}

// Project this quote base onto the quotes listed in QtIdV: copy those quotes
// (with url strings remapped), then carry over url in-degrees, url type
// labels, and clusters restricted to the surviving quotes.
// NOTE(review): AddQuote assigns fresh sequential ids, yet the cluster
// section tests/stores the ORIGINAL ids (CId, CQtIdV) against the new base
// -- this only lines up if old and new ids coincide; verify.
PQuoteBs TQuoteBs::GetQuoteBs(const TIntV& QtIdV) const {
  PQuoteBs _NewQtBs = TQuoteBs::New();
  TQuoteBs& NewQtBs = *_NewQtBs;
  // add quotes
  for (int q = 0; q < QtIdV.Len(); q++) {
    const TQuote& Qt = GetQt(QtIdV[q]);
    NewQtBs.AddQuote(Qt, *this);
  }
  // add urls (only those whose strings survived the projection)
  for (int u = 0; u < UrlInDegH.Len(); u++) {
    const char* UrlStr = GetStr(UrlInDegH.GetKey(u));
    if (NewQtBs.IsStr(UrlStr)) {
      NewQtBs.UrlInDegH.AddDat(NewQtBs.GetStrId(UrlStr), UrlInDegH[u]); }
  }
  // add url types
  for (int t = 0; t < UrlTyH.Len(); t++) {
    const char* UrlStr = GetStr(UrlTyH.GetKey(t));
    if (NewQtBs.IsStr(UrlStr)) {
      NewQtBs.UrlTyH.AddDat(NewQtBs.GetStrId(UrlStr), UrlTyH[t]); }
  }
  // add clusters (keep only member quotes present in the new base)
  for (int c = 0; c < GetClusts(); c++) {
    const int CId = GetCId(c);
    if ( ! IsQtId(CId)) { printf("!!! %d:%d\n", c, CId); continue; }
    const TIntV& CQtIdV = GetClust(CId);
    if (NewQtBs.IsQtId(CId)) {
      IAssert(! NewQtBs.ClustQtIdVH.IsKey(CId));
      TIntV& CIdV = NewQtBs.ClustQtIdVH.AddDat(CId);
      for (int i = 0; i < CQtIdV.Len(); i++) {
        if (NewQtBs.IsQtId(CQtIdV[i])) { CIdV.Add(CQtIdV[i]); }
      }
      IAssert(CIdV.Len() > 0);
    }
  }
  return _NewQtBs;
}

// Total number of quote ids across all clusters.
int TQuoteBs::GetQtsInClust() const {
  int TotalQts = 0;
  for (int c = 0; c < ClustQtIdVH.Len(); c++) {
    TotalQts += ClustQtIdVH[c].Len();
  }
  return TotalQts;
}

int TQuoteBs::GetClustFq(const int& CId) const {
  int fq = 0;
  TIntSet S;
  const TIntV& ClustV = ClustQtIdVH.GetDat(CId);
  for (int c = 0; c < ClustV.Len(); c++) {
    fq += GetQt(ClustV[c]).GetFq();
    IAssert(! S.IsKey(ClustV[c]));
    S.AddKey((ClustV[c]));
  }
  return fq;
}

int TQuoteBs::GetClustFq(const int& CId, const TUrlTy& UrlTy) const {
  int fq = 0;
  const TIntV& ClustV = ClustQtIdVH.GetDat(CId);
  for (int c = 0; c < ClustV.Len(); c++) {
    fq += GetQt(ClustV[c]).GetFq(UrlTy, *this);
  }
  return fq;
}

int TQuoteBs::GetClustMxQtFq(const int& CId) const {
  int Mxfq = 0;
  const TIntV& ClustV = ClustQtIdVH.GetDat(CId);
  for (int c = 0; c < ClustV.Len(); c++) {
    int fq = GetQt(ClustV[c]).GetFq();
    if (fq > Mxfq){ Mxfq = fq; }
  }
  return Mxfq;
}

// Representative (centroid) quote of cluster CId; delegates to the
// vector-based overload on the cluster's member list.
int TQuoteBs::GetCentrQtId(const int& CId) const {
  return GetCentrQtId(GetClust(CId));
}

// Find the representative quote of the cluster: rank members by url count
// (NOTE(review): the pairs are built from Q.GetUrls(), i.e. number of urls,
// not raw frequency, despite the original "most frequent" wording -- confirm
// this is intended), then prefer the first quote of 6..49 words whose string
// contains the top-ranked quote's string. Returns -1 if no member is a valid
// quote id.
int TQuoteBs::GetCentrQtId(const TIntV& ClustV) const {
  TIntPrV QtFqIdV; // (url count, quote id) pairs
  for (int c = 0; c < ClustV.Len(); c++) {
    if (! IsQtId(ClustV[c])) { continue; } // skip stale ids
    const TQuote& Q = GetQt(ClustV[c]);
    QtFqIdV.Add(TIntPr(Q.GetUrls(), ClustV[c]));
  }
  if (QtFqIdV.Empty()) { return -1; } // no valid member quote
  QtFqIdV.Sort(false); // descending by url count
  const TStr FqStr = GetQt(QtFqIdV[0].Val2).GetStr(); // most freq str or its superset
  for (int c = 0; c < QtFqIdV.Len(); c++) {
    IAssert(IsQtId(QtFqIdV[c].Val2));
    const TQuote& Q = GetQt(QtFqIdV[c].Val2);
    const int Words = TStrUtil::CountWords(Q.GetStr().CStr());
    // 6..49 words and a superset of the top quote's string
    if (Words >= 6 && Words < 50 && strstr(Q.GetStr().CStr(), FqStr.CStr())!=NULL) {
      return QtFqIdV[c].Val2; }
  }
  // just return most frequent quote
  return QtFqIdV[0].Val2;
}

// Build the cluster centroid as an artificial quote (merged mention streams
// of all members); delegates to the vector-based overload.
void TQuoteBs::GetMergedClustQt(const int& CId, TQuote& NewQt, const bool& OnlyAfterBegTm) const {
  const TIntV& ClustV = GetClust(CId);
  GetMergedClustQt(ClustV, NewQt, OnlyAfterBegTm);
}

// Merge the mention streams of all cluster members into the artificial quote
// NewQt (type qtCentr, id -1). If OnlyAfterBegTm is set, mentions before the
// cluster begin time (see GetClustBegTm) are dropped; otherwise everything
// from TSecTm(1) on is kept.
// NOTE(review): GetCentrQtId may return -1 for a cluster with no valid
// member, and that value is passed unchecked to GetClustBegTm/GetQt below --
// confirm callers guarantee a non-empty, valid cluster.
void TQuoteBs::GetMergedClustQt(const TIntV& ClustV, TQuote& NewQt, const bool& OnlyAfterBegTm) const {
  const int CentrQtId = GetCentrQtId(ClustV);
  const TSecTm BegTm = OnlyAfterBegTm ? GetClustBegTm(ClustV, CentrQtId) : TSecTm(1);
  THash<TPair<TSecTm, TInt>, TInt> TmUrlCntH; // (time, url) --> summed count
  for (int c = 0; c < ClustV.Len(); c++) {
    const TQuote& Q = GetQt(ClustV[c]);
    //IAssert(Q.GetTy() == qtInClust);
    for (int u = 0; u < Q.TmUrlCntV.Len(); u++) {
      if (Q.TmUrlCntV[u].Tm() >= BegTm) {
        TmUrlCntH.AddDat(TPair<TSecTm, TInt>(Q.TmUrlCntV[u].Tm(), Q.TmUrlCntV[u].UrlId())) += Q.TmUrlCntV[u].Cnt(); }
    }
  }
  NewQt.QtCIdTy = TQuote::TQtIdTy(-1, qtCentr); // artificial quote, no real id
  NewQt.QtStr = GetQt(CentrQtId).GetStr(); // get most frequent string
  //NewQt.QtStr = GetQt(CId).GetStr();     // get longest string
  if (! TmUrlCntH.Empty()) {
    NewQt.TmUrlCntV.Gen(TmUrlCntH.Len(), 0); // reserve, then fill
    for (int u = 0; u < TmUrlCntH.Len(); u++) {
      NewQt.TmUrlCntV.Add(TQuote::TTmUrlCnt(TmUrlCntH.GetKey(u).Val1, TmUrlCntH.GetKey(u).Val2(), TmUrlCntH[u]()));
    }
    NewQt.TmUrlCntV.Sort(); // chronological order
  }
}

// Get quote cluster begin time; delegates to the vector-based overload on the
// cluster's member list.
TSecTm TQuoteBs::GetClustBegTm(const int& CId, const int& CentrQtId) const {
  const TIntV& ClustV = GetClust(CId);
  return GetClustBegTm(ClustV, CentrQtId);
}

// Begin time of a cluster: the earliest first-mention time over the member
// quotes that fully contain the centroid quote (word-level containment with
// no skips, per LongestCmnSubSq). Starts from "now", so if no member contains
// the centroid the current time is returned.
TSecTm TQuoteBs::GetClustBegTm(const TIntV& ClustV, const int& CentrQtId) const {
  TStrHash<TInt> StrH; // shared word-id table for all compared quotes
  TIntV CntrWIdV, WIdV;
  int WIdV1Start, WIdV2Start, SkipId;
  TStrUtil::GetAddWIdV(StrH, GetQt(CentrQtId).GetStr().CStr(), CntrWIdV);
  TSecTm BegTm = TSecTm::GetCurTm();
  for (int c = 0; c < ClustV.Len(); c++) {
    TStrUtil::GetAddWIdV(StrH, GetQt(ClustV[c]).GetStr().CStr(), WIdV);
    if (WIdV.Len() < CntrWIdV.Len()) { continue; } // shorter than centroid: cannot contain it
    const int Overlap = LongestCmnSubSq(CntrWIdV, WIdV, WIdV1Start, WIdV2Start, SkipId);
    if (Overlap >= CntrWIdV.Len() && SkipId==0) { // centroid is contained in the bigger quote
      if (GetQt(ClustV[c]).TmUrlCntV.Empty()) { continue; } // no mentions, no time
      BegTm = TMath::Mn(BegTm, GetQt(ClustV[c]).TmUrlCntV[0].Tm()); // take the earlier
    }
  }
  return BegTm;
}

// Finding the length of the approximate longest common subsequence of two
// word-id vectors. For every word of the shorter vector that also occurs in
// the longer one, extend a match forward, allowing at most one kind of
// single-word skip per extension attempt. Outputs:
//   WIdV1Start/WIdV2Start - start positions of the best match in WIdV1/WIdV2
//                           (swapped back at the end so they always refer to
//                           the caller's argument order),
//   SkipId - how the best match was extended: 0 = exact run, -1 = one word
//            skipped in the longer vector, -2 = one word skipped in both,
//            -3 = one word skipped in the shorter vector.
// Returns the length of the best (approximate) common run.
int TQuoteBs::LongestCmnSubSq(const TIntV& WIdV1, const TIntV& WIdV2, int& WIdV1Start, int& WIdV2Start, int& SkipId) {
  const TIntV& V1 = WIdV1.Len()>WIdV2.Len() ? WIdV1:WIdV2; // long
  const TIntV& V2 = WIdV1.Len()>WIdV2.Len() ? WIdV2:WIdV1; // short
  const int V1Len = V1.Len();
  const int V2Len = V2.Len();

  THash<TInt, TIntV> WIdPosH; // word id --> positions in the long vector
  THashSet<TInt> V2WIdSet;    // word ids occurring in the short vector

  WIdV1Start = WIdV2Start = SkipId = 0;
  for (int i = 0; i < V2Len; i++) { // word position index
    V2WIdSet.AddKey(V2[i]); }
  for (int i = 0; i < V1Len; i++) { // word position index
    // index only the long-vector positions of words shared with the short one
    if (V2WIdSet.IsKey(V1[i])) { WIdPosH.AddDat(V1[i]).Add(i); } }

  // Count the sequence length
  int MaxLen = 0;
  for (int w = 0; w < V2Len; w++) {
    const int wid = V2[w];
    if (! WIdPosH.IsKey(wid)) { continue; }
    const TIntV& OccV = WIdPosH.GetDat(wid);
    for (int o = 0; o < OccV.Len(); o++) {
      const int beg = OccV[o];
      int cnt = 0, tmp = 0;
      // each while-loop continues extending where the previous one stopped;
      // tmp records which skip variant produced the final extension
      while (w+cnt < V2Len && beg+cnt < V1Len && V2[w+cnt]==V1[beg+cnt]) { cnt++; tmp=0; }           // no skip
      while (beg+1+cnt < V1Len && w+cnt < V2Len && V2[w+cnt]==V1[beg+cnt+1]) { cnt++; tmp=-1; }      // skip word in long
      while (beg+cnt+1 < V1Len && w+cnt+1 < V2Len && V2[w+cnt+1]==V1[beg+cnt+1]) {  cnt++; tmp=-2;}  // skip word in both
      while (beg+cnt < V1Len && w+cnt+1 < V2Len && V2[w+cnt+1]==V1[beg+cnt]) { cnt++; tmp=-3;}       // skip word in short
      if (MaxLen < cnt) { MaxLen = cnt; SkipId=tmp; WIdV1Start = beg;  WIdV2Start = w; }
      IAssert(cnt >= 1); // the seed word itself always matches
    }
  }
  // starts were computed in (long, short) order; swap if the caller's
  // WIdV1 was actually the shorter vector
  if (! (WIdV1.Len()>WIdV2.Len())) {
    int tmp=WIdV1Start; WIdV1Start=WIdV2Start; WIdV2Start=tmp;
  }
  return MaxLen;
}

// Install an externally computed clustering: reset every quote to a
// stand-alone qtQuote, then for each member vector in ClustV pick a centroid
// as root (qtRoot) and point all other members at it (qtInClust). Clusters
// with no valid root are reported and skipped. Asserts that no quote belongs
// to more than one cluster.
void TQuoteBs::CreateClusters(const TVec<TIntV>& ClustV) {
  TIntV CIdV;
  ClustQtIdVH.Clr();
  // reset all quotes
  printf("len %d\n", Len());
  for (int q = 0; q < Len(); q++) {
    TQuote& Q = GetQt(q);
    Q.QtCIdTy = TQuote::TQtIdTy(q, qtQuote);
  }
  TIntH SeenNId; // quote ids already assigned to some cluster
  for (int c = 0; c < ClustV.Len(); c++) {
//    if (ClustV[c].Len() < 3) { continue; } // skip super small clusters
    // set cluster root
    const int CentrQtId = GetCentrQtId(ClustV[c]);
    if (CentrQtId == -1) { // no member is a valid quote id
      printf("Cluster %d of size %d has no root!!!\n", c, ClustV[c].Len());
      for (int i = 0; i < ClustV[c].Len(); i++) {
        printf("  %d%c", ClustV[c][i].Val, IsQtId(ClustV[c][i])?'t':'f'); } printf("\n");
      continue;
    }
    IAssert(! SeenNId.IsKey(CentrQtId));
    SeenNId.AddKey(CentrQtId);
    TQuote& Q = GetQt(CentrQtId);
    if (Q.GetTy() != qtQuote) { printf("  %d", Q.GetTy()); }
    IAssert(! ClustQtIdVH.IsKey(CentrQtId));
    Q.QtCIdTy = TQuote::TQtIdTy(CentrQtId, qtRoot);
    ClustQtIdVH.AddDat(CentrQtId, ClustV[c]);
    // point all members to the root
    for (int q = 0; q < ClustV[c].Len(); q++) {
      if (ClustV[c][q] == CentrQtId) { continue; } // root keeps qtRoot
      IAssert(! SeenNId.IsKey(ClustV[c][q]));
      SeenNId.AddKey(ClustV[c][q]);
      TQuote& Q2 = GetQt(ClustV[c][q]);
      Q2.QtCIdTy = TQuote::TQtIdTy(CentrQtId, qtInClust);
    }
  }
}

// Greedy single-pass quote clustering. Candidate quotes (frequent enough,
// spread over more than one domain, not blacklisted) are sorted by word
// length, longest first; each candidate is compared against all shorter ones
// and a shorter quote is merged into the longer quote's cluster when their
// approximate longest-common-subsequence overlap is high enough. Merge
// decisions are logged to <OutFNmPref>-merged.txt and ClustQtIdVH is
// periodically checkpointed to disk.
void TQuoteBs::ClusterQts(const int& MinRootWrdLen, const int& MinQtFq, const TStr& OutFNmPref, const TStrV& BlackListV) {
  printf("Cluster quotes with min Fq >= %d: %d total quotes\n", MinQtFq, Len());
  TExeTm ExeTm;
  TStrHash<TInt> StrH;
  // quote --> word id vector
  TVec<TPair<TInt, TIntV> > QWIdVV;  // (QtId, WIdV)
  { TIntPrV LenQIdV;  TIntV QWIdV;
  printf("sort qid by len\n");
  TStrHash<TInt> BlackListH;
  for (int i = 0; i < BlackListV.Len(); i++) { BlackListH.AddDatId(BlackListV[i].GetTrunc()); }
  printf("blacklist len: %d\n", BlackListH.Len());
  for (int q1 = 0; q1 < Len(); q1++) {
    const TQuote& Q = GetQtN(q1);
    const int Doms = Q.GetDoms(*this);
    // candidate filter: >1 domain and domains make up over a quarter of the urls
    if ((Q.GetTy()==qtQuote || Q.GetTy()==qtRoot) && Doms>1 && Doms*4>Q.GetUrls()
      && Q.GetFq() >= MinQtFq && (! BlackListH.IsKey(Q.GetStr().CStr()))) { // quote has not yet been merged (or is a root)
        LenQIdV.Add(TIntPr(TStrUtil::CountWords(Q.QtStr.CStr()), GetQtId(q1))); }
  }
  printf("sort %d candidates\n", LenQIdV.Len());
  LenQIdV.Sort(false); // longest quotes first
  printf("get word id vectors\n");
  for (int q1 = 0; q1 < LenQIdV.Len(); q1++) {
    TStrUtil::GetAddWIdV(StrH, GetQt(LenQIdV[q1].Val2).QtStr.CStr(), QWIdV);
    QWIdVV.Add(TPair<TInt, TIntV>(LenQIdV[q1].Val2, QWIdV));
  } }
  printf("  %d root quotes\n", ClustQtIdVH.Len());
  printf("  %d quotes to merge\n", QWIdVV.Len());
  // cluster
  int NMergers=0;
  FILE *F = fopen(TStr(OutFNmPref+"-merged.txt").CStr(), "wt");
  const int ClusterQ=QWIdVV.Len();
  for (int q1 = 0; q1 < ClusterQ; q1++) {
    if (QWIdVV[q1].Val2.Len() < MinRootWrdLen) { continue; } // can't be a cluster root if your length < 6
    const int Qt1Id = QWIdVV[q1].Val1;
    for (int q2 = q1+1; q2 < ClusterQ; q2++) { // q2 is never longer than q1
      int idx1=0, idx2=0, SkipTy=0;
      bool DoMerge = false;
      const int ShortLen = TMath::Mn(QWIdVV[q1].Val2.Len(), QWIdVV[q2].Val2.Len());
      if (ShortLen == 0) { continue; } // q2 was already merged (vector cleared below)
      const int Overlap = LongestCmnSubSq(QWIdVV[q1].Val2, QWIdVV[q2].Val2, idx1, idx2, SkipTy);
      // merge rules: very short quotes must overlap (nearly) fully; otherwise
      // require >50% overlap (with +3 smoothing) or an overlap of >10 words
      if (ShortLen == 4 && Overlap == 4 /*&& SkipTy==0*/) { DoMerge=true; } // full overlap, no skip
      else if (ShortLen == 5 && Overlap == 5 /*&& SkipTy==0*/) { DoMerge=true; } // full overlap, no skip
      else if ((ShortLen == 6 && Overlap >= 5 /*&& SkipTy==0) || (ShortLen == 6 && Overlap == 5*/)) { DoMerge=true; }
      else if (Overlap/double(ShortLen+3) > 0.5 || Overlap > 10) { DoMerge=true; }
      if (DoMerge == true) {
        NMergers++;
        const int Qt2Id = QWIdVV[q2].Val1;
        TQuote& Q1 = GetQt(Qt1Id);
        TQuote& Q2 = GetQt(Qt2Id);
        if (Q1.GetTy() != qtRoot) { // create new cluster (set QtTy to qtRoot)
          IAssert(Q1.GetTy() == qtQuote);
          IAssert(! ClustQtIdVH.IsKey(Qt1Id));
          ClustQtIdVH.AddDat(Qt1Id).Add(Qt1Id); // root is a member of its own cluster
          Q1.QtCIdTy = TQuote::TQtIdTy(Qt1Id, qtRoot);
        } else { IAssert(Q1.GetTy() == qtRoot); }
        // merge Q2 into Q1
        if (Q2.GetTy() == qtRoot) { // merge two clusters
          for (int c = 0; c < ClustQtIdVH.GetDat(Qt2Id).Len(); c++) {
            ClustQtIdVH.AddDat(Qt1Id).Add(ClustQtIdVH.GetDat(Qt2Id)[c]); }
          Q2.QtCIdTy = TQuote::TQtIdTy(Qt1Id, qtInClust);
          ClustQtIdVH.DelKey(Qt2Id);
          IAssert(ClustQtIdVH.AddDat(Qt1Id).IsIn(Qt2Id));
        } else {
          IAssert(Q2.GetTy() == qtQuote);
          Q2.QtCIdTy = TQuote::TQtIdTy(Qt1Id, qtInClust);
          ClustQtIdVH.AddDat(Qt1Id).Add(Qt2Id);
        }
        QWIdVV[q2].Val2.Clr(true); // matched
        // save: log the merge, tagged with the skip variant used
        { TStr Str= "    :"; if(SkipTy==-1){Str="long=";} else if(SkipTy==-2){Str="both=";} else if(SkipTy==-3){Str="shrt=";}
        if (Str.Len()>0) { printf("%c", Str[0]); }
        if (ClustQtIdVH.GetDat(Qt1Id).Len() == 2) { fprintf(F, "\n[%d] %s \t%d\n", TStrUtil::CountWords(Q1.QtStr.CStr()), Q1.QtStr.CStr(), Q1.GetFq()); }
        fprintf(F, "%s%d [%d] %s \t%d\n", Str.CStr(), Overlap, TStrUtil::CountWords(Q2.QtStr.CStr()), Q2.QtStr.CStr(), Q2.GetFq()); }
      }
    }
    if (ClustQtIdVH.IsKey(Qt1Id)) { // q1 became a root: compact and retire it
      ClustQtIdVH.GetDat(Qt1Id).Pack();
      ClustQtIdVH.GetDat(Qt1Id).Sort();
      QWIdVV[q1].Val2.Clr(true);
    }
    if (q1>0 && q1 % 100 == 0) { // progress report + periodic checkpoint
      printf("\r  %d/%d: %d merged [%s]  ", q1, ClusterQ, NMergers, ExeTm.GetStr());
      if (q1 % 1000 == 0) { printf("save.\n");  fflush(F);
        TFOut FOut(OutFNmPref+"-QtBs.ClustQtIdVH"); ClustQtIdVH.Save(FOut); }
    }
  }
  fclose(F);
  printf("\n%d quotes, %d clusters [%s]\n\n.", NMergers, ClustQtIdVH.Len(), ExeTm.GetStr());
}

// Drop all clusters and mark every quote as a stand-alone quote again.
void TQuoteBs::ResetClusters() {
  ClustQtIdVH.Clr();
  for (int QtN = 0; QtN < Len(); QtN++) {
    GetQtN(QtN).QtCIdTy = TQuote::TQtIdTy(GetQtId(QtN), qtQuote);
  }
}

// Move quote QtId out of its current cluster and into cluster NewCId.
void TQuoteBs::ReassignToClust(const int& QtId, const int& NewCId) {
  const int OldCId = GetQt(QtId).GetCId();
  printf("assign %d from clust %d to %d\n", QtId, OldCId, NewCId);
  if (OldCId != QtId) { // not its own root: remove from old cluster's member list
    ClustQtIdVH.GetDat(OldCId).DelIfIn(QtId); }
  GetQt(QtId).QtCIdTy = TQuote::TQtIdTy(NewCId, qtInClust);
  ClustQtIdVH.AddDat(NewCId).Add(QtId);
}

// Merge cluster ChildCId into cluster ParentCId: the parent keeps its root,
// the parent's member list becomes the union of both member lists, and the
// child cluster entry is deleted.
void TQuoteBs::Mergec2Clusters(const int& ParentCId, const int& ChildCId) {
  IAssert(IsClust(ParentCId) && IsClust(ChildCId));
  printf("merging %d (fq: %d) to %d (%d fq)\n", ChildCId, GetClustFq(ChildCId), ParentCId, GetClustFq(ParentCId));
  TIntSet ClustSet; // union of the members of both clusters
  { const TIntV& ClustV = GetClust(ParentCId);
  for (int c = 0; c < ClustV.Len(); c++) {
    ClustSet.AddKey(ClustV[c]); } }
  { const TIntV& ClustV = GetClust(ChildCId);
  for (int c = 0; c < ClustV.Len(); c++) {
    // BUG FIX: every member of the dissolved child cluster (including its
    // former root) must now point at the surviving ParentCId. The previous
    // code repointed only the child root, and set it to ChildCId -- a cluster
    // id that is deleted just below -- leaving dangling cluster references
    // (compare ReassignToClust/CreateClusters, which set TQtIdTy(CId, qtInClust)
    // with the surviving cluster id).
    GetQt(ClustV[c]).QtCIdTy = TQuote::TQtIdTy(ParentCId, qtInClust);
    ClustSet.AddKey(ClustV[c]); } }
  ClustSet.GetKeyV(ClustQtIdVH.GetDat(ParentCId));
  ClustQtIdVH.DelKey(ChildCId);
  ClustQtIdVH.Defrag();
}

// Build a network of merge candidates instead of greedily merging: candidate
// quotes are sorted by word length (descending, so earlier hash positions
// hold longer quotes) and an edge Qt1->Qt2 is added whenever the shorter
// quote Qt1 overlaps a longer quote Qt2 strongly enough. Candidates are
// logged to <OutFNmPref>-candidates.txt; the net is periodically saved to
// <OutFNmPref>.ClustNet.
PClustNet TQuoteBs::GetClustNet(const int& MinQtFq, const TStr& OutFNmPref) const {
  printf("Cluster quotes with Fq >= %d: %d total quotes\n", MinQtFq, Len());
  TExeTm ExeTm;
  TStrHash<TInt> StrH;
  FILE *F = fopen(TStr(OutFNmPref+"-candidates.txt").CStr(), "wt");
  // quote --> word id vector
  THash<TInt, TIntV> QWIdVH;  // QtId --> WIdV
  { TIntPrV LenQIdV;  TIntV QWIdV;
  printf("sort qid by len\n");
  for (int q1 = 0; q1 < Len(); q1++) {
    const TQuote& Q = GetQtN(q1);
    const int Doms = Q.GetDoms(*this);
    if ((Q.GetTy()!=qtCentr) && Doms>1 && Doms*4 > Q.GetUrls() && Q.GetFq() >= MinQtFq) { // quote has not yet been merged (or is a root)
      LenQIdV.Add(TIntPr(TStrUtil::CountWords(Q.QtStr.CStr()), GetQtId(q1)));
    }
  }
  printf("sort %d candidates by word length\n", LenQIdV.Len());
  LenQIdV.Sort(false); // longest quotes first
  for (int q1 = 0; q1 < LenQIdV.Len(); q1++) {
    TStrUtil::GetAddWIdV(StrH, GetQt(LenQIdV[q1].Val2).QtStr.CStr(), QWIdV);
    QWIdVH.AddDat(LenQIdV[q1].Val2, QWIdV);
    fprintf(F, "%s\n", GetQt(LenQIdV[q1].Val2).QtStr.CStr());
  } }
  printf("  %d root quotes\n", ClustQtIdVH.Len());
  printf("  %d quotes to merge\n", QWIdVH.Len());
  // cluster
  const int ClusterQ=QWIdVH.Len();
  PClustNet ClustNet = TClustNet::New();
  TIntH NIdDepthH;
  TIntSet SeenQtIdSet;
  int NMerges=0, idx1=0, idx2=0, SkipTy=0; // TreeSz=0, TreeDepth=0,
  for (int q1 = 0; q1 < ClusterQ; q1++) { // for each quote
    SeenQtIdSet.Clr(false);
    const int Qt1Id = QWIdVH.GetKey(q1);
    for (int q2 = q1-1; q2 >= 0; q2--) { //  go over all earlier quotes (the IAssert below checks q1 is the shorter one)
      const int Qt2Id = QWIdVH.GetKey(q2);
      if (SeenQtIdSet.IsKey(Qt2Id)) { continue; }
      bool DoMerge = false;
      const int Overlap = LongestCmnSubSq(QWIdVH[q1], QWIdVH[q2], idx1, idx2, SkipTy);
      const int ShortLen = QWIdVH[q1].Len(); IAssert(QWIdVH[q1].Len()<= QWIdVH[q2].Len());
      // same merge thresholds as ClusterQts
      if (ShortLen == 4 && Overlap == 4) { DoMerge=true; } // full overlap, no skip
      else if (ShortLen == 5 && Overlap == 5) { DoMerge=true; } // full overlap, no skip
      else if (ShortLen == 6 && Overlap >= 5) { DoMerge=true; }
      else if (Overlap/double(ShortLen+3) > 0.5 || Overlap > 10) { DoMerge=true; }
      if (DoMerge==true) { NMerges++;
        if (! ClustNet->IsNode(Qt1Id)) { ClustNet->AddNode(Qt1Id, GetQt(Qt1Id)); }
        if (! ClustNet->IsNode(Qt2Id)) { ClustNet->AddNode(Qt2Id, GetQt(Qt2Id)); }
        //ClustNet->AddEdge(Qt1Id, Qt2Id, -1, Overlap);
        ClustNet->AddEdge(Qt1Id, Qt2Id);
        SeenQtIdSet.AddKey(Qt1Id);  SeenQtIdSet.AddKey(Qt2Id);
        /*TGAlg::GetSubTreeSz(ClustNet, Qt2Id, true, NIdDepthH, TreeSz, TreeDepth);
        for (int i = 0; i < NIdDepthH.Len(); i++) {
          SeenQtIdSet.AddKey(NIdDepthH.GetKey(i)); }*/
        fprintf(F, "%d Merge\t%d\t%d\ttree\t%d\n\t%s\n\t%s\n", q1, QWIdVH[q1].Len(), QWIdVH[q2].Len(), NIdDepthH.Len(), GetQt(Qt2Id).GetStr().CStr(), GetQt(Qt1Id).GetStr().CStr());
        fflush(F);
      }
    }
    if (q1>0 && q1 % 100 == 0) { // progress report + periodic checkpoint
      printf("\r  %d/%d: %d merged [%s]  ", q1, ClusterQ, NMerges, ExeTm.GetStr());
      if (q1 % 1000 == 0) {
        printf("save: %d merges, %d nodes, %d edges in ClustNet [%s]\n.", NMerges, ClustNet->GetNodes(), ClustNet->GetEdges(), ExeTm.GetStr());
        TFOut FOut(OutFNmPref+".ClustNet"); ClustNet->Save(FOut); }
    }
  }
  fclose(F);
  printf("\n%d merges, %d nodes, %d edges in ClustNet [%s]\n\n.", NMerges, ClustNet->GetNodes(), ClustNet->GetEdges(), ExeTm.GetStr());
  return ClustNet;
}

// Build the merged (centroid) quote for every cluster and append it to the
// quote base. To roughly offset the additions, first deletes one Fq==1 quote
// for each cluster that will be added.
// NOTE(review): the deletion loop calls DelKeyId while iterating key ids of
// the same hash, and if there are fewer Fq==1 quotes than clusters it deletes
// fewer than ClustQtIdVH.Len() entries -- confirm both effects are intended.
void TQuoteBs::AddMergedQtsToQtBs() {
  printf("merge quote clusters and add them to the quote base\n");
  TQuote MergedQt;
  // delete ClustQtIdVH.Len() quotes with Fq == 1
  int toDel = ClustQtIdVH.Len();
  for (int q = 0; q < QuoteH.Len(); q++) {
    if (QuoteH[q].GetFq() == 1) { QuoteH.DelKeyId(q); toDel--; }
    if (toDel == 0) { break; }
  }
  // add merged quotes
  for (int q = 0; q < ClustQtIdVH.Len(); q++) {
    GetMergedClustQt(ClustQtIdVH.GetKey(q), MergedQt);
    IAssert(MergedQt.TmUrlCntV.Len() > 0); // quote string still points to
    QuoteH.AddDat(QuoteH.Len(), MergedQt); // add new quote
  }
  printf("IsKeyIdEqKeyN: %s\n", QuoteH.IsKeyIdEqKeyN()?"T":"F");
}

// Build a new quote base where every cluster is collapsed into a single
// merged (centroid) quote. If OnlyClusters is false, all quotes that are not
// a member of any cluster are copied over as well. Asserts that each quote
// belongs to at most one cluster.
PQuoteBs TQuoteBs::GetMergeClusters(const bool& OnlyClusters) const {
  printf("Merging clusters into single quotes:\n");
  PQuoteBs NewQtBs = TQuoteBs::New();
  TQuote MergedQt;
  printf("  %d quotes total\n", Len());
  printf("  %d clusters\n", ClustQtIdVH.Len());
  TIntSet SeenQtIdSet; // quote ids consumed by some cluster
  for (int q = 0; q < ClustQtIdVH.Len(); q++) {
    GetMergedClustQt(ClustQtIdVH.GetKey(q), MergedQt);
    IAssert(MergedQt.TmUrlCntV.Len() >0);
    NewQtBs->AddQuote(MergedQt, *this);
    const TIntV& ClustQtIdV = ClustQtIdVH[q];
    for (int c = 0; c < ClustQtIdV.Len(); c++) {
      const int qid = ClustQtIdV[c];
      IAssert(! SeenQtIdSet.IsKey(qid)); // each Qt is member of only one cluster
      SeenQtIdSet.AddKey(qid);
    }
  }
  if (! OnlyClusters) {
    // copy over all un-clustered quotes unchanged
    for (int q = 0; q < Len(); q++) {
      const TQuote& Q = GetQtN(q);
      if (! SeenQtIdSet.IsKey(GetQtId(q))) {
        NewQtBs->AddQuote(Q, *this); }
    }
  }
  printf("  %d quotes in new Quotes base\n", NewQtBs->Len());
  return NewQtBs;
}

// Count, over the TakeNClust most frequent clusters (per GetCIdVByFq with the
// given filters), how often each domain mentions any member quote; DomCntV
// receives (domain, count) pairs sorted by count, descending.
void TQuoteBs::GetTopQtDoms(TStrIntPrV& DomCntV, const int& TakeNClust, const int& MinClFq, const TStr& RootHasWord, const TUrlTy& OnlyCountTy) const {
  TIntV TopCIdV;
  GetCIdVByFq(TopCIdV, MinClFq, RootHasWord, OnlyCountTy);
  THash<TStr, TInt> DomToCntH;
  const int NClusts = TMath::Mn(TopCIdV.Len(), TakeNClust);
  for (int ClustN = 0; ClustN < NClusts; ClustN++) {
    const TIntV& MemberV = GetClust(TopCIdV[ClustN]);
    for (int MemberN = 0; MemberN < MemberV.Len(); MemberN++) {
      const TQuote& Quote = GetQt(MemberV[MemberN]);
      // every (time, url) mention of the quote counts toward its domain
      for (int UrlN = 0; UrlN < Quote.TmUrlCntV.Len(); UrlN++) {
        DomToCntH.AddDat(TStrUtil::GetDomNm(GetStr(Quote.TmUrlCntV[UrlN].UrlId()))) += 1;
      }
    }
  }
  DomToCntH.SortByDat(false); // most-mentioning domains first
  DomToCntH.GetKeyDatPrV(DomCntV);
}

// For the top domains (by number of top-cluster mentions), measure each
// domain's lag in hours relative to the cluster's median mention time, using
// only the domain's first mention of each cluster. Writes the domains sorted
// by median lag to domsByLag-clust<N>.tab, keeping only domains that mention
// at least Thresh percent of the top TakeNClusts clusters.
void TQuoteBs::TopDomsByLag(const TTmUnit& TmUnit, const int& TakeNDoms, const int& TakeNClusts, const int& Thresh) const {
  THash<TStr, TMom> DomLagH; // domain --> distribution of lags [hours]
  TIntV CIdV;  GetCIdVByFq(CIdV, 10, "", utUndef);
  //TIntV QIdV;  GetQIdVByFq(QIdV, 10, "", utUndef);
  TStrIntPrV DomCntV;  GetTopQtDoms(DomCntV, TakeNClusts, 10, "", utUndef);
  printf("Take %d clusters, %d domains", TakeNClusts, TakeNDoms);
  for (int d = 0; d < TMath::Mn(TakeNDoms, DomCntV.Len()); d++) {
    DomLagH.AddDat(DomCntV[d].Val1); // pre-register the tracked domains
    printf("%d\t%s\n", DomCntV[d].Val2(), DomCntV[d].Val1.CStr());
  }
  int NQuotes=0;
  for (int c = 0; c < TMath::Mn(CIdV.Len(), TakeNClusts); c++) {
    TQuote Q;  GetMergedClustQt(CIdV[c], Q, false);
    const TSecTm MedTm = Q.GetMedianTm(TmUnit, utUndef, *this).Round(TmUnit);
    TStrSet DomSet;
    for (int u = 0; u < Q.TmUrlCntV.Len(); u++) {
      IAssert(IsStrId(Q.TmUrlCntV[u].UrlId()));
      const TStr Dom = TStrUtil::GetDomNm(GetStr(Q.TmUrlCntV[u].UrlId()));
      if (DomSet.IsKey(Dom)) { continue; } // only take first query occurence on the domain
      if (! DomLagH.IsKey(Dom)) { continue; } // not one of the tracked top domains
      DomLagH.AddDat(Dom).Add((int(Q.TmUrlCntV[u].Tm().Round(TmUnit))-int(MedTm))/3600.0); // lag in hours
      DomSet.AddKey(Dom);
    }
    NQuotes += GetClust(CIdV[c]).Len();
  }
  FILE *F = fopen(TStr::Fmt("domsByLag-clust%d.tab", TakeNClusts).CStr(), "wt");
  fprintf(F, "Top clusters: %d, total quotes in top clusters: %d\n", TakeNClusts, NQuotes);
  TFltFltStrTrV MedLagDomV; // (median lag, #values, domain)
  for (int i = 0; i < DomLagH.Len(); i++) {
    DomLagH[i].Def();
    MedLagDomV.Add(TFltFltStrTr(DomLagH[i].GetMedian(), DomLagH[i].GetVals(), DomLagH.GetKey(i)));
  }
  MedLagDomV.Sort(); // smallest median lag first
  fprintf(F, "AGGREGATE PEAK\nTop domains with least lag (out of top %d considered) that mention at least 10 pct of top %d quotes (clusters) :\n", TakeNDoms, TakeNClusts);
  for (int i = 0; i < MedLagDomV.Len(); i++) {
    if (MedLagDomV[i].Val2() < Thresh*TakeNClusts/100.0) { continue; } // too few mentions
    fprintf(F, "%g\t%g\t%s\n", MedLagDomV[i].Val1(), MedLagDomV[i].Val2(), MedLagDomV[i].Val3.CStr());
  }
  /*printf("Bottom 100 domains with least lag:\n");
  for (int i = 0; i < 100; i++) {
    printf("  %.2f\t%s\n", MedLagDomV[i].Val1(), MedLagDomV[i].Val2.CStr());
  }*/
  fclose(F);
}

void TQuoteBs::PlotQtFqCnt(const TStr& OutFNmPref) const {
  TIntH UrlCntH, DomCntH, CntFqH;
  THashSet<TChA> DomSet;
  for (int q = 0; q < QuoteH.Len(); q++) {
    const TQuote& Q = GetQtN(q);
    UrlCntH.AddDat(Q.GetUrls()) += 1;
    CntFqH.AddDat(Q.GetFq()) += 1;
    DomSet.Clr(false);
    for (int u = 0; u < Q.GetUrls(); u++) {
      DomSet.AddKey(TStrUtil::GetDomNm(GetStr(Q.TmUrlCntV[u].UrlId()))); }
    DomCntH.AddDat(DomSet.Len()) += 1;
  }
  TGnuPlot::PlotValCntH(UrlCntH, OutFNmPref+"-url", "", "number of urls quote appears at", "count", gpsLog10XY);
  TGnuPlot::PlotValCntH(DomCntH, OutFNmPref+"-dom", "", "number of domains quote appears at", "count", gpsLog10XY);
  TGnuPlot::PlotValCntH(CntFqH, OutFNmPref+"-fq", "", "number of times quote appears", "count", gpsLog10XY);
}

// Plot smoothed blog vs. media mention frequency of a single quote over time
// (days since its first mention, rounded to days). The media curve is scaled
// by 54/46 to normalize for the overall media/blog volume baseline.
void TQuoteBs::PlotQtMediaVsBlogFq(const int& QtId, const TStr& OutFNmPref) const {
  TQuote::TTmFltPrV BlogFqV, MediaFqV, SmoothV;
  TFltPrV FqV;
  const TQuote& ClQt = GetQt(QtId);
  ClQt.GetFqOt(BlogFqV, tmu4Hour, utBlog, *this);
  ClQt.GetFqOt(MediaFqV, tmu4Hour, utMedia, *this);
  const TSecTm MinTm = ClQt.TmUrlCntV[0].Tm().Round(tmuDay); // first mention day
  TGnuPlot GP("mediaVsBlogs-"+OutFNmPref, TStr::Fmt("%s. b:%d m:%d u:%d d:%d", ClQt.GetStr().CStr(),
    ClQt.GetFq(utBlog, *this), ClQt.GetFq(utMedia, *this), ClQt.GetUrls(), ClQt.GetDoms(*this)));
  TQuote::GetSmoothFqOt(SmoothV, BlogFqV, tmu4Hour, 24, 1.2, MinTm);
  for (int i = 0; i < SmoothV.Len(); i++) {
    FqV.Add(TFltPr(double(SmoothV[i].Val1-MinTm)/(24.0*3600.0), SmoothV[i].Val2())); } // x-axis in days
  GP.AddPlot(FqV, gpwLines, "Blog frequency");
  TQuote::GetSmoothFqOt(SmoothV, MediaFqV, tmu4Hour, 24, 1.2, MinTm);
  FqV.Clr();
  for (int i = 0; i < SmoothV.Len(); i++) {
    IAssert(SmoothV[i].Val1>=MinTm);
    FqV.Add(TFltPr(double(SmoothV[i].Val1-MinTm)/(24.0*3600.0)*(54.0/46.0), SmoothV[i].Val2())); // baseline normalization
  }
  GP.AddPlot(FqV, gpwLines, "Media frequency");
  GP.SetXYLabel(TStr::Fmt("time [days] since %s", MinTm.GetYmdTmStr().CStr()), "Quote frequency (normalized for the baseline 46 vs 54)");
  GP.SavePng();
}

// Convert TmFqV into (days-since-BegTm, value) pairs in OutV and return the
// index of the maximum value. NOTE(review): despite the name, the division by
// MaxVal is commented out below, so values are copied through UN-normalized;
// confirm whether normalization should be re-enabled.
int NormMaxTo1(const TQuote::TTmFltPrV& TmFqV, TFltPrV& OutV, const TSecTm& BegTm) {
  int Idx=0;
  double MaxVal=0;
  for (int i = 0; i < TmFqV.Len(); i++) {
    if (MaxVal < TmFqV[i].Val2) {
      MaxVal=TmFqV[i].Val2; Idx = i; }
  }
  OutV.Gen(TmFqV.Len(), 0);
  for (int i = 0; i < TmFqV.Len(); i++) {
    OutV.Add(TFltPr(double(TmFqV[i].Val1-BegTm)/(24.0*3600.0), TmFqV[i].Val2())); //MaxVal));
  }
  return Idx;
}

// Plot smoothed blog vs. media mention volume of a whole cluster over time,
// annotated with the blog-vs-media peak lag (raw and smoothed, in hours) and
// whether the cluster's volume curve has a single peak.
void TQuoteBs::PlotClustMediaVsBlogFq(const int& CId, const TStr& OutFNmPref) const {
  const TTmUnit TmUnit = tmu4Hour;
  TQuote::TTmFltPrV BlogFqV, MediaFqV, AllFqV, SmoothV;
  TFltPrV FqV, RawFqV;
  TQuote ClQt;
  GetMergedClustQt(CId, ClQt, false); // cluster centroid quote
  ClQt.GetFqOt(BlogFqV, TmUnit, utBlog, *this);
  ClQt.GetFqOt(MediaFqV, TmUnit, utMedia, *this);
  const TSecTm MinTm = ClQt.TmUrlCntV[0].Tm().Round(tmuDay); // first mention day
  const TSecTm  MediaPeak = ClQt.GetPeakTm(TmUnit, TSecTm(1), utMedia, *this);
  const TSecTm BlogPeak = ClQt.GetPeakTm(TmUnit, TSecTm(1), utBlog, *this);
  const int Lag = (int(BlogPeak.Round(TmUnit))-int(MediaPeak.Round(TmUnit)))/3600; // raw peak lag [hours]
  const bool Is1Peak = ClQt.IsSinglePeak(TmUnit, TSecTm(1), utUndef, *this);
  TGnuPlot GP("mediaVsBlogs-"+OutFNmPref, TStr::Fmt("%s. b:%d m:%d u:%d d:%d", Is1Peak?"SINGLE PEAK": "MORE PEAKS", //ClQt.GetStr().CStr(),
    Lag, ClQt.GetFq(utBlog, *this), ClQt.GetFq(utMedia, *this), ClQt.GetUrls(), ClQt.GetDoms(*this)));
  // blogs
  int mx = NormMaxTo1(BlogFqV, RawFqV, MinTm);
  //GP.AddPlot(RawFqV, gpwLines, TStr::Fmt("RAW Blog : max %s, %s", BlogFqV[mx].Val1.GetYmdTmStr().CStr(), BlogPeak.GetYmdTmStr().CStr()), "pt 7 ps 1");
  TQuote::GetSmoothFqOt(SmoothV, BlogFqV, TmUnit, 2*24, 1.2, MinTm);
  mx = NormMaxTo1(SmoothV, FqV, MinTm);
  GP.AddPlot(FqV, gpwLines, TStr::Fmt("SMOOTH Blog : max %s", SmoothV[mx].Val1.GetYmdTmStr().CStr()), "pt 7 ps 1");
  const TSecTm SmBlog = SmoothV[mx].Val1; // smoothed blog peak time
  // media
  mx = NormMaxTo1(MediaFqV, RawFqV, MinTm);
  //GP.AddPlot(RawFqV, gpwLines, TStr::Fmt("RAW Media : max %s, %s", MediaFqV[mx].Val1.GetYmdTmStr().CStr(), MediaPeak.GetYmdTmStr().CStr()), "pt 5 ps 1");
  TQuote::GetSmoothFqOt(SmoothV, MediaFqV, TmUnit, 2*24, 1.2, MinTm);
  mx = NormMaxTo1(SmoothV, FqV, MinTm);
  const TSecTm SmMed = SmoothV[mx].Val1; // smoothed media peak time
  const int SmLag = (int(SmBlog.Round(TmUnit))-int(SmMed.Round(TmUnit)))/3600; // smoothed peak lag [hours]
  GP.AddPlot(FqV, gpwLines, TStr::Fmt("SMOOTH Media is behind: %dh, smooth: %dh.", -Lag, -SmLag), "pt 5 ps 1");
  //ClQt.GetFqOt(AllFqV, TmUnit);
  //NormMaxTo1(AllFqV, FqV, MinTm);
  //GP.AddPlot(FqV, gpwLines, "ALL", "pt 5 ps 1");
  GP.SetXYLabel(TStr::Fmt("time [days] since %s", MinTm.GetYmdTmStr().CStr()), "Cluster frequency (normalized for the baseline 46 vs 54)");
  GP.AddCmd("set xtics 10");
  GP.AddCmd("set mxtics 10");
  GP.SavePng();
}

// Aggregate the blog-vs-media lag over the top TakeN clusters (or individual
// quotes when TakeClusters is false). Lag is defined here as blog median time
// minus media median time, in hours; distributions of the lag (and of each
// side's lag against the overall median) are plotted, and means/medians over
// several lag windows (1d..7d) are printed.
void TQuoteBs::PlotMediaVsBlogLag(const TTmUnit& TmUnit, const bool& TakeClusters, const int& TakeN, const TStr& OutFNmPref) const {
  TQuote Qt;  TIntV IdV;
  TMom LagMom1d, LagMom2d, LagMom3d, LagMom4d, LagMom7d; // lag moments within +/-N day windows
  TFltFltH LagCntH, LagBCntH, LagMCntH;//, LagFqH, LagMedFq, LagBlogFq;
  int Cnt=0;
  if (TakeClusters) { GetCIdVByFq(IdV, 10, "", utUndef, false); }
  else { GetQtIdVByFq(IdV, 8, 10, false, "", "", utUndef); }
  for (int c = 0; c < TakeN; c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    //if (! Qt.IsSinglePeak(tmu4Hour, TSecTm(1), utMedia, *this)) { continue; }
    //if (! Qt.IsSinglePeak(tmu4Hour, TSecTm(1), utBlog, *this)) { continue; }
    //const TSecTm  MediaPeak = Qt.GetPeakTm(TmUnit, TSecTm(1), utMedia, *this);
    //const TSecTm BlogPeak = Qt.GetPeakTm(TmUnit, TSecTm(1), utBlog, *this);
    //const TSecTm  MediaPeak = Qt.GetMeanTm(TmUnit, utMedia, *this);
    //const TSecTm BlogPeak = Qt.GetMeanTm(TmUnit, utBlog, *this);
    const TSecTm  AllPeak = Qt.GetMedianTm(TmUnit, utUndef, *this);
    const TSecTm BlogPeak = Qt.GetMedianTm(TmUnit, utBlog, *this);
    const TSecTm  MediaPeak = Qt.GetMedianTm(TmUnit, utMedia, *this);
    const double Lag = (int(BlogPeak.Round(TmUnit))-int(MediaPeak.Round(TmUnit)))/3600.0;  // blog vs media [hours]
    const double Lag1 = (int(BlogPeak.Round(TmUnit))-int(AllPeak.Round(TmUnit)))/3600.0;   // blog vs overall
    const double Lag2 = (int(MediaPeak.Round(TmUnit))-int(AllPeak.Round(TmUnit)))/3600.0;  // media vs overall
    if (fabs(Lag) < 1*24) { LagMom1d.Add(Lag); }
    if (fabs(Lag) < 2*24) { LagMom2d.Add(Lag); }
    if (fabs(Lag) < 3*24) { LagMom3d.Add(Lag); }
    if (fabs(Lag) < 4*24) { LagMom4d.Add(Lag); }
    if (fabs(Lag) < 7*24) { LagMom7d.Add(Lag); }
    LagCntH.AddDat(Lag) += 1;
    LagBCntH.AddDat(Lag1) += 1;
    LagMCntH.AddDat(Lag2) += 1;
    /*LagFqH.AddDat(Lag, Qt.GetFq());
    LagMedFq.AddDat(Lag, Qt.GetFq(utMedia, *this));
    LagBlogFq.AddDat(Lag, Qt.GetFq(utBlog, *this));*/
    //if (TakeClusters) { PlotClustMediaVsBlogFq(IdV[c], TStr::Fmt("%s-%02d", OutFNmPref.CStr(), c+1)); }
    //else { PlotQtMediaVsBlogFq(IdV[c], TStr::Fmt("%s-%02d", OutFNmPref.CStr(), c+1)); }
    Cnt++;
  }
  TFltPrV PrV;
  LagMom1d.Def();  LagMom2d.Def();  LagMom3d.Def();  LagMom4d.Def();  LagMom7d.Def();
  { TGnuPlot GP("lagBlogsVsMedia-"+OutFNmPref, TStr::Fmt("Lag: 1D: %.2f %g;   2D: %.2f  %g;   3D: %.2f  %g;   4D: %.2f  %g;   7D: %.2f  %g",
    LagMom1d.GetMean(), LagMom1d.GetMedian(), LagMom2d.GetMean(), LagMom2d.GetMedian(), LagMom3d.GetMean(), LagMom3d.GetMedian(),
    LagMom4d.GetMean(), LagMom4d.GetMedian(), LagMom7d.GetMean(), LagMom7d.GetMedian()), true);
  LagBCntH.GetKeyDatPrV(PrV); PrV.Sort(); GP.AddPlot(PrV, gpwLinesPoints, TStr::Fmt("%s. b:%d m:%d u:%d d:%d BLOG", Qt.GetStr().CStr(),
    Qt.GetFq(utBlog, *this), Qt.GetFq(utMedia, *this), Qt.GetUrls(), Qt.GetDoms(*this)));
  LagMCntH.GetKeyDatPrV(PrV); PrV.Sort(); GP.AddPlot(PrV, gpwLinesPoints, TStr::Fmt("%s. b:%d m:%d u:%d d:%d MEDIA", Qt.GetStr().CStr(),
    Qt.GetFq(utBlog, *this), Qt.GetFq(utMedia, *this), Qt.GetUrls(), Qt.GetDoms(*this)));
  GP.AddCmd("set xrange[-24:24]\nset mxtics 5\nset xtics 5");
  GP.SetXYLabel("media lag in hours (+: news lag, -: news lead)", "count");
  GP.SavePng(); }
  /*{ TGnuPlot GP("lagScatterBlogs-"+OutFNmPref, "");
  LagBlogFq.GetKeyDatPrV(PrV);  GP.AddPlot(PrV, gpwPoints);
  GP.SetXYLabel("lag [hours]", "frequency (blogs)"); GP.SavePng(); }
  { TGnuPlot GP("lagScatterMedia-"+OutFNmPref, "");
  LagMedFq.GetKeyDatPrV(PrV);  GP.AddPlot(PrV, gpwPoints);
  GP.SetXYLabel("lag [hours]", "frequency (media)"); GP.SavePng(); }
  { TGnuPlot GP("lagScatter-"+OutFNmPref, "");
  LagFqH.GetKeyDatPrV(PrV);  GP.AddPlot(PrV, gpwPoints);
  GP.SetXYLabel("lag [hours]", "frequency (all)"); GP.SavePng(); }*/
  printf("\nTake top %d clusters. TmUnit: %s\n", TakeN, TTmInfo::GetTmUnitStr(TmUnit).CStr());
  printf("  1d:\tavg:\t%f\tmed:\t%f\n", LagMom1d.GetMean(), LagMom1d.GetMedian());
  printf("  2d:\tavg:\t%f\tmed:\t%f\n", LagMom2d.GetMean(), LagMom2d.GetMedian());
  printf("  3d:\tavg:\t%f\tmed:\t%f\n", LagMom3d.GetMean(), LagMom3d.GetMedian());
  printf("  4d:\tavg:\t%f\tmed:\t%f\n", LagMom4d.GetMean(), LagMom4d.GetMedian());
  printf("  7d:\tavg:\t%f\tmed:\t%f\n", LagMom7d.GetMean(), LagMom7d.GetMedian());

}

// Plot the average/median shape of a quote's (or cluster's) volume curve
// around its peak: each of the PlotN top items is aligned at its peak time
// and normalized to peak value 1, then per-time-offset moments are computed
// (offsets with fewer than MinValsPerTm samples are dropped).
void TQuoteBs::PlotFqDecay(const TTmUnit& TmUnit, const bool& TakeClusters, const TUrlTy& CntUrlTy, const int& PlotN, const int& MinValsPerTm, const TStr& OutFNmPref) const {
  THash<TFlt, TMom> MomH; // time offset [days] --> distribution of normalized volumes
  TQuote Qt;
  TIntV IdV;
  TGnuPlot GP("decay-"+OutFNmPref);
  if (TakeClusters) { GetCIdVByFq(IdV, 10, "", CntUrlTy, false); }
  else { GetQtIdVByFq(IdV, 8, 10, false, "", "", CntUrlTy); }
  for (int c = 0; c < PlotN; c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    //if (! Qt.IsSinglePeak(TmUnit, TSecTm(0), CntUrlTy, *this)) { printf("."); continue; }
    TQuote::TTmFltPrV QtV;  TFltPrV TmV;
    Qt.GetFqOt(QtV, TmUnit);
    //Qt.GetSmoothFqOt(QtV, TmUnit, 48, 1.4);
    int pos=0;  double val=0.0;
    for (int i = 0; i < QtV.Len(); i++) { // locate the peak
      if (val < QtV[i].Val2) { pos = i;  val = QtV[i].Val2; }
    }
    const double PeakVal = val;
    const TSecTm PeakTm = QtV[pos].Val1;
    for (int i = 0; i < QtV.Len(); i++) {
      // align at the peak and normalize the value to the peak volume
      TmV.Add(TFltPr((double(QtV[i].Val1)-double(PeakTm))/(24.0*3600), QtV[i].Val2/PeakVal));
      MomH.AddDat(TmV.Last().Val1).Add(TmV.Last().Val2);
    }
    const TStr Label = "";//TStr::Fmt("%s %d %d %d", Qt.GetStr().CStr(), Qt.GetFq(), Qt.GetUrls(), Qt.GetDoms(*this));
    //GP.AddPlot(TmV, gpwLines, Label, "lt 0");
  }
  TFltPrV AvgV, MedV;
  MomH.SortByKey();
  for (int i = 0; i < MomH.Len(); i++) {
    MomH[i].Def();
    if (MomH[i].GetVals()< MinValsPerTm) { printf("."); continue; } // too few samples at this offset
    AvgV.Add(TFltPr(MomH.GetKey(i), MomH[i].GetMean()));
    MedV.Add(TFltPr(MomH.GetKey(i), MomH[i].GetMedian()));
  }
  GP.AddPlot(AvgV, gpwLines, "Average", "lt 2 lw 2");
  GP.AddPlot(MedV, gpwLines, TStr::Fmt("Median (last qt: %d %d %d)", Qt.GetFq(), Qt.GetUrls(), Qt.GetDoms(*this)), "lt 1 lw 2");
  GP.SetXYLabel("time [days]", "count");
  //GP.AddCmd("set nokey");
  GP.AddCmd("set mxtics 6");
  GP.AddCmd("set xtics 1");
  GP.AddCmd("set yrange [0:1]");
  GP.AddCmd("set xrange [-5:5]");
  GP.AddCmd("set yzeroaxis lt -1");
  GP.SavePng();
}

// Rescale the values in H so the maximum becomes 1 (peak normalization).
// The divisor is max(peak, 1), so an empty or all-sub-1 histogram is left
// effectively unchanged and there is never a division by zero.
void NormPeakAt1(THash<TInt, TFlt>& H) {
  double MaxVal = 1;
  for (int k = 0; k < H.Len(); k++) {
    if ((double)H[k] > MaxVal) { MaxVal = H[k]; }
  }
  for (int k = 0; k < H.Len(); k++) {
    H[k] = H[k] / MaxVal;
  }
}

// Plot normalized mention frequency of quotes in blogs vs. mainstream media,
// aligned at three different notions of a quote's "peak": the maximum (Peak1),
// the mean time (Peak2) and the median time (Peak3) of its mentions.
void TQuoteBs::PlotBlogVsMediaFqOt(const TTmUnit& TmUnit, const bool& TakeClusters, int PlotN, const TStr& OutFNmPref) const {
  // Peak{1,2,3}{All,Med,Blog}H: hour offset from the peak -> summed normalized frequency
  THash<TInt, TFlt> Peak1AllH, Peak2AllH, Peak3AllH;
  THash<TInt, TFlt> Peak1MedH, Peak2MedH, Peak3MedH;
  THash<TInt, TFlt> Peak1BlogH, Peak2BlogH, Peak3BlogH;
  TQuote Qt;
  TIntV IdV;
  TInt FqAtPeak;
  if (TakeClusters) { GetCIdVByFq(IdV, 100, "", utUndef, false); }
  else { GetQtIdVByFq(IdV, 8, 10, false, "", "", utUndef); }
  for (int c = 0; c < TMath::Mn(PlotN, IdV.Len()); c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    // skip multi-peaked quotes; PlotN++ keeps the number of accepted quotes the same
    if (! Qt.IsSinglePeak(TmUnit, TSecTm(0), utUndef, *this)) { PlotN++; printf("."); continue; }
    TQuote::TTmFltPrV AllFqV, BlogFqV, MedFqV;  TFltPrV TmV;
    const double Peak1 = Qt.GetPeakTm(TmUnit, TSecTm(1), FqAtPeak).GetAbsSecs(); // peak
    const double Peak2 = Qt.GetMeanTm(TmUnit, utUndef, *this).GetAbsSecs();    // mean
    const double Peak3 = Qt.GetMedianTm(TmUnit, utUndef, *this).GetAbsSecs();  // median
    double PeakFq = FqAtPeak;
    Qt.GetFqOt(AllFqV, TmUnit, utUndef, *this);
    Qt.GetFqOt(MedFqV, TmUnit, utMedia, *this);
    Qt.GetFqOt(BlogFqV, TmUnit, utBlog, *this);
    /*for (int i = 0; i < AllFqV.Len(); i++) {
      const int T1 = (int) ((double(AllFqV[i].Val1.GetAbsSecs())-Peak1)/(3600.0));
      const int T2 = (int) ((double(AllFqV[i].Val1.GetAbsSecs())-Peak2)/(3600.0));
      const int T3 = (int) ((double(AllFqV[i].Val1.GetAbsSecs())-Peak3)/(3600.0));
      Peak1AllH.AddDat(T1) += AllFqV[i].Val2;
      Peak2AllH.AddDat(T2) += AllFqV[i].Val2;
      Peak3AllH.AddDat(T3) += AllFqV[i].Val2;
    }*/
    // media curve: normalize by the quote's own media peak before accumulating
    PeakFq=1;
    for (int i = 0; i < MedFqV.Len(); i++) { PeakFq=TMath::Mx((double)MedFqV[i].Val2(), PeakFq); }
    for (int i = 0; i < MedFqV.Len(); i++) {
      const int T1 = (int) TMath::Round((double(MedFqV[i].Val1.GetAbsSecs())-Peak1)/(3600.0));
      const int T2 = (int) TMath::Round((double(MedFqV[i].Val1.GetAbsSecs())-Peak2)/(3600.0));
      const int T3 = (int) TMath::Round((double(MedFqV[i].Val1.GetAbsSecs())-Peak3)/(3600.0));
      Peak1MedH.AddDat(T1) += MedFqV[i].Val2/PeakFq;
      Peak2MedH.AddDat(T2) += MedFqV[i].Val2/PeakFq;
      Peak3MedH.AddDat(T3) += MedFqV[i].Val2/PeakFq;
    }
    // blog curve: same, normalized by the quote's own blog peak
    PeakFq=1;
    for (int i = 0; i < BlogFqV.Len(); i++) { PeakFq=TMath::Mx((double)BlogFqV[i].Val2(), PeakFq); }
    for (int i = 0; i < BlogFqV.Len(); i++) {
      const int T1 = (int) TMath::Round((double(BlogFqV[i].Val1.GetAbsSecs())-Peak1)/(3600.0));
      const int T2 = (int) TMath::Round((double(BlogFqV[i].Val1.GetAbsSecs())-Peak2)/(3600.0));
      const int T3 = (int) TMath::Round((double(BlogFqV[i].Val1.GetAbsSecs())-Peak3)/(3600.0));
      Peak1BlogH.AddDat(T1) += BlogFqV[i].Val2/PeakFq;
      Peak2BlogH.AddDat(T2) += BlogFqV[i].Val2/PeakFq;
      Peak3BlogH.AddDat(T3) += BlogFqV[i].Val2/PeakFq;
    }
  }
  // rescale each aggregated curve so its own peak is 1 before plotting
  NormPeakAt1(Peak1AllH); NormPeakAt1(Peak2AllH); NormPeakAt1(Peak3AllH);
  NormPeakAt1(Peak1MedH); NormPeakAt1(Peak2MedH); NormPeakAt1(Peak3MedH);
  NormPeakAt1(Peak1BlogH); NormPeakAt1(Peak2BlogH); NormPeakAt1(Peak3BlogH);
  TGnuPlot::PlotValCntH(Peak1AllH, "ALL", Peak1MedH, "MEDIA", Peak1BlogH, "BLOG", "peakMax-"+OutFNmPref, "Max is the peak", "Time [h]", "Count (sum of frequencies of all quotes)");
  //TGnuPlot::PlotValCntH(Peak2AllH, "ALL", Peak2MedH, "MEDIA", Peak2BlogH, "BLOG", "peakAvg-"+OutFNmPref, "Average is the peak", "Time [h]", "Count (sum of frequencies of all quotes)");
  TGnuPlot::PlotValCntH(Peak3AllH, "ALL", Peak3MedH, "MEDIA", Peak3BlogH, "BLOG", "peakMed-"+OutFNmPref, "Median is the peak", "Time [h]", "Count (sum of frequencies of all quotes)");
  //TGnuPlot::PlotValCntH(Peak1AllH, "MAX PEAK", Peak2AllH, "AVG PEAK", Peak3BlogH, "MEDIAN PEAK", "peakAll-"+OutFNmPref, "ALL urls", "Time [h]", "Count (sum of frequencies of all quotes)");
  //TGnuPlot::PlotValCntH(Peak1MedH, "MAX PEAK", Peak2MedH, "AVG PEAK", Peak3BlogH, "MEDIAN PEAK", "peakMedia-"+OutFNmPref, "MEDIA urls", "Time [h]", "Count (sum of frequencies of Med quotes)");
  //TGnuPlot::PlotValCntH(Peak1BlogH, "MAX PEAK", Peak2BlogH, "AVG PEAK", Peak3BlogH, "MEDIAN PEAK", "peakBlog-"+OutFNmPref, "BLOG urls", "Time [h]", "Count (sum of frequencies of Blog quotes)");
}

void TQuoteBs::PlotBlogFracOt(const TTmUnit& TmUnit, const bool& TakeClusters, const int& PlotN, const TStr& OutFNmPref) const {
  TQuote Qt;
  TIntV IdV;
  THash<TFlt, TMom> TmMomH;
  THash<TFlt, TFltPr> TmBmH;
  if (TakeClusters) { GetCIdVByFq(IdV, 10, "", utUndef, false); }
  else { GetQtIdVByFq(IdV, 8, 10, false, "", "", utUndef); }
  for (int c = 0; c < PlotN; c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    TQuote::TTmFltPrV MedQtV, BlgQtV;
    Qt.GetFqOt(MedQtV, TmUnit, utMedia, *this);
    Qt.GetFqOt(BlgQtV, TmUnit, utBlog, *this);
    TFltPrV FracV;
    const int PeakTm = Qt.GetPeakTm(TmUnit, TSecTm(1), utUndef, *this);
    //const int PeakTm = Qt.GetMeanTm(TmUnit, utUndef, *this);
    //const int PeakTm = Qt.GetMedianTm(TmUnit, utUndef, *this);
    for (int m=0, b=0; m < MedQtV.Len(); m++) {
      while (b<BlgQtV.Len() && BlgQtV[b].Val1 < MedQtV[m].Val1) { b++; }
      if (b<BlgQtV.Len() && BlgQtV[b].Val1 == MedQtV[m].Val1) {
        const double Tm = (int(MedQtV[m].Val1.Round(TmUnit))-PeakTm)/(24*3600.0);
        //if (BlgQtV[b].Val2>1 && MedQtV[m].Val2>1) {
          const double Frac = BlgQtV[b].Val2/double(BlgQtV[b].Val2+MedQtV[m].Val2);
          FracV.Add(TFltPr(Tm, Frac));
          TmMomH.AddDat(Tm).Add(Frac);
        //}[
        TFltPr& BM = TmBmH.AddDat(Tm);
        BM.Val1+= BlgQtV[b].Val2;
        BM.Val2+= MedQtV[m].Val2;
      }
    }
    //TGnuPlot::PlotValV(FracV, TStr::Fmt("fqFrac-%s-%02d", OutFNmPref.CStr(), c+1), "", "time [days]", "fraction of blog mentions");
  }
  { TFltPrV PrV;
  for (int b = 0; b < TmMomH.Len(); b++) {
    TmMomH[b].Def();
    PrV.Add(TFltPr(TmMomH.GetKey(b), TmMomH[b].GetMean())); }
  TGnuPlot GP(TStr::Fmt("fqFracA-%s", OutFNmPref.CStr()));
  PrV.Sort();  GP.AddPlot(PrV, gpwLinesPoints);
  GP.AddCmd("set xrange [-7:7]\nset yzeroaxis lt -1"); GP.SavePng(); }
  { TFltPrV PrV;
  for (int b = 0; b < TmMomH.Len(); b++) {
    PrV.Add(TFltPr(TmMomH.GetKey(b), TmMomH[b].GetMedian())); }
  TGnuPlot GP(TStr::Fmt("fqFracM-%s", OutFNmPref.CStr()));
  PrV.Sort();  GP.AddPlot(PrV, gpwLinesPoints);
  GP.AddCmd("set xrange [-7:7]\nset yzeroaxis lt -1"); GP.SavePng(); }
  { TFltPrV PrV;
  for (int b = 0; b < TmBmH.Len(); b++) {
    PrV.Add(TFltPr(TmBmH.GetKey(b), TmBmH[b].Val1/(TmBmH[b].Val1+TmBmH[b].Val2))); }
  TGnuPlot GP(TStr::Fmt("fqFracS-%s", OutFNmPref.CStr()));
  PrV.Sort(); GP.AddPlot(PrV, gpwLinesPoints);
  GP.AddCmd("set xrange [-7:7]\nset yzeroaxis lt -1"); GP.SavePng(); }
}

// Plot the distribution and complementary CDF of total quote frequency.
// NOTE(review): the selection below is hard-coded to quotes containing both
// "lipstick" and "pig"; the commented-out code above was the generic top-N
// version. This looks like a leftover from a one-off experiment -- confirm
// before reusing (TakeClusters and PlotN are currently unused).
void TQuoteBs::PlotPopularityCnt(const bool& TakeClusters, const TUrlTy& UrlTy, const int& PlotN, const TStr& OutFNmPref) const {
  TIntV IdV;
  TIntH FqCntH;  // total frequency -> number of quotes with that frequency
  TQuote Qt;
  //if (TakeClusters) { GetCIdVByFq(IdV, 0, "", UrlTy, false); }
  //else { GetQtIdVByFq(IdV, 0, 0, false, "", UrlTy); }
  /*for (int c = 0; c < TMath::Mn(PlotN, IdV.Len()); c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    FqCntH.AddDat(Qt.GetFq(UrlTy, *this)) += 1;
  }*/
  for (int c = 0; c < Len(); c++) {
    const TQuote& Qt = GetQtN(c);
    if (Qt.GetStr().IsStrIn("lipstick") && Qt.GetStr().IsStrIn("pig")) {
      FqCntH.AddDat(Qt.GetFq(UrlTy, *this)) += 1; }
  }
  TIntPrV FqCntV, CdfV;
  FqCntH.GetKeyDatPrV(FqCntV);  FqCntV.Sort();
  TGUtil::GetCCdf(FqCntV, CdfV); // complementary CDF of the counts
  TGnuPlot::PlotValV(FqCntV, "pop-"+OutFNmPref, "", "total frequency", "count", gpsLog10XY);
  TGnuPlot::PlotValV(CdfV, "popCDF-"+OutFNmPref+"", "", "total frequency", "NCDF", gpsLog10XY);
}

// Plot the "empty area" above the cumulative quote-frequency curve:
// for each frequency level y, how many hours have a cumulative count below y,
// plus the cumulative frequency curve itself.
void TQuoteBs::PlotEmptyY(const TTmUnit& TmUnit, const bool& TakeClusters, const int& PlotN, const TStr& OutFNmPref) const {
  TQuote Qt;
  TIntV IdV;
  THash<TInt, TInt> TmCumCntH; // hour -> cumulative frequency over the top quotes
  int MaxY=0;                  // overall maximum cumulative count
  if (TakeClusters) { GetCIdVByFq(IdV, 10, "", utUndef, false); }
  else { GetQtIdVByFq(IdV, 8, 10, false, "", "", utUndef); }
  // clamp by IdV.Len() so IdV[c] never indexes out of bounds when PlotN > IdV.Len()
  for (int c = 0; c < TMath::Mn(PlotN, IdV.Len()); c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    TQuote::TTmFltPrV QtV;
    Qt.GetFqOt(QtV, TmUnit);
    for (int i = 0; i < QtV.Len(); i++) {
      const int Tm = QtV[i].Val1.Round(tmu1Hour)/3600;
      TInt& CumCnt = TmCumCntH.AddDat(Tm);
      CumCnt += (int) QtV[i].Val2;
      MaxY = TMath::Mx(CumCnt(), MaxY);
    }
  }
  if (TmCumCntH.Empty()) { printf("PlotEmptyY: no data\n"); return; } // nothing to plot
  TmCumCntH.SortByKey(true);
  const int BegHr = TmCumCntH.GetKey(0);
  THash<TFlt, TFlt> EmptyH; // y level -> number of hours with cumulative count below y
  TFltPrV TmV;              // (days since start, cumulative count)
  for (int t = 0; t < TmCumCntH.Len(); t++) {
    printf("%d  ", TmCumCntH[t]());
    for (int y = TmCumCntH[t]; y < MaxY; y+=1) {
      EmptyH.AddDat(y) += 1; }
    TmV.Add(TFltPr((TmCumCntH.GetKey(t)-BegHr)/24.0, TmCumCntH[t].Val));
  }
  if (EmptyH.Empty()) { // curve reaches MaxY everywhere; still save the frequency curve
    TGnuPlot::PlotValV(TmV, "emptyF1"+OutFNmPref, "Cummulative query frequency over time", "time (days)", "frequency");
    return;
  }
  EmptyH.SortByKey(false); // highest y level first
  TFltPrV EmptyV;          // cumulative area above each y level
  EmptyV.Add(TFltPr(EmptyH.GetKey(0), EmptyH[0]));
  for (int i = 1; i < EmptyH.Len(); i++) {
    EmptyV.Add(TFltPr(EmptyH.GetKey(i), EmptyH[i]+EmptyV.Last().Val2));
  }
  TGnuPlot::PlotValCntH(EmptyH, "emptyF-"+OutFNmPref+"-d", "Amount of empty area above the cummulative query-frequency curve", "y value", "area at y");
  TGnuPlot::PlotValV(EmptyV, "emptyF-"+OutFNmPref+"-c", "Amount of empty area above the cummulative query-frequency curve", "y value", "area above y");
  TGnuPlot::PlotValV(TmV, "emptyF1"+OutFNmPref, "Cummulative query frequency over time", "time (days)", "frequency");
}

// Load the dataset and build a hyperlink graph between documents (posts)
// that contain quotes; node ids are the string-url ids from this quote base.
PNGraph TQuoteBs::GetQuotePostNet(const TStr& DatasetFNm) const {
  PNGraph LinkGraph = TNGraph::New();
  TMemesDataLoader Loader(DatasetFNm);
  while (Loader.LoadNext()) {
    // skip posts that carry no quotes or whose source url is unknown
    if (Loader.MemeV.Empty() || ! IsStr(Loader.PostUrlStr.CStr())) { continue; }
    const int SrcId = GetStrId(Loader.PostUrlStr.CStr());
    for (int l = 0; l < Loader.LinkV.Len(); l++) {
      // only keep links whose target url is known to the quote base
      if (! IsStr(Loader.LinkV[l].CStr())) { continue; }
      const int DstId = GetStrId(Loader.LinkV[l].CStr());
      if (! LinkGraph->IsNode(SrcId)) { LinkGraph->AddNode(SrcId); }
      if (! LinkGraph->IsNode(DstId)) { LinkGraph->AddNode(DstId); }
      LinkGraph->AddEdge(SrcId, DstId);
    }
  }
  TSnap::PrintInfo(LinkGraph);
  return LinkGraph;
}

// Build a weighted domain-level network for cluster CId: nodes are the domains
// of urls mentioning the (merged) cluster quote; an edge (dom -> dom2) with
// weight w means posts on dom link w times to quote-mentioning posts on dom2.
PQtDomNet TQuoteBs::GetQuoteDomNet(const PNGraph& PostGraph, const int& CId) const {
  TQuote Qt;  GetMergedClustQt(CId, Qt, false);
  PQtDomNet DomG = TQtDomNet::New();
  TIntSet UrlSet;
  // all url ids that mention any quote of the cluster
  for (int i = 0; i < Qt.GetTimes(); i++) { UrlSet.AddKey(Qt.GetUrlId(i)); }
  TStrSet DomH; // domain name -> dense domain id (used as node id)
  for (int i = 0; i < Qt.GetTimes(); i++) {
    const int url = Qt.GetUrlId(i);
    if (! PostGraph->IsNode(url)) { continue; } // post has no known links
    const int dom = DomH.AddKey(TStrUtil::GetDomNm2(GetStr(url)));
    TNGraph::TNodeI NI = PostGraph->GetNI(url);
    for (int o = 0; o < NI.GetOutDeg(); o++) {
      // only follow links to other posts that also mention the quote
      if (! UrlSet.IsKey(NI.GetOutNId(o))) { continue; }
      const int url2 = NI.GetOutNId(o);
      const int dom2 = DomH.AddKey(TStrUtil::GetDomNm2(GetStr(url2)));
      if (dom==dom2) { continue; } // ignore links within the same domain
      // node data: (domain name, url type of the first url seen for that domain)
      if (! DomG->IsNode(dom)) {
        DomG->AddNode(dom, TPair<TStr, TInt>(DomH[dom], GetUrlTy(url))); }
      if (! DomG->IsNode(dom2)) {
        DomG->AddNode(dom2, TPair<TStr, TInt>(DomH[dom2], GetUrlTy(url2))); }
      if (DomG->IsEdge(dom, dom2)) {
        DomG->GetEDat(dom, dom2) += 1; } // edge weight counts parallel links
      else {
        DomG->AddEdge(dom, dom2, 1); }
    }
  }
  return DomG;
}

void TQuoteBs::SaveQuotes(const int& MinQtFq, const TStr& OutFNm) const {
  TIntV QtIdV;
  GetQtIdVByFq(QtIdV, 0, MinQtFq);
  SaveQuotes(QtIdV, OutFNm);
}

void TQuoteBs::SaveQuotes(const TIntV& QtIdV, const TStr& OutFNm) const {
  TIntSet QtIdSet;
  FILE *F = fopen(OutFNm.CStr(), "wt");
  printf("saving %d quotes\n", QtIdV.Len());
  fprintf(F, "#Freq\tUrls\tDomains\tQuote\n");
  for (int q = 0; q < QtIdV.Len(); q++) {
    const int QtId = QtIdV[q];
    if (QtIdSet.IsKey(QtId)) { continue; } // save each quote only once
    QtIdSet.AddKey(QtId);
    const TQuote& Q = GetQt(QtId);
    //fprintf(F, "\n[%d] %d=%d %s \t%d\t%d\t%d\n", TStrUtil::CountWords(Q.QtStr.CStr()), QtId, Q.GetCId(), Q.QtStr.CStr(), Q.GetFq(), Q.GetUrls(), Q.GetDoms(*this));
  fprintf(F, "%d\t%d\t%d\t%s\n", Q.GetFq(), Q.GetUrls(), Q.GetDoms(*this), Q.QtStr.CStr());
    if (Q.GetTy() == qtRoot || Q.GetTy() == qtCentr) { // cluster root or centroid
      IAssert(ClustQtIdVH.IsKey(Q.GetCId()));
      const TIntV& ClustV = ClustQtIdVH.GetDat(Q.GetCId());
      for (int i = 0; i < ClustV.Len(); i++) {
        const TQuote& Q1 = GetQt(ClustV[i]);
        fprintf(F, "\t[%d] %d=%d %s\t%d\t%d\t%d\n", TStrUtil::CountWords(Q1.QtStr.CStr()),
          //ClustV[i], GetQtId(Q1.GetStr().CStr()),
          ClustV[i](), Q1.GetCId(), Q1.QtStr.CStr(), Q1.GetFq(), Q1.GetUrls(), Q1.GetDoms(*this));
        QtIdSet.AddKey(ClustV[i]);
      }
    }
  }
  fclose(F);
}

// Save all clusters: plots of cluster size/volume distributions, plus a text
// file listing, for each cluster (largest total frequency first), the root
// quote, all member quotes, and (unless SkipUrls) every url mention with its
// timestamp.
void TQuoteBs::SaveClusters(const TStr& OutFNm, const bool& SkipUrls) const {
  TIntPrV FqCIdV; // (cluster total frequency, cluster id)
  TIntH ClSzCntH, ClFqCntH, ClMxQtFqCntH; // distributions over clusters
  for (int c = 0; c < GetClusts(); c++) {
    const int CId = GetCId(c);
    if (! IsQtId(CId)) { printf("!!! %d:%d\n", c, CId); continue; } // skip dangling cluster ids
    FqCIdV.Add(TIntPr(GetClustFq(CId), CId));
    ClSzCntH.AddDat(GetClust(CId).Len()) += 1;
    ClFqCntH.AddDat(GetClustFq(CId)) += 1;
    ClMxQtFqCntH.AddDat(GetClustMxQtFq(CId)) += 1; // AddDat for consistency/safety (was operator())
  }
  TGnuPlot::PlotValCntH(ClSzCntH, OutFNm+"-clSz", TStr::Fmt("%d clusters", GetClusts()), "Number of quotes in the cluster", "Number of clusters", gpsLog);
  TGnuPlot::PlotValCntH(ClFqCntH, OutFNm+"-clFq", TStr::Fmt("%d clusters", GetClusts()), "Volume of quotes in the cluster", "Number of clusters", gpsLog, false, gpwLinesPoints, false, false);
  TGnuPlot::PlotValCntH(ClFqCntH, OutFNm+"-clFqB", TStr::Fmt("%d clusters", GetClusts()), "Volume of quotes in the cluster", "Number of clusters", gpsLog, false, gpwLinesPoints, false, true);
  TGnuPlot::PlotValCntH(ClMxQtFqCntH, OutFNm+"-clMxQtFq", TStr::Fmt("%d clusters", GetClusts()), "Volume of the most frequent quote in the cluster", "Number of clusters", gpsLog, false, gpwLinesPoints, false, false);
  TGnuPlot::PlotValCntH(ClMxQtFqCntH, OutFNm+"-clMxQtFqB", TStr::Fmt("%d clusters", GetClusts()), "Volume of the most frequent quote in the cluster", "Number of clusters", gpsLog, false, gpwLinesPoints, false, true);

  FqCIdV.Sort(false); // largest clusters first
  TIntPrV QtFqV;
  FILE *F = fopen(TStr::Fmt("%s-clust.txt", OutFNm.CStr()).CStr(), "wt");
  if (F == NULL) { printf("SaveClusters: cannot open %s-clust.txt for writing\n", OutFNm.CStr()); return; }
  fprintf(F, "format:\n<ClSz>\t<TotFq>\t<Root>\t<ClusterId>\n");
  fprintf(F, "\t<QtFq>\t<Urls>\t<QtStr>\t<QuteId>\n");
  fprintf(F, "\t\t<Tm>\t<Fq>\t<UrlTy>\t<Url>\n\n");
  for (int c = 0; c < FqCIdV.Len(); c++) {
    const int CId = FqCIdV[c].Val2;
    const TIntV& ClustV = GetClust(CId);
    fprintf(F, "\n%d\t%d\t%s\t%d\n", ClustV.Len(), FqCIdV[c].Val1(), GetQt(CId).GetStr().CStr(), CId);
    // order member quotes by their url count, most mentioned first
    QtFqV.Clr(false);
    for (int q = 0; q < ClustV.Len(); q++) {
      IAssert(IsQtId(ClustV[q]));
      QtFqV.Add(TIntPr(GetQt(ClustV[q]).GetUrls(), ClustV[q])); }
    QtFqV.Sort(false);
    for (int q = 0; q < QtFqV.Len(); q++) {
      IAssert(IsQtId(QtFqV[q].Val2));
      const TQuote& Qt = GetQt(QtFqV[q].Val2);
      if ((! SkipUrls) && q > 0) { fprintf(F, "\n"); }
      fprintf(F, "\t%d\t%d\t%s\t%d\n", Qt.GetFq(), Qt.GetUrls(), Qt.GetStr().CStr(), QtFqV[q].Val2());
      if (! SkipUrls) { // one line per url mention: time, count, media/blog flag, url
        for (int u = 0; u < Qt.GetUrls(); u++) {
          fprintf(F, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(),
            GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", GetStr(Qt.TmUrlCntV[u].UrlId())); }
      }
    }
  }
  fclose(F);
}

// Save: root qt, all members, all urls tms for each memeber
void TQuoteBs::SaveClusters(const TIntV& QtIdV, const TStr& OutFNm, const bool& SkipUrls) const {
  FILE *F = fopen(TStr::Fmt("%s-clust.txt", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "format:\n<ClSz>\t<TotFq>\t<Root>\t<ClusterId>\n");
  fprintf(F, "\t<QtFq>\t<Urls>\t<QtStr>\t<QuteId>\n");
  fprintf(F, "\t\t<Tm>\t<Fq>\t<UrlTy>\t<Url>\n\n");
  for (int c = 0; c < QtIdV.Len(); c++) {
    int CId = QtIdV[c];
    if (! IsClust(CId)) { CId = GetQt(CId).GetCId(); }
    const TIntV& ClustV = GetClust(CId);
    fprintf(F, "\n%d\t%d\t%s\t%d\n", ClustV.Len(), GetClustFq(CId), GetQt(CId).GetStr().CStr(), CId);
    for (int q = 0; q < ClustV.Len(); q++) {
      const TQuote& Qt = GetQt(ClustV[q]);
      if ((! SkipUrls) && q > 0) { fprintf(F, "\n"); }
      fprintf(F, "\t%d\t%d\t%s\t%d\n", Qt.GetFq(), Qt.GetUrls(), Qt.GetStr().CStr(), ClustV[q]());
      if (! SkipUrls) {
        for (int u = 0; u < Qt.GetUrls(); u++) {
          fprintf(F, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(),
            GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", GetStr(Qt.TmUrlCntV[u].UrlId())); }
      }
    }
  }
  fclose(F);
}

// For every cluster root quote, create a merged "centroid" quote pooling the
// mentions of all quotes in the cluster, de-duplicate per-domain mentions, and
// append the centroid to the quote base.
void TQuoteBs::AddMergedClusters() {
	// NOTE(review): QuoteH grows inside this loop (AddDat below) and Len() is
	// re-evaluated every iteration, so newly appended centroids are visited too;
	// this assumes GetMergedClustQt does not give them type qtRoot -- confirm.
	for (int i = 0; i < QuoteH.Len(); i++) {
		const TQuote& Qt = GetQt(i);
		const int QtId = i;
		if (Qt.GetTy() == qtRoot) {
			int MxQtId = QuoteH.Len(); // next free id: append the centroid at the end
			TQuote& CentrQt = QuoteH.AddDat(MxQtId);
			GetMergedClustQt(QtId, CentrQt, false);
			
			// Remove duplicates: keep only one mention per domain within a 30-minute
			// window (assumes TmUrlCntV is sorted by time -- TODO confirm)
			TVec<TQuote::TTmUrlCnt> RecV;    // records that survive de-duplication
			THash<TChA, TSecTm> DomTmH;      // domain -> time of the last kept mention
			for (int j = 0; j < CentrQt.TmUrlCntV.Len(); j++) {
				TChA UrlStr = TChA(GetStr(CentrQt.TmUrlCntV[j].UrlId()));
				TSecTm CurTm = CentrQt.TmUrlCntV[j].Tm();
				TChA DomNm = TStrUtil::GetDomNm2(UrlStr);
				// strip a leading dot from the domain name
				if (DomNm.IsPrefix(".")) DomNm = DomNm.GetSubStr(1, TInt::Mx);
				// keep the record if the domain is new or the last kept mention is >30min older
				if (!DomTmH.IsKey(DomNm) || (CurTm.GetAbsSecs() - DomTmH.GetDat(DomNm).GetAbsSecs() > 30 * 60)) {
					DomTmH.AddDat(DomNm) = CurTm;
					RecV.Add(CentrQt.TmUrlCntV[j]);
				}
			}
			CentrQt.TmUrlCntV = RecV;
		}
	}
	printf("Add Merge clusters DONE\n");
}

// save: merged quotes with url
void TQuoteBs::SaveMergedClusters(const TStr& OutFNm) {
	TIntPrV FqCIdV;
	for (int i = 0; i < QuoteH.Len(); i++) {
		const TQuote& Qt = GetQt(i);
		if (Qt.GetTy() != qtCentr)
			continue;
		FqCIdV.Add(TIntPr(Qt.GetUrls(),i));
	}	
	printf("Total Quote clusters : %d\n", FqCIdV.Len());
  FqCIdV.Sort(false);
  TIntPrV QtFqV;
  FILE *F = fopen(TStr::Fmt("%s-clust.txt", OutFNm.CStr()).CStr(), "wt");
	fprintf(F, "format:\n<ClSz>\t<TotFq>\t<Root>\t<ClusterId>\n");
	fprintf(F, "\t\t<Tm>\t<Fq>\t<UrlTy>\t<Url>\n\n");

	// Stats
	StatsH.AddDat("Links in cluster") = 0; StatsH.AddDat("Urls in cluster") = 0;
	for (int i = 0; i < UrlInDegH.Len(); i++)
		StatsH.GetDat("Links in cluster") += UrlInDegH[i];
	for (int i = 0; i < QuoteH.Len(); i++) {
		const TQuote& Qt = QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		StatsH.GetDat("Urls in cluster") += Qt.GetUrls();
	}

  for (int c = 0; c < FqCIdV.Len(); c++) {
		const int CId = FqCIdV[c].Val2;
		const TQuote& Qt = GetQt(CId);
		//TNodeNet<TSecTm> LkG; GetClustLkGraph(LkG, Qt);
		fprintf(F, "\n%d\t%s\n", FqCIdV[c].Val1(), Qt.GetStr().CStr());
		for (int u = 0; u < Qt.GetUrls(); u++) {
			fprintf(F, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(), 
				GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", GetStr(Qt.TmUrlCntV[u].UrlId()));
			//TNodeNet<TSecTm>::TNodeI NI = LkG.GetNI(Qt.TmUrlCntV[u].UrlId());
			//for (int e = 0; e < NI.GetOutDeg(); e++) {
			//	int LkUrlId = NI.GetOutNId(e);
			//	fprintf(F, "\t\t\t %s", GetStr(LkUrlId));
			//}
			//if (NI.GetOutDeg() > 0) fprintf(F, "\n");
			if (!UrlLkH.IsKey(Qt.TmUrlCntV[u].UrlId())) 
				continue;
			TIntSet& LkSet = UrlLkH.GetDat(Qt.TmUrlCntV[u].UrlId());
			fprintf(F, "\t\t\t%d\t", LkSet.Len());
			for (int i = 0; i < LkSet.Len(); i++) {
				int LkUrlId = LkSet[i];
				fprintf(F, "%s\t", GetStr(LkUrlId));
			}			
			fprintf(F, "\n");
		}

  }
  fclose(F);

}

void TQuoteBs::SaveBigBlogMassQt(const TStr& OutFNm) const {
  const TTmUnit TmUnit = tmuDay;
  const bool TakeClusters = true;
  TIntV IdV;
  TQuote Qt;
  int Candidates=0;
  if (TakeClusters) { GetCIdVByFq(IdV, 10, "", utUndef, false); }
  else { GetQtIdVByFq(IdV, 8, 10, false, "", "", utUndef); }
  TVec<TPair<TFlt, TStr> > ScoreV;
  for (int c = 0; c < IdV.Len(); c++) {
    if (TakeClusters) { GetMergedClustQt(IdV[c], Qt, false); }
    else { Qt = GetQt(IdV[c]); }
    //if (TStrUtil::CountWords(Qt.GetStr().CStr()) < 10) { continue; }
    const int MediaPeak = Qt.GetPeakTm(TmUnit, TSecTm(1), utMedia, *this); // media peak
    //const double BlogFq = Qt.GetFq(TSecTm(1), TSecTm(MediaPeak-7*24*3600), utBlog, *this);  // Fq 7 week before the media peak
    //const double TotFq = Qt.GetFq();
    const double BlogFq = Qt.GetUrls(TSecTm(MediaPeak-21*24*3600), TSecTm(MediaPeak-7*24*3600), utBlog, *this);  // Fq 7 week before the media peak
    const double TotFq = Qt.GetUrls();
    //const double Score = BlogFq/TotFq;
    if (BlogFq < 10) { continue; }
    Candidates++;
    if (BlogFq < TotFq*0.15 || BlogFq > TotFq*0.75) { continue; }
    //if (Score > 0.5) { continue; }
    ScoreV.Add(TFltStrPr(TotFq, TStr::Fmt("%g\t%g\t%s", BlogFq, TotFq, Qt.GetStr().CStr())));
    /*if (TotFq > 500) {
      TFltFltH MediaCntH, BlogCntH;
      for (int i = 0; i < Qt.TmUrlCntV.Len(); i++) {
        const double Hr = (double(Qt.TmUrlCntV[i].Tm().Round(tmuDay))-double(MediaPeak))/(24*3600.0);
        if (GetUrlTy(Qt.TmUrlCntV[i].UrlId()) == utMedia) { MediaCntH.AddDat(Hr) += Qt.TmUrlCntV[i].Cnt(); }
        else { BlogCntH.AddDat(Hr) += Qt.TmUrlCntV[i].Cnt(); }
      }
      TGnuPlot GP(TStr::Fmt("qt-%05d", int(TotFq)), Qt.GetStr());
      TFltPrV MV, BV; MediaCntH.GetKeyDatPrV(MV); BlogCntH.GetKeyDatPrV(BV); MV.Sort(); BV.Sort();
      GP.AddPlot(MV, gpwLinesPoints, "MEDIA");
      GP.AddPlot(BV, gpwLinesPoints, "BLOGS");
      GP.SavePng();
    }*/
  }
  printf("Considered: %d quotes\n", IdV.Len());
  printf("            %d candidates\n", Candidates);
  printf("            %d selected\n", ScoreV.Len());
  ScoreV.Sort(false);
  FILE *F = fopen(TStr::Fmt("%s.txt", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "total %d items\n", IdV.Len());
  fprintf(F, "items above blog threshdolg %d\n", ScoreV.Len());
  fprintf(F, "counts are numbers of urls (not actually number of occurences)\n");
  fprintf(F, "\nblog\ttotal\tQuote\n");
  for (int i = 0; i < ScoreV.Len(); i++) {
    fprintf(F, "%s\n", ScoreV[i].Val2.CStr());
  }
  fclose(F);
}

// Save for flash visualization:
//  File: <date_hour> <quote_id> <raw_freq> <smooth_freq>
//  File  <quote_id> <quote> <quote_url>
// Takes the top SaveN quotes/clusters from QtIdV, computes their frequency
// curves in [BegTm, EndTm], truncates each curve shortly after the quote
// drops out of the per-time top-K, and writes "<OutFNm>Q.tab" (quote info)
// and "<OutFNm>T.tab" (smoothed + raw frequency per time step).
void TQuoteBs::SaveForFlash(const TIntV& QtIdV, const TStr& OutFNm, const TTmUnit& TmUnit, int SaveN, const TSecTm& BegTm, const TSecTm& EndTm, const TUrlTy& OnlyCountTy) const {
  const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
  //TVec<TQuad<TInt, TInt, TInt, TInt> > QtInfoH; // QtId --> (peak time, QtStr, take url from quote QtId, FqOverTmV)
  TVec<TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV> > QtInfoV, QtInfoV2; // (PeakTm, CentrQtId, TotFq, RawFqOt)
  TQuote::TTmFltPrV FqV, SmoothFqV;
  TIntV VizQtIdV; // quote ids to resolve page urls for
  if (SaveN == -1) { SaveN = TInt::Mx; } // -1 means "save everything"
  TSecTm MinTm=TSecTm(TInt::Mx-1), MaxTm=TSecTm(1); // overall time span of all curves
  //TIntSet QSet, CSet;
  printf("Saving top %d out of %d quotes\n", SaveN, QtIdV.Len());
  THash<TInt, TIntPr> WeekPeakQtIdH; // top quote of each week
  for (int q = 0; q < QtIdV.Len() && QtInfoV2.Len() < (SaveN+100); q++) { // take top SaveN
    const TQuote& Qt = GetQt(QtIdV[q]);
    if (Qt.GetTy() == qtQuote) { // plain quote: use its own frequency curve
      Qt.GetFqOt(FqV, TmUnit, BegTm, EndTm, OnlyCountTy, *this);
      if (FqV.Empty()) { continue; }
      // 1e-6*urls breaks ties between quotes peaking at the same time
      QtInfoV2.Add(TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV>((double)Qt.GetPeakTm(TmUnit, BegTm)+1e-6*Qt.GetUrls(), QtIdV[q], Qt.GetFq(), FqV));
      MinTm = TMath::Mn(MinTm, FqV[0].Val1);
      MaxTm = TMath::Mx(MaxTm, FqV.Last().Val1);
      VizQtIdV.Add(QtIdV[q]);
      // remember the strongest quote peaking in each week
      TInt PeakFq;
      const int PeakTm = Qt.GetPeakTm(tmuWeek, BegTm, PeakFq).GetAbsSecs();
      if (! WeekPeakQtIdH.IsKey(PeakTm)) {
        WeekPeakQtIdH.AddDat(PeakTm, TIntPr(PeakFq, QtInfoV2.Len()-1)); }
      else if (WeekPeakQtIdH.GetDat(PeakTm).Val1<PeakFq) {
        WeekPeakQtIdH.AddDat(PeakTm, TIntPr(PeakFq, QtInfoV2.Len()-1)); }
    } else { // quote cluster
      const int CId = Qt.GetCId();
      if (! ClustQtIdVH.IsKey(CId)) { continue; }
      const int CentrQtId = GetCentrQtId(CId);
      TQuote CentrQt;  GetMergedClustQt(CId, CentrQt, true);
      //printf("%d\t%s\n", CId, CentrQt.GetStr().CStr());
      CentrQt.GetFqOt(FqV, TmUnit, BegTm, EndTm, OnlyCountTy, *this);
      if (FqV.Empty()) { continue; }
      QtInfoV2.Add(TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV>((double)CentrQt.GetPeakTm(TmUnit, BegTm)+1e-6*CentrQt.GetUrls(), CentrQtId, GetClustFq(CId), FqV));
      TInt PeakFq;
      const int PeakTm = CentrQt.GetPeakTm(tmuWeek, BegTm, PeakFq).GetAbsSecs();
      if (! WeekPeakQtIdH.IsKey(PeakTm)) {
        WeekPeakQtIdH.AddDat(PeakTm, TIntPr(PeakFq, QtInfoV2.Len()-1)); }
      else if (WeekPeakQtIdH.GetDat(PeakTm).Val1<PeakFq) {
        WeekPeakQtIdH.AddDat(PeakTm, TIntPr(PeakFq, QtInfoV2.Len()-1)); }
      MinTm = TMath::Mn(MinTm, FqV[0].Val1);
      MaxTm = TMath::Mx(MaxTm, FqV.Last().Val1);
      VizQtIdV.Add(CentrQtId);
    }
  }
  // make sure that for each week there is a quote that peaks
  { TIntSet QtIdSet;
  // first take top quite of that week (currently disabled -- QtIdSet stays empty)
  for (int i = 0; i < WeekPeakQtIdH.Len(); i++) {
    //QtInfoV.Add(QtInfoV2[WeekPeakQtIdH[i].Val2]);
    //QtIdSet.AddKey(WeekPeakQtIdH[i].Val2);
  }
  QtInfoV.Clr(true);
  // fill with other top quotes
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV2.Len()); i++) {
    if (QtIdSet.IsKey(i)) { continue; }
    QtInfoV.Add(QtInfoV2[i]);
  printf("%d  ", (int) QtInfoV2[i].Val3()); //(PeakTm, CentrQtId, TotFq, RawFqOt)
    if (QtInfoV.Len() > SaveN) { break; }
  } } // */
  // make sure there are only K quotes active at a time
  THash<TSecTm, TIntFltH> TmCntH; // for each time, get all quote Ids and frequencies
  for (int i = 0; i < QtInfoV.Len(); i++) {
    const TQuote::TTmFltPrV& FqV = QtInfoV[i].Val4;
    for (int t = 0; t < FqV.Len(); t++) {
      TmCntH.AddDat(FqV[t].Val1).AddDat(i, FqV[t].Val2); }
  }
  for (int i = 0; i < TmCntH.Len(); i++) {
    TmCntH[i].SortByDat(false); } // highest frequency first at every time step
  const int K = 10;
  const int Slack = 3*24*3600; // give it additional 3 days
  THash<TInt, TSecTm> QtLastTopH; // for each quote last time they were in top K
  for (int i = 0; i < TmCntH.Len(); i++) {
    TIntFltH& TmH = TmCntH[i];
    for (int j = 0; j < TMath::Mn(K, TmH.Len()); j++) {
      QtLastTopH.AddDat(TmH.GetKey(j), TmCntH.GetKey(i));
    }
  }
  // delete all appearances after the quote moves out of top K
  // NOTE(review): GetDat(i) assumes every kept quote was in the top K at least
  // once (true whenever QtInfoV.Len() <= K at some time step) -- confirm.
  for (int i = 0; i < QtInfoV.Len(); i++) {
    TQuote::TTmFltPrV& FqV = QtInfoV[i].Val4;
    const TSecTm LastTime = QtLastTopH.GetDat(i);
    int x = 0; for (x = 0; x < FqV.Len() && FqV[x].Val1 <= LastTime+Slack; x++) { }
    if (x < FqV.Len()) { FqV.Del(x, FqV.Len()-1); }
  } // */
  printf("save from %d quotes\n", QtInfoV.Len());
  QtInfoV.Sort(); // by peak time, so quote ids are assigned in peak order
  TIntH QtIdUrlH;  GetQtPageUrl(VizQtIdV, QtIdUrlH);
  // save quote info
  FILE *F = fopen(TStr::Fmt("%sQ.tab", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "QuoteId\tQuote\tUrl\tTotalVol\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV.Len()); i++) {
    fprintf(F, "%d\t%s\t%s\t%d\n", SaveN-i, GetQt(QtInfoV[i].Val2).QtStr.CStr(), GetStr(QtIdUrlH.GetDat(QtInfoV[i].Val2)), (int) QtInfoV[i].Val3);
  }
  fclose(F);
  printf("saveT\n");
  // save quote time frequency
  F = fopen(TStr::Fmt("%sT.tab", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "Time\tQuoteId\tSmoothFreq\tRawFreq\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV.Len()); i++) {
    const TQuote::TTmFltPrV RawFqV = QtInfoV[i].Val4;
    //TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.2, MinTm, MaxTm); // less smoothing
    TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.05, MinTm, MaxTm);  // more smoothing
    //SmoothFqV = RawFqV;
    IAssert(SmoothFqV.Len() >= RawFqV.Len());
    // d walks the raw curve in step with the (denser) smoothed curve ds
    for (int d=0, ds = 0; ds < SmoothFqV.Len(); ds++) {
      if (TmUnit == tmuDay) { fprintf(F, "%s", SmoothFqV[ds].Val1.GetDtYmdStr().CStr()); }
      else { fprintf(F, "%s_%02d", SmoothFqV[ds].Val1.GetDtYmdStr().CStr(), SmoothFqV[ds].Val1.GetHourN()); }
      fprintf(F, "\t%d\t%.2f", SaveN-i, SmoothFqV[ds].Val2());
      if (d < RawFqV.Len() && SmoothFqV[ds].Val1 == RawFqV[d].Val1) { fprintf(F, "\t%.0f", RawFqV[d].Val2()); d++; } else { fprintf(F, "\t0"); }
      fprintf(F, "\n");
    }
    //if (QtIdV[N] == MaxStepsQId) { // add zero counts to fill the interval [MinTm, MaxTm]
    //  for (TSecTm Tm = SmoothFqV.Last().Val1; Tm.GetInUnits(TmUnit) < MaxTm.GetInUnits(TmUnit); Tm += TmUnitSecs) {
    //    if (TmUnit == tmuDay) { fprintf(F, "%s", Tm.GetDtYmdStr().CStr()); }
    //    else { fprintf(F, "%s_%02d", Tm.GetDtYmdStr().CStr(), Tm.GetHourN()); }
    //    fprintf(F, "\t%d\t0\t0\n", SaveN-i); } }
  }
  fclose(F);
  printf("done.\n");
}


// save for flash visualization for top 10, 20, 30, 50, 100 quotes:
//  File: <date_hour> <quote_id> <raw_freq> <smooth_freq>
//  File  <qoote_id> <quote> <quote_url>
void TQuoteBs::SaveForFlashAll(const TIntV& QtIdV, const TStr& OutFNm, const TTmUnit& TmUnit, const TSecTm& BegTm, const TSecTm& EndTm, const TUrlTy& OnlyCountTy) const {
  const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
  TVec<TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV> > QtInfoV, QtInfoV2; // (PeakTm, CentrQtId, TotFq, RawFqOt)
  TQuote::TTmFltPrV FqV, SmoothFqV;
  TIntV VizQtIdV;
  int SaveN = 100;
  TSecTm MinTm=TSecTm(TInt::Mx-1), MaxTm=TSecTm(1);
  printf("Saving top %d out of %d quotes\n", SaveN, QtIdV.Len());

  for (int q = 0; q < QtIdV.Len() && QtInfoV2.Len() < (SaveN+100); q++) { // take top SaveN
		const TQuote& Qt = GetQt(QtIdV[q]);
		Assert(Qt.GetTy() != qtQuote); // Must be quote cluster centoid
		const int CId = Qt.GetCId();
		if (! ClustQtIdVH.IsKey(CId)) { continue; }
		// Merge quotes in the cluster
		const int CentrQtId = GetCentrQtId(CId);
		TQuote CentrQt;  GetMergedClustQt(CId, CentrQt, false); //Was true originally
		CentrQt.GetFqOt(FqV, TmUnit, BegTm, EndTm, OnlyCountTy, *this);

		if (FqV.Empty()) { continue; }
		QtInfoV2.Add(TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV>((double)CentrQt.GetPeakTm(TmUnit, BegTm)+1e-6*CentrQt.GetUrls(), CentrQtId, GetClustFq(CId), FqV));
		MinTm = TMath::Mn(MinTm, FqV[0].Val1);
		MaxTm = TMath::Mx(MaxTm, FqV.Last().Val1);
		VizQtIdV.Add(CentrQtId);
  }
  // make sure that for each week there is a quote that peaks
  { TIntSet QtIdSet;
  QtInfoV.Clr(false);
  // fill with other top quotes
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV2.Len()); i++) {
    if (QtIdSet.IsKey(i)) { continue; }
    QtInfoV.Add(QtInfoV2[i]);
    printf("%d  ", (int) QtInfoV2[i].Val3()); //(PeakTm, CentrQtId, TotFq, RawFqOt)
    if (QtInfoV.Len() > SaveN) { break; }
  } }

  // make sure there are only K quotes active at a time
  THash<TSecTm, TIntFltH> TmCntH; // for each time, get all quote Ids and frequencies
  for (int i = 0; i < QtInfoV.Len(); i++) {
    const TQuote::TTmFltPrV& FqV = QtInfoV[i].Val4;
    for (int t = 0; t < FqV.Len(); t++) {
      TmCntH.AddDat(FqV[t].Val1).AddDat(i, FqV[t].Val2); }
  }
  for (int i = 0; i < TmCntH.Len(); i++) {
    TmCntH[i].SortByDat(false); }
  //const int K = 10;
  //const int Slack = 3*24*3600; // give it additional 3 days
  //THash<TInt, TSecTm> QtLastTopH; // for each quote last time they were in top K
  //for (int i = 0; i < TmCntH.Len(); i++) {
  //  TIntFltH& TmH = TmCntH[i];
  //  for (int j = 0; j < TMath::Mn(K, TmH.Len()); j++) {
  //    QtLastTopH.AddDat(TmH.GetKey(j), TmCntH.GetKey(i));
  //  }
  //}
  //// delete all appearances after the quote moves out of top K
  //for (int i = 0; i < QtInfoV.Len(); i++) {
  //  TQuote::TTmFltPrV& FqV = QtInfoV[i].Val4;
  //  const TSecTm LastTime = QtLastTopH.GetDat(i);
  //  int x = 0; for (x = 0; x < FqV.Len() && FqV[x].Val1 <= LastTime+Slack; x++) { }
  //  if (x < FqV.Len()) { FqV.Del(x, FqV.Len()-1); }
  //} // */
  printf("save from %d quotes\n", QtInfoV.Len());
  QtInfoV.Sort();
// Generate files for flash, for top 10, 20, 30, 50, 100 quote clusters 
  TVec<TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV> > QtInfoV10, QtInfoV20, QtInfoV30, QtInfoV50;
  for (int i = 0; i < QtInfoV.Len(); i++) {
	  TQuad<TFlt, TInt, TInt, TQuote::TTmFltPrV> QtInfo = QtInfoV[i];
	  int cnt = 0;
	  for (int j = 0; j < QtInfoV.Len(); j++) 
		  if (QtInfoV[j].Val3 > QtInfo.Val3) 
			  cnt++;
	  if (cnt < 10) QtInfoV10.Add(QtInfo);
	  if (cnt < 20) QtInfoV20.Add(QtInfo);
	  if (cnt < 30) QtInfoV30.Add(QtInfo);
	  if (cnt < 50) QtInfoV50.Add(QtInfo);
  }

  SaveN = 10;
  TIntH QtIdUrlH;  GetQtPageUrl(VizQtIdV, QtIdUrlH);
  FILE *F = fopen("top10Q.tab", "wt");
  fprintf(F, "QuoteId\tQuote\tUrl\tTotalVol\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV10.Len()); i++) {
    fprintf(F, "%d\t%s\t%s\t%d\n", SaveN-i, GetQt(QtInfoV10[i].Val2).QtStr.CStr(), GetStr(QtIdUrlH.GetDat(QtInfoV10[i].Val2)), (int) QtInfoV10[i].Val3);
  }
  fclose(F);
  // save quote time frequency
  F = fopen("top10T.tab", "wt");
  fprintf(F, "Time\tQuoteId\tSmoothFreq\tRawFreq\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV10.Len()); i++) {
    const TQuote::TTmFltPrV RawFqV = QtInfoV10[i].Val4;
    TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.05, MinTm, MaxTm);  // more smootning

    IAssert(SmoothFqV.Len() >= RawFqV.Len());
    for (int d=0, ds = 0; ds < SmoothFqV.Len(); ds++) {
      if (TmUnit == tmuDay) { fprintf(F, "%s", SmoothFqV[ds].Val1.GetDtYmdStr().CStr()); }
      else { fprintf(F, "%s_%02d", SmoothFqV[ds].Val1.GetDtYmdStr().CStr(), SmoothFqV[ds].Val1.GetHourN()); }
      fprintf(F, "\t%d\t%.2f", SaveN-i, SmoothFqV[ds].Val2());
      if (d < RawFqV.Len() && SmoothFqV[ds].Val1 == RawFqV[d].Val1) { fprintf(F, "\t%.0f", RawFqV[d].Val2()); d++; } else { fprintf(F, "\t0"); }
      fprintf(F, "\n");
    }
  }
  fclose(F);
  printf("Top %d Done.\n", SaveN);

  SaveN = 20;
  // save quote info
  F = fopen("top20Q.tab", "wt");
  fprintf(F, "QuoteId\tQuote\tUrl\tTotalVol\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV20.Len()); i++) {
    fprintf(F, "%d\t%s\t%s\t%d\n", SaveN-i, GetQt(QtInfoV20[i].Val2).QtStr.CStr(), GetStr(QtIdUrlH.GetDat(QtInfoV20[i].Val2)), (int) QtInfoV20[i].Val3);
  }
  fclose(F);
  printf("saveT\n");
  // save quote time frequency
  F = fopen("top20T.tab", "wt");
  fprintf(F, "Time\tQuoteId\tSmoothFreq\tRawFreq\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV20.Len()); i++) {
    const TQuote::TTmFltPrV RawFqV = QtInfoV20[i].Val4;
    TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.05, MinTm, MaxTm);  // more smootning

    IAssert(SmoothFqV.Len() >= RawFqV.Len());
    for (int d=0, ds = 0; ds < SmoothFqV.Len(); ds++) {
      if (TmUnit == tmuDay) { fprintf(F, "%s", SmoothFqV[ds].Val1.GetDtYmdStr().CStr()); }
      else { fprintf(F, "%s_%02d", SmoothFqV[ds].Val1.GetDtYmdStr().CStr(), SmoothFqV[ds].Val1.GetHourN()); }
      fprintf(F, "\t%d\t%.2f", SaveN-i, SmoothFqV[ds].Val2());
      if (d < RawFqV.Len() && SmoothFqV[ds].Val1 == RawFqV[d].Val1) { fprintf(F, "\t%.0f", RawFqV[d].Val2()); d++; } else { fprintf(F, "\t0"); }
      fprintf(F, "\n");
    }
  }
  fclose(F);
  printf("Top %d Done.\n", SaveN);

  SaveN = 30;
  // save quote info
  F = fopen("top30Q.tab", "wt");
  fprintf(F, "QuoteId\tQuote\tUrl\tTotalVol\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV30.Len()); i++) {
    fprintf(F, "%d\t%s\t%s\t%d\n", SaveN-i, GetQt(QtInfoV30[i].Val2).QtStr.CStr(), GetStr(QtIdUrlH.GetDat(QtInfoV30[i].Val2)), (int) QtInfoV30[i].Val3);
  }
  fclose(F);
  printf("saveT\n");
  // save quote time frequency
  F = fopen("top30T.tab", "wt");
  fprintf(F, "Time\tQuoteId\tSmoothFreq\tRawFreq\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV30.Len()); i++) {
    const TQuote::TTmFltPrV RawFqV = QtInfoV30[i].Val4;
    TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.05, MinTm, MaxTm);  // more smootning

    IAssert(SmoothFqV.Len() >= RawFqV.Len());
    for (int d=0, ds = 0; ds < SmoothFqV.Len(); ds++) {
      if (TmUnit == tmuDay) { fprintf(F, "%s", SmoothFqV[ds].Val1.GetDtYmdStr().CStr()); }
      else { fprintf(F, "%s_%02d", SmoothFqV[ds].Val1.GetDtYmdStr().CStr(), SmoothFqV[ds].Val1.GetHourN()); }
      fprintf(F, "\t%d\t%.2f", SaveN-i, SmoothFqV[ds].Val2());
      if (d < RawFqV.Len() && SmoothFqV[ds].Val1 == RawFqV[d].Val1) { fprintf(F, "\t%.0f", RawFqV[d].Val2()); d++; } else { fprintf(F, "\t0"); }
      fprintf(F, "\n");
    }
  }
  fclose(F);
  printf("Top %d Done.\n", SaveN);

  SaveN = 50;
  // save quote info
  F = fopen("top50Q.tab", "wt");
  fprintf(F, "QuoteId\tQuote\tUrl\tTotalVol\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV50.Len()); i++) {
    fprintf(F, "%d\t%s\t%s\t%d\n", SaveN-i, GetQt(QtInfoV50[i].Val2).QtStr.CStr(), GetStr(QtIdUrlH.GetDat(QtInfoV50[i].Val2)), (int) QtInfoV50[i].Val3);
  }
  fclose(F);
  printf("saveT\n");
  // save quote time frequency
  F = fopen("top50T.tab", "wt");
  fprintf(F, "Time\tQuoteId\tSmoothFreq\tRawFreq\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV50.Len()); i++) {
    const TQuote::TTmFltPrV RawFqV = QtInfoV50[i].Val4;
    TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.05, MinTm, MaxTm);  // more smootning

    IAssert(SmoothFqV.Len() >= RawFqV.Len());
    for (int d=0, ds = 0; ds < SmoothFqV.Len(); ds++) {
      if (TmUnit == tmuDay) { fprintf(F, "%s", SmoothFqV[ds].Val1.GetDtYmdStr().CStr()); }
      else { fprintf(F, "%s_%02d", SmoothFqV[ds].Val1.GetDtYmdStr().CStr(), SmoothFqV[ds].Val1.GetHourN()); }
      fprintf(F, "\t%d\t%.2f", SaveN-i, SmoothFqV[ds].Val2());
      if (d < RawFqV.Len() && SmoothFqV[ds].Val1 == RawFqV[d].Val1) { fprintf(F, "\t%.0f", RawFqV[d].Val2()); d++; } else { fprintf(F, "\t0"); }
      fprintf(F, "\n");
    }
  }
  fclose(F);
  printf("Top %d Done.\n", SaveN);

  SaveN = 100;
  // save quote info
  F = fopen("top100Q.tab", "wt");
  fprintf(F, "QuoteId\tQuote\tUrl\tTotalVol\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV.Len()); i++) {
    fprintf(F, "%d\t%s\t%s\t%d\n", SaveN-i, GetQt(QtInfoV[i].Val2).QtStr.CStr(), GetStr(QtIdUrlH.GetDat(QtInfoV[i].Val2)), (int) QtInfoV[i].Val3);
  }
  fclose(F);
  printf("saveT\n");
  // save quote time frequency
  F = fopen("top100T.tab", "wt");
  fprintf(F, "Time\tQuoteId\tSmoothFreq\tRawFreq\n");
  for (int i = 0; i < TMath::Mn(SaveN, QtInfoV.Len()); i++) {
    const TQuote::TTmFltPrV RawFqV = QtInfoV[i].Val4;
    TQuote::GetSmoothFqOt(SmoothFqV, RawFqV, TmUnit, 6*24*3600/TmUnitSecs, 1.05, MinTm, MaxTm);  // more smootning

    IAssert(SmoothFqV.Len() >= RawFqV.Len());
    for (int d=0, ds = 0; ds < SmoothFqV.Len(); ds++) {
      if (TmUnit == tmuDay) { fprintf(F, "%s", SmoothFqV[ds].Val1.GetDtYmdStr().CStr()); }
      else { fprintf(F, "%s_%02d", SmoothFqV[ds].Val1.GetDtYmdStr().CStr(), SmoothFqV[ds].Val1.GetHourN()); }
      fprintf(F, "\t%d\t%.2f", SaveN-i, SmoothFqV[ds].Val2());
      if (d < RawFqV.Len() && SmoothFqV[ds].Val1 == RawFqV[d].Val1) { fprintf(F, "\t%.0f", RawFqV[d].Val2()); d++; } else { fprintf(F, "\t0"); }
      fprintf(F, "\n");
    }
  }
  fclose(F);
  printf("Top %d Done.\n", SaveN);

  printf("done.\n");
}

void TQuoteBs::SaveDomainStat(const TStr& OutFNm, const int& MinCnt) const {
  THash<TInt, TInt> MedOt, BlogOt, AllOt;
  int MedCnt=0, BlogCnt=0, AllCnt=0;
  TSecTm BegTm(2008,7,31,0,0,0);
  THash<TStr, TIntPr> DomCntH;
  for (int q = 0; q < QuoteH.Len(); q++) {
    const TQuote& Q = QuoteH[q];
    for (int u = 0; u < Q.GetUrls(); u++) {
      const int Day = TSecTm(Q.GetTm(u)-BegTm).GetInUnits(tmuDay);
      AllOt.AddDat(Day) += 1;  AllCnt++;
      if (GetUrlTy(Q.GetUrlId(u)) ==  utMedia) { MedOt.AddDat(Day) += 1;  MedCnt++;}
      else { BlogOt.AddDat(Day) += 1;  BlogCnt++; }
      TStr Dom = TStrUtil::GetDomNm2(GetStr(Q.GetUrlId(u)));
      DomCntH.AddDat(Dom).Val1 += 1;
      DomCntH.AddDat(Dom).Val2 += Q.GetCnt(u);
    }
  }
  TGnuPlot::PlotValCntH(AllOt, "ALL", MedOt, "MEDIA", BlogOt, "BLOGS", "overTm-"+OutFNm, TStr::Fmt("Quote freq over time: A:%d M:%d B:%d", AllCnt, MedCnt, BlogCnt),
    "Time [days]", "Number of quote mentions");
  DomCntH.SortByDat(false);
  FILE *F = fopen(TStr::Fmt("domains-%s.tab", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "# %d domains, total mentions:\tAll:%d\tMedia:%d\tBlogs:%d", DomCntH.Len(), AllCnt, MedCnt, BlogCnt);
  fprintf(F, "#Domain\tNumber of quotes mentioned\tNumber of mentions\n");
  for (int d = 0; d < DomCntH.Len(); d++) {
    fprintf(F, "%s\t%d\t%d\n", DomCntH.GetKey(d).CStr(), DomCntH[d].Val1(), DomCntH[d].Val2());
  }
  fclose(F);
}

void TQuoteBs::Dump(const bool& Fast) const {
  printf("Quote base:\n");
  if (! Fast) {
    TSecTm MinTm, MaxTm; GetMinMaxTm(MinTm, MaxTm);
    printf("  %s -- %s\n", MinTm.GetYmdTmStr().CStr(), MaxTm.GetYmdTmStr().CStr());
    int totCnt=0, totVol=0;
    for (int q = 0; q < QuoteH.Len(); q++) {
      const TQuote::TTmUrlCntV& V = QuoteH[q].TmUrlCntV;
      totCnt += V.Len();
      for (int u = 0; u < V.Len(); u++) {
        totVol += V[u].Cnt();
      }
    }
    printf("  %d number of posts mentioning quotes\n", totCnt);
    printf("  %d number of mentions\n", totVol);
  }
  printf("  %d quotes\n", Len());
  printf("  %d strings (quotes+urls, no domains)\n", StrQtIdH.Len());
  printf("  %d clusters\n", ClustQtIdVH.Len());
  printf("  %d quotes in clusters\n", GetQtsInClust());
  printf("  %d urls with type\n\n", UrlTyH.Len());
}

// Build QuoteBs of quotes in QuoteStrV but count their occurences in free text
PQuoteBs TQuoteBs::FindQtInTxtQBs(const TStr& InBinFNmWc, const TStrV& QuoteStrV) {
  THashSet<TMd5Sig> SeenUrlH; // skip duplicate urls
  PQuoteBs QtBsPt = TQuoteBs::New();
  TQuoteBs& QtBs = *QtBsPt;
  TStrHash<TInt> StrH;     // word -> word-id dictionary shared by quotes and post text
  TVec<TIntV> QtWIdVV;     // per quote: its word-id sequence
  TVec<TIntV> QtSkipLenVV; // if i-th word does not match, how much can we skip
  // Pre-process each query quote: tokenize into word ids and build a
  // Boyer-Moore/KMP-style shift table for the scan below.
  for (int q = 0; q < QuoteStrV.Len(); q++) {
    const int QId = QtBs.QuoteH.Len(); //QtBs.AddStr(QuoteStrV[q].CStr());
    TQuote& Qt = QtBs.QuoteH.AddDat(QId);
    Qt.QtCIdTy = TQuote::TQtIdTy(QId, qtQuote);
    Qt.QtStr = QuoteStrV[q];
    QtWIdVV.Add();  QtSkipLenVV.Add();
    TStrUtil::GetAddWIdV(StrH, QuoteStrV[q].CStr(), QtWIdVV.Last());
    TIntV& WIdV = QtWIdVV.Last();
    TIntV& SkipV = QtSkipLenVV.Last();
    SkipV.Add(1);
    for (int w = 1; w < WIdV.Len(); w++) {
      // shift until the word that mismatched could align with an earlier equal word
      int skipLen = 1;
      while (w-skipLen >= 0 && WIdV[w-skipLen] != WIdV[w]) { skipLen++; }
      SkipV.Add(skipLen);
    }
  }
  TInt WId;
  TIntV TxtWIdV;
  TVec<char *> WrdV;
  TQuoteLoader QL(InBinFNmWc);
  //Fail; //!! chage so that we read the data directly from spinn3r parser
  while (QL.Next()) {
    // process each post url only once
    if (SeenUrlH.IsKey(TMd5Sig(QL.PostUrlStr))) { continue; }
    SeenUrlH.AddKey(TMd5Sig(QL.PostUrlStr));
    // content word id vector (skip unknown words by insering -1)
    TxtWIdV.Clr();  WrdV.Clr(false);
    { TStrUtil::SplitWords(QL.ContentStr, WrdV);
    for (int w = 0; w < WrdV.Len(); w++) {
      if (StrH.IsKeyGetDat(WrdV[w], WId)) { TxtWIdV.Add(WId); }
      else if (TxtWIdV.Len()>0 && TxtWIdV.Last()!=-1) { TxtWIdV.Add(-1); }
    } }
    // count number of occurences
    for (int q = 0; q < QtWIdVV.Len(); q++) {
      const TIntV& QWIdV = QtWIdVV[q];
      const TIntV& SkipV = QtSkipLenVV[q];
      int cnt = 0;
      // NOTE(review): the strict '<' bound means a quote aligned flush with the
      // very end of the text is never tested -- looks like an off-by-one
      // ('<=' would cover it); confirm before changing since counts would shift.
      for (int w = 0; w < TxtWIdV.Len()-QWIdV.Len(); ) {
        int len = 0;
        while (len < QWIdV.Len() && TxtWIdV[w+len] == QWIdV[len]) { len++; }
        if (len == QWIdV.Len()) { cnt+=1;  w+=len; }  // full match: jump past the occurrence
        else { w += SkipV[len]; }  // mismatch at word 'len': shift by the skip table
      }
      if (cnt > 0) {
        QtBs.GetQtN(q).TmUrlCntV.Add(TQuote::TTmUrlCnt(QL.PubTm, QtBs.AddStr(QL.PostUrlStr), cnt));
        if (cnt > 255) { printf("*"); }  // flag very large counts (255 suggests a byte-sized store -- confirm)
      }
    }
  }
  // keep each quote's mentions sorted by time
  for (int i = 0; i < QtBs.Len(); i++) { QtBs.QuoteH[i].TmUrlCntV.Sort(); }
  return QtBsPt;
}

// Remove all punctuations in the quotes and replace with space
void TQuoteBs::QuoteFilter(TStr &QtStr) {
	// Tokenize on punctuation/whitespace, then re-join the tokens with single
	// spaces so the quote text contains words only.
	TStrV TokV;
	QtStr.SplitOnAllAnyCh(" ?!()@#=&,.<>/\\:\";{}|", TokV);
	QtStr.Clr();
	for (int t = 0; t < TokV.Len(); t++) {
		if (t > 0) { QtStr.InsStr(QtStr.Len(), " "); }
		QtStr.InsStr(QtStr.Len(), TokV[t]);
	}
}

// Check the file list, remove files that cannot be opened
void TQuoteBs::GetFileList(TStr InFNm, TChAV& filelist) {
	// Load candidate file names, one per line, from InFNm.
	// (Removed unused local 'TChA Ln'.)
	PSIn InFNmF = TFIn::New(InFNm);
	TStr FNm;
	while (! InFNmF->Eof() && InFNmF->GetNextLn(FNm)) {
		filelist.Add(FNm);
	}
	// Probe each file; collect the ones that cannot be opened.
	TChAV DelList;
	for (int i = 0; i < filelist.Len(); i++) {
		FILE *fin = fopen(filelist[i].CStr(), "r");
		if (fin == NULL) {
			printf("Error reading file %s, ignore...\n", filelist[i].CStr());
			DelList.Add(filelist[i]);
		} else {
			fclose(fin);
		}
	}
	// Drop the unreadable files from the list.
	for (int i = 0; i < DelList.Len(); i++) {
		filelist.DelIfIn(DelList[i]);
	}
}

// Filter out quotes that are too short or that are not words
bool TQuoteBs::IsValidQuote(const TQuote& Q, const int MnWrdLen, const int MxWrdLen) {
	char* CStr = Q.GetStr().CStr();
	int WrdCnt = 1, AlphabetCnt = 0, NonAlphabetCnt = 0;
	for (const char *c = CStr; *c; c++) {
		if (TCh::IsWs(*c)) 
			WrdCnt++;
		else if ('a'<=(*c) && (*c)<='z' || 'A'<=(*c) && (*c)<='Z')
			AlphabetCnt++;
		else
			NonAlphabetCnt++;
	}
	return (WrdCnt >= MnWrdLen && WrdCnt <= MxWrdLen && AlphabetCnt > NonAlphabetCnt*2 && AlphabetCnt >= WrdCnt*3);
}

//if there is redirect, pick the redirected url
//discard the url if there is any "facebook.com" or "twitter.com" in it (return false)
//domain is the second part delimited by '/'  (http://xxxx.xxxx.xxxxxx/...., here, xxxx.xxxx.xxxxxx is the domain)
bool TQuoteBs::CleanURL(TChA &UrlStr) {
	// Discard social-network urls outright.
	if ((UrlStr.SearchStr("facebook.com", 0)!=-1)||(UrlStr.SearchStr("twitter.com", 0)!=-1))
		return false;

	// Discard direct links to images (EqI is case-insensitive, so one test per
	// extension suffices; the original also tested the upper-case variants).
	if (UrlStr.Len()>4) {
		TStr tail = UrlStr.GetSubStr(UrlStr.Len()-4,UrlStr.Len()-1);
		if (tail.EqI(".jpg")||tail.EqI(".png")||tail.EqI(".gif"))
			return false;
	}

	// Decode common percent-escapes so embedded redirect targets become visible.
	TStr url(UrlStr);
	url.ChangeStrAll("%3A", ":"); url.ChangeStrAll("%2F", "/"); url.ChangeStrAll("%3a", ":"); url.ChangeStrAll("%2f", "/");
	url.ChangeStrAll("%2E", "."); url.ChangeStrAll("%3D", "="); url.ChangeStrAll("%3F", "?"); url.ChangeStrAll("%26", "&");

	// Find the LAST occurrence of "http://" after position 0; if one exists the
	// url embeds a redirect target and we keep only that target.
	// BUG FIX: the original while(true) loop never terminated for urls of
	// length <= 1 (the guarded SearchStr was skipped and LastHttpPos grew forever).
	int LastHttpPos = 0;
	int p = (url.Len() > 1) ? url.SearchStr("http://", 1) : -1;
	while (p != -1) {
		LastHttpPos = p;
		p = url.SearchStr("http://", p+1);
	}
	if (LastHttpPos > 0) {
		// Keep the redirect target and strip its query string.
		url = url.GetSubStr(LastHttpPos,url.Len()-1);
		TStrV strV;
		url.SplitOnAllAnyCh("?=", strV);
		url = strV[0];
	}

	// Require at least a protocol and a domain component.
	TStrV strV;
	url.SplitOnAllAnyCh("/?=&", strV);
	if (strV.Len()<2)
		return false;
	UrlStr = TChA(url);
	return true;
}

// Filter quote base with blacklist
void TQuoteBs::FilterBlackListQuotes(TStr BlackListFNm) {
	// Clear the mention vector of every quote whose (trimmed) text appears
	// verbatim in the blacklist file (one quote per line).
	TFIn FIn(BlackListFNm);
	TStr Ln;
	while (FIn.GetNextLn(Ln)) {
		const int QtId = GetQtId(Ln.ToTrunc().CStr());
		if (QtId != -1) {
			QuoteH[QtId].TmUrlCntV.Clr(false);
		}
	}
}

// Determine whether two quotes are in the same cluster
bool TQuoteBs::IsLinkPhrases(const int& QtN1, const int& QtN2, THash<TInt, TIntV>& QtToWordIdVH) {
  // Two phrases link when their longest common word subsequence covers
  // (essentially all of) the shorter phrase.
  int idx1=0, idx2=0, SkipTy=0;
  const TIntV& WIdV1 = QtToWordIdVH.GetDat(QtN1);
  const TIntV& WIdV2 = QtToWordIdVH.GetDat(QtN2);
  const int ShortLen = TMath::Mn(WIdV1.Len(), WIdV2.Len());
  const int Overlap = TQuoteBs::LongestCmnSubSq(WIdV1, WIdV2, idx1, idx2, SkipTy);
  // Since Overlap <= ShortLen, these guards are equivalent to the original
  // if/else-if chain (the fall-through cases were unreachable).
  if (ShortLen <= 5) { return Overlap == ShortLen; }  // short phrase: require full overlap
  if (ShortLen == 6) { return Overlap >= 5; }         // allow one missing word
  return Overlap/double(ShortLen+3) > 0.5 || Overlap > 10;  // longer: relative or absolute overlap
}

// Determine whether two quotes are in the same cluster, more strict criterion
bool TQuoteBs::IsLinkPhrases2(const int& QtN1, const int& QtN2, THash<TInt, TIntV>& QtToWordIdVH) {
  // Stricter variant of IsLinkPhrases, used when refining clusters.
  int idx1=0, idx2=0, SkipTy=0;
  const TIntV& WIdV1 = QtToWordIdVH.GetDat(QtN1);
  const TIntV& WIdV2 = QtToWordIdVH.GetDat(QtN2);
  const int ShortLen = TMath::Mn(WIdV1.Len(), WIdV2.Len());
  const int Overlap = TQuoteBs::LongestCmnSubSq(WIdV1, WIdV2, idx1, idx2, SkipTy);
  if (ShortLen <= 8) {
    return Overlap >= ShortLen;  // short phrase: full overlap required
  }
  return Overlap >= ShortLen - 1 || Overlap >= 10;  // long phrase: at most one word missing
}

// Compute the quote distance between two quotes in the same 4-shingle bucket
double TQuoteBs::QuoteDistance(TInt Qt1, TInt Qt2, THash<TInt, TIntV>& QtToWordIdVH) {
	// Distance decreases as the common word subsequence grows. Both quotes share
	// a 4-shingle, so the first 4 words of overlap are discounted up front.
	int idx1 = 0, idx2 = 0, SkipTy = 0;
	const TIntV& WIdV1 = QtToWordIdVH.GetDat(Qt1);
	const TIntV& WIdV2 = QtToWordIdVH.GetDat(Qt2);
	const int Overlap = LongestCmnSubSq(WIdV1, WIdV2, idx1, idx2, SkipTy) - 4;
	const int ShortLen = TMath::Mn(WIdV1.Len(), WIdV2.Len()) - 4;

	Assert(Overlap >= 0);  // guaranteed by the shared 4-shingle
	if (ShortLen <= 0 || Overlap > 6) { return 0; }
	const double Base = 1 - Overlap/double(ShortLen);
	// Large overlaps get an extra discount factor.
	return (Overlap < 2) ? Base : Base * (1 - Overlap / 7.0);
}

// Show how two quotes are connected in a quote cluster, print path between them
void TQuoteBs::ShowPathInfo(int CId, int srcQt, int dstQt, PClustNet& Net, TStr& PathFNm) {
	// BFS (over both in- and out-links) from srcQt until dstQt is found, then
	// walk the parent pointers back and print each hop's quote statistics.
	Assert(GetQtN(srcQt).GetCId() == CId);
	Assert(GetQtN(dstQt).GetCId() == CId);

	THashSet<TInt> InspectedSet;  // visited quote ids
	TIntV NodeQueue;              // BFS frontier (grow-only queue)
	TIntH FatherH;                // child -> parent in the BFS tree

	NodeQueue.Add(srcQt);
	bool FoundDst = false;
	// BUG FIX: bound the scan by the queue length; the original 'while (true)'
	// indexed past the end of NodeQueue whenever dstQt was unreachable.
	for (int idx = 0; idx < NodeQueue.Len(); idx++) {
		const int curQt = NodeQueue[idx].Val;
		InspectedSet.AddKey(curQt);
		TClustNet::TNodeI NI = Net->GetNI(curQt);

		if (NI.GetId() == dstQt) { FoundDst = true; break; }

		// Expand neighbors in both directions (treat links as undirected).
		for (int e = 0; e < NI.GetInDeg(); e++) {
			const int childQt = NI.GetInNDat(e).GetId();
			if (InspectedSet.IsKey(childQt)) continue;
			NodeQueue.Add(childQt);
			FatherH.AddDat(childQt, curQt);
		}
		for (int e = 0; e < NI.GetOutDeg(); e++) {
			const int childQt = NI.GetOutNDat(e).GetId();
			if (InspectedSet.IsKey(childQt)) continue;
			NodeQueue.Add(childQt);
			FatherH.AddDat(childQt, curQt);
		}
	}

	// BUG FIX: the original opened a 'static' FILE* once but fclose()d it at the
	// end of every call, so any second call wrote to a closed stream. Open in
	// append mode to keep the accumulate-across-calls behavior safely.
	FILE* FPath = fopen(PathFNm.CStr(), "a");
	if (FPath == NULL) { return; }
	for (int i = 0; i < 100; i++) fprintf(FPath, "-");
	fprintf(FPath, "\n");

	if (FoundDst) {
		int curQt = dstQt;
		while (curQt != srcQt) {
			const int nextQt = FatherH.GetDat(curQt);
			TQuote& curQ = GetQtN(curQt);
			TQuote& nextQ = GetQtN(nextQt);
			fprintf(FPath, "src: TmMed = %s\tTmDev = %.4f\tCnt = %d\t %s\n", curQ.GetTmMed().GetStr().CStr(), curQ.GetTmDev().Val, curQ.GetFq(), curQ.GetStr().CStr());
			fprintf(FPath, "dst: TmMed = %s\tTmDev = %.4f\tCnt = %d\t %s\n\n", nextQ.GetTmMed().GetStr().CStr(), nextQ.GetTmDev().Val, nextQ.GetFq(), nextQ.GetStr().CStr());
			curQt = nextQt;
		}
	}
	fclose(FPath);
}

// Add elements(2-shingles) of quotes to an element set
void TQuoteBs::AddElemPool(THashSet<TIntPr>& ElemPool, TIntV& QtV, const PQuoteBs QtBs, THash<TInt, TIntV>& QtToWordIdVH) {
	// For every quote in QtV, insert each consecutive word-id pair (2-shingle)
	// into ElemPool.
	for (int q = 0; q < QtV.Len(); q++) {
		const TIntV& WordIdV = QtToWordIdVH.GetDat(QtV[q]);
		for (int w = 0; w+1 < WordIdV.Len(); w++) {
			ElemPool.AddKey(TIntPr(WordIdV[w], WordIdV[w+1]));
		}
	}
}

// Merge two element sets
void TQuoteBs::AddElemPool(THashSet<TIntPr>& ElemPool, THashSet<TIntPr>& CurElemPool) {
	// Set union: add every element of CurElemPool into ElemPool.
	for (int k = 0; k < CurElemPool.Len(); k++) {
		ElemPool.AddKey(CurElemPool[k]);
	}
}

// Decide whether two element sets are similar enough to be merged together
bool ElemPoolCanMerge(THashSet<TIntPr>& ElemPool, THashSet<TIntPr>& CurElemPool, double RefineTresh) {
	// Mergeable when at least one set is almost contained in the other:
	// max containment ratio >= 1 - RefineTresh.
	int CmnIn1 = 0;
	for (int i = 0; i < ElemPool.Len(); i++) {
		if (CurElemPool.IsKey(ElemPool[i])) { CmnIn1++; }
	}
	int CmnIn2 = 0;
	for (int i = 0; i < CurElemPool.Len(); i++) {
		if (ElemPool.IsKey(CurElemPool[i])) { CmnIn2++; }
	}
	const double Ratio1 = CmnIn1 / double(ElemPool.Len());
	const double Ratio2 = CmnIn2 / double(CurElemPool.Len());
	// NOTE(review): an empty set yields 0/0 (NaN) so the comparison is false --
	// confirm callers never pass empty pools.
	return 1 - TMath::Mx(Ratio1, Ratio2) <= RefineTresh;
}

// Record the execution time
void TQuoteBs::RegisterTime(TStr Desc, bool Start, TStrHash<TStr>& TmRecH) {
	// Stopwatch: call once with Start=true to start timing, then again with
	// Start=false to record the elapsed time under Desc.
	// NOTE(review): a single static start time -- not reentrant or thread-safe;
	// nested measurements will overwrite each other.
	static TSecTm BegTm;
	if (Start) {
		BegTm = TSecTm::GetCurTm();
		return;
	}

	const TSecTm EndTm = TSecTm::GetCurTm();
	const int Used = int(EndTm.GetAbsSecs() - BegTm.GetAbsSecs());
	// BUG FIX: dropped the trailing '\n' that was embedded in the stored string;
	// ShowAllTime appends its own newline, so output had blank lines.
	TStr TmStr = TStr::Fmt("%02dh%02dm%02ds", Used/3600, (Used%3600)/60, Used%60);
	TmRecH.AddDat(Desc, TmStr);
}

// Dump execution time of each step
void TQuoteBs::ShowAllTime(TStrHash<TStr>& TmRecH) {
	// Print every recorded step description together with its elapsed time.
	for (int k = 0; k < TmRecH.Len(); k++) {
		printf("%s : %s\n", TmRecH.GetKey(k), TmRecH[k].CStr());
	}
}


// Hash 4-shingles of quotes into hash table
void TQuoteBs::HashingShgls(int MinQtFq, int MnWrdLen, int MxWrdLen, THash<TMd5Sig, TIntV>& ShglQtIdVH, THash<TInt, TIntV>& QtToWordIdVH, TStrHash<TInt>& WordIdH) {
	printf("Hashing shingles...\n");
	for (int qt = 0; qt < Len(); qt++) {
		if (qt % 100000 == 0) 
			printf("%d out of %d completed\n", qt, Len());
		TQuote& Q = GetQtN(qt);
		const int Doms = Q.GetDoms(*this);
		if (! (Doms>1 && Doms*4 > Q.GetUrls() && Q.GetFq() >= MinQtFq && IsValidQuote(Q, MnWrdLen, MxWrdLen))) {
			Q.TmUrlCntV.Clr(false);
			continue; 
		}
		
		/* Add to quote to word id vector hash table */
		TIntV WIdV;
		TStrUtil::GetAddWIdV(WordIdH, Q.GetStr().CStr(), WIdV);
	    if (! QtToWordIdVH.IsKey(qt)) {
			WIdV.Pack();
			QtToWordIdVH.AddDat(qt, WIdV);
		}
		/* Put 4 shingles into hash table */
		TStrV WdV;
		Q.GetStr().SplitOnAllCh(' ', WdV);
		for (int i = 0; i < WdV.Len()-3; i++) {
			TStr SmStr = WdV[i] + " " + WdV[i+1] + " " + WdV[i+2] + " " + WdV[i+3];
			const TMd5Sig SmStrMd5(SmStr);
			ShglQtIdVH.AddDat(SmStrMd5).Add(qt);
		}
	}
}

// Evaluate the quality of each bucket and eliminate bad ones
void TQuoteBs::ElimBadBkt(TStr& BlackListFNm, double BktThresh, THash<TMd5Sig, TIntV>& ShglQtIdVH, THash<TInt, TIntV>& QtToWordIdVH) {
	printf("Filter according to blacklist...\n");
	/* Eliminate bad shingles from blacklist: clear the bucket of every 4-shingle
	   that occurs inside a blacklisted phrase */
	TStr Ln;
	for (TFIn FIn(BlackListFNm); FIn.GetNextLn(Ln); ) {
		TStrV WdV;
		Ln.SplitOnAllCh(' ', WdV);
		for (int i = 0; i < WdV.Len()-3; i++) {
			TStr BlStr = WdV[i] + " " + WdV[i+1] + " " + WdV[i+2] + " " + WdV[i+3];
			const TMd5Sig BlStrMd5(BlStr);
			if (ShglQtIdVH.IsKey(BlStrMd5))
				ShglQtIdVH.GetDat(BlStrMd5).Clr(false);
		}
	}

	printf("Identify and remove bad buckets...\n");

	int iCnt = 0; // progress counter only; races across threads affect log output at most
	#pragma omp parallel for schedule(dynamic, 2000)
	for (int i = 0; i < ShglQtIdVH.Len(); i++) {
		if ((iCnt++) % 100000 == 0) {
			printf("%d out of %d completed\n", iCnt, ShglQtIdVH.Len());
		}
		TIntV& QtIdV = ShglQtIdVH[i];
		// Huge buckets come from stop-word shingles: drop them outright.
		if (QtIdV.Len() > 500) {
			ShglQtIdVH[i].Clr(false);
			continue;
		}

		// Average pairwise quote distance, weighted by url-count products.
		double dist = 0; int cnt = 0;
		for (int q1 = 0; q1 < QtIdV.Len(); q1++)
			for (int q2 = q1+1; q2 < QtIdV.Len(); q2++) {
				const int weight = GetQtN(QtIdV[q1]).GetUrls() * GetQtN(QtIdV[q2]).GetUrls();
				dist += QuoteDistance(QtIdV[q1], QtIdV[q2], QtToWordIdVH) * weight;
				cnt += weight;
			}
		// BUG FIX: buckets with fewer than two quotes (or all-zero weights) gave
		// cnt == 0 and a NaN average; keep such buckets instead of dividing by zero.
		if (cnt == 0) continue;
		dist = dist / cnt;

		// Incoherent buckets (large average distance) are removed.
		if (dist > BktThresh)
			ShglQtIdVH[i].Clr(false);
	}
}

// Add links for quotes in each buckets
void TQuoteBs::BktAddLink(PClustNet& Net, THash<TMd5Sig, TIntV>& ShglQtIdVH, THash<TInt, TIntV>& QtToWordIdVH) {
	printf("Start adding links in each bucket\n");
	int MxBinSize = 0;
	// The network needs a node for every quote up front.
	for (int i = 0; i < QuoteH.Len(); i++)
		Net->AddNode(i);

	int iCnt = 0; // progress counter only; races across threads affect log output at most
#pragma omp parallel for schedule(dynamic, 1000)
	for (int i = 0; i < ShglQtIdVH.Len(); i++) {
		if (iCnt++ % 100000 == 0) {
			printf("%d out of %d completed\n", iCnt, ShglQtIdVH.Len());
		}
		TIntV& QtV = ShglQtIdVH[i];

		// Test all quote pairs in the bucket; link from the quote with the
		// shorter word-id vector to the longer one.
		TIntPrV AddEdgeV;
		for (int qt1 = 0; qt1 < QtV.Len(); qt1++)
			for (int qt2 = qt1 + 1; qt2 < QtV.Len(); qt2++) {
				int Sm, Lg;
				if (QtToWordIdVH.GetDat(QtV[qt1]).Len() <= QtToWordIdVH.GetDat(QtV[qt2]).Len()) {
					Sm = QtV[qt1]; Lg = QtV[qt2];
				} else {
					Sm = QtV[qt2]; Lg = QtV[qt1];
				}

				if (IsLinkPhrases(Sm, Lg, QtToWordIdVH)) {    // edit distance computation
					AddEdgeV.Add(TIntPr(Sm, Lg));
				}
			}

		#pragma omp critical
		{
			// BUG FIX: MxBinSize was read-modify-written outside any synchronization
			// (a data race under OpenMP); update it inside the critical section.
			if (QtV.Len() > MxBinSize)
				MxBinSize = QtV.Len();
			for (int j = 0; j < AddEdgeV.Len(); j++) {
				TQuote& Q1 = GetQtN(AddEdgeV[j].Val1);
				TQuote& Q2 = GetQtN(AddEdgeV[j].Val2);
				Net->AddLink(Q1, Q2);
			}
		}
	}
	printf("Shingle hash table size = %d\n", ShglQtIdVH.Len());
	printf("Maximum hash bin size = %d\n", MxBinSize);
}

// Evaluate the quality of each link in the graph
void TQuoteBs::EvaluateLinks(PClustNet& Net, double MxTmDelay, double MxTmDev) {
	// Remove links whose endpoints' median mention times are more than
	// MxTmDelay days apart; time-scattered sources keep only one best out-link.
	printf("Pre-computation of quote stats...\n");
	// Parallel pre-computation of per-quote time median/deviation.
	THashSet<TInt> QtSet;
	for (TClustNet::TNodeI NI = Net->BegNI(); NI < Net->EndNI(); NI++)
		QtSet.AddKey(NI.GetId());

	#pragma omp parallel for 
	for (int i = 0; i < QtSet.Len(); i++) {
		GetQtN(QtSet[i]).GetTmMed();
		GetQtN(QtSet[i]).GetTmDev();
	}

	printf("Finding bad edges according to time stamp information...\n");
	THashSet<TIntPr> DelEdge;
	for (TClustNet::TNodeI NI = Net->BegNI(); NI < Net->EndNI(); NI++) {
		const int srcN = NI.GetId();
		TQuote& srcQ = GetQtN(srcN);
		if (srcQ.GetTmDev() <= MxTmDev) {
			// Time-coherent source: delete every out-link with too large a delay.
			for (int e = 0; e < NI.GetOutDeg(); e++) {
				TQuote& dstQ = NI.GetOutNDat(e);
				if (fabs(double(dstQ.GetTmMed().GetAbsSecs()) - double(srcQ.GetTmMed().GetAbsSecs())) / 86400.0 > MxTmDelay) {
					DelEdge.AddKey(TIntPr(NI.GetId(), NI.GetOutNId(e)));
				}
			}
		} else {
			// Time-scattered source: keep only the timely out-link with most urls.
			if (NI.GetOutDeg() <= 1) continue;
			double BestVal = 0;
			int EdgeToKeep = 0; // falls back to edge 0 when no edge is timely
			for (int e = 0; e < NI.GetOutDeg(); e++) {
				TQuote& dstQ = NI.GetOutNDat(e);
				if (fabs(double(dstQ.GetTmMed().GetAbsSecs()) - double(srcQ.GetTmMed().GetAbsSecs())) / 86400.0 <= MxTmDelay)
					// BUG FIX: the comparison used GetUrls() but BestVal was updated
					// with GetTimes(), so the running maximum was inconsistent; use
					// GetUrls() for both.
					if (NI.GetOutNDat(e).GetUrls() > BestVal) { EdgeToKeep=e; BestVal=NI.GetOutNDat(e).GetUrls(); }
			}
			for (int e = 0; e < NI.GetOutDeg(); e++)  {
				if (e == EdgeToKeep) continue;
				DelEdge.AddKey(TIntPr(NI.GetId(), NI.GetOutNId(e)));
			}
		}
	}

	// Delete links
	for (int i = 0; i < DelEdge.Len(); i++)
		Net->DelEdge(DelEdge[i].Val1, DelEdge[i].Val2);
	printf("Evaluation of links done\n");
}

// Refine topic cluster by further partitioning in each cluster using more strict criterion
void TQuoteBs::RefineCluster(PClustNet& Net, double RefineThresh, THash<TInt, TIntV>& QtToWordIdVH) {
	printf("Refining quote clustering result\n");
	TIntH QtClustH;    // quote id -> index of its refined sub-cluster (within its original cluster)
	TIntPrV DelEdge;   // edges whose endpoints end up in different sub-clusters
	TVec<TIntV> QtVV;  // quote ids of each current cluster

	Net->GetClusters(QtVV);
	// Refine each cluster
	for (int qtVId = 0; qtVId < QtVV.Len(); qtVId++) {
		TIntV& QtV = QtVV[qtVId];
		// Build a sub-network containing only this cluster's quotes.
		PClustNet CNet = TClustNet::New();
		for (int i = 0; i < QtV.Len(); i++) {
			const TQuote& Qt = GetQtN(QtV[i]);
			CNet->AddNode(QtV[i], Qt);
		}

		// For each cluster, use more strict criterion to create links:
		// re-test every existing edge with IsLinkPhrases2.
		// NOTE(review): the <=/>= below compare word-id VECTORS (lexicographic
		// TVec comparison), while BktAddLink ordered endpoints by .Len() --
		// confirm the vector comparison is intended here.
		for (int i = 0; i < QtV.Len(); i++) {
			const TQuote& Qi = GetQtN(QtV[i]);
			for (int j = i+1; j < QtV.Len(); j++) {
				const TQuote& Qj = GetQtN(QtV[j]);
				if (QtToWordIdVH.GetDat(QtV[i]) <= QtToWordIdVH.GetDat(QtV[j])) {
					if (Net->IsEdge(QtV[i],QtV[j]))
						if (IsLinkPhrases2(QtV[i], QtV[j], QtToWordIdVH))
							CNet->AddLink(Qi, Qj);
				}
				if (QtToWordIdVH.GetDat(QtV[i]) >= QtToWordIdVH.GetDat(QtV[j])) {
					if (Net->IsEdge(QtV[j],QtV[i]))
						if (IsLinkPhrases2(QtV[j], QtV[i], QtToWordIdVH))
							CNet->AddLink(Qj, Qi);
				}
			}
		}

		TVec<TIntV> ClustV;
		CNet->GetClusters(ClustV);
		// Sub-network still connected under the strict criterion: nothing to refine.
		if (ClustV.Len() == 1) 
			continue;

		TVec<THashSet<TIntPr> > ElemPool;  // 2-shingle set of each merged sub-cluster
		TVec<TIntV>	ClustVec;              // quote ids of each merged sub-cluster

		// For the several sub-clusters, try to merge them
		for (int i = 0; i < ClustV.Len(); i++) {
			THashSet<TIntPr> CurElemPool;
			AddElemPool(CurElemPool, ClustV[i], this, QtToWordIdVH);
			bool Merged = false;
			// Merge into the first existing sub-cluster whose shingle set is similar enough.
			for (int k = 0; k < ClustVec.Len(); k++) {
				if (ElemPoolCanMerge(ElemPool[k], CurElemPool, RefineThresh)) {
					AddElemPool(ElemPool[k], CurElemPool);
					ClustVec[k].AddV(ClustV[i]);
					Merged = true;
					break;
				}
			}
			if (Merged) continue;

			// Start a new cluster 
			ElemPool.Add(CurElemPool);
			ClustVec.Add(ClustV[i]);
		}

		// Record the final sub-cluster index of every quote in this cluster.
		for (int k = 0; k < ClustVec.Len(); k++) {
			for (int i = 0; i < ClustVec[k].Len(); i++) {
				QtClustH.AddDat(ClustVec[k][i]) = k;
			}
		}
	}
	
	// Record edges that are not in the same components for later deletion
	for (TClustNet::TEdgeI EI = Net->BegEI(); EI < Net->EndEI(); EI++) {
		int srcQtId = EI.GetSrcNId();
		int dstQtId = EI.GetDstNId();
		if (!QtClustH.IsKey(srcQtId)) continue;  // cluster was not refined
		if (!QtClustH.IsKey(dstQtId)) continue;
		if (QtClustH.GetDat(srcQtId) != QtClustH.GetDat(dstQtId))
			DelEdge.Add(TIntPr(srcQtId, dstQtId));
	}

	// Delete edges
	for (int i = 0; i < DelEdge.Len(); i++) 
		Net->DelEdge(DelEdge[i].Val1, DelEdge[i].Val2);
	printf("DONE\n");
	return;
}

// Filter the quote with blacklist and refine the TmUrlCntV by removing duplicates
void TQuoteBs::RefineQtBs(TStr& BlackListFNm, int MinQtFq,int MnWrdLen, int MxWrdLen) {
	// Load the set of banned domains (stored hashed) from a fixed side file.
	THashSet<TMd5Sig> BanDomSet;
	TFIn FIn("TEST_BanDom.txt");
	for (TStr Ln; FIn.GetNextLn(Ln);) 
		BanDomSet.AddKey(TMd5Sig(Ln.GetStr()));

	printf("Refining Quote Base\n");
	FilterBlackListQuotes(BlackListFNm);
	for (int i = 0; i < QuoteH.Len(); i++) {
		if (i % 10000 == 0) printf("%d out of %d completed\n", i, QuoteH.Len());
		TQuote& Qt = QuoteH[i];
		Qt.TmUrlCntV.Sort(true);  // mentions in time order for the windowed de-duplication below
		// Drop quotes that are rare, confined to few domains, or fail the
		// word-shape filter (same criterion as HashingShgls).
		const int Doms = Qt.GetDoms(*this);
		if (! (Doms>1 && Doms*4 > Qt.GetUrls() && Qt.GetFq() >= MinQtFq && IsValidQuote(Qt, MnWrdLen, MxWrdLen))) {
			Qt.TmUrlCntV.Clr(false);
			continue;
		}

		TVec<TQuote::TTmUrlCnt> RecV;  // mentions kept after de-duplication
		THash<TChA, TSecTm> DomTmH;    // domain -> time of its last kept mention
		THash<TChA, TInt> DomCntH;     // domain -> total mention count (for spam detection)
		TSecTm LastTm(0);
		// One week before the median mention time; used to trim early noise.
		TSecTm MemeBegTm(Qt.TmUrlCntV[Qt.TmUrlCntV.Len()/2].Tm().GetAbsSecs() - 3600 * 24 * 7);
		for (int j = 0; j < Qt.TmUrlCntV.Len(); j++) {
			if (Qt.GetTmDev() < 7 && MemeBegTm > Qt.TmUrlCntV[j].Tm()) continue; // Remove beginning noise
			TChA UrlStr = TChA(GetStr(Qt.TmUrlCntV[j].UrlId()));
			TSecTm CurTm = Qt.TmUrlCntV[j].Tm();
			TChA DomNm = TStrUtil::GetDomNm2(UrlStr);
			if (DomNm.IsPrefix(".")) DomNm = DomNm.GetSubStr(1, TInt::Mx);  // strip leading dot
			if (BanDomSet.IsKey(TMd5Sig(DomNm))) continue; // Clear the banned domains
			DomCntH.AddDat(DomNm) ++;
			// De-duplicate: skip mentions at an identical timestamp, and repeated
			// mentions from the same domain within a 6-hour window.
			if (CurTm.GetAbsSecs() == LastTm.GetAbsSecs()) continue;
			if (!DomTmH.IsKey(DomNm) || (CurTm.GetAbsSecs() - DomTmH.GetDat(DomNm).GetAbsSecs() > 6 * 60 * 60)) {
				DomTmH.AddDat(DomNm) = CurTm;
				RecV.Add(Qt.TmUrlCntV[j]);
				LastTm = CurTm;
			}
		}
		// Find out spam domain (more than 25 mentions of a single quote)
		THashSet<TChA> SpamDomSet;
		for (int j = 0; j < DomCntH.Len(); j++)
			if (DomCntH[j] > 25) SpamDomSet.AddKey(DomCntH.GetKey(j));

		// Filter out spam domains
		Qt.TmUrlCntV.Clr(false);
		for (int j = 0; j < RecV.Len(); j++) {
			TChA UrlStr = TChA(GetStr(RecV[j].UrlId()));
			TChA DomNm = TStrUtil::GetDomNm2(UrlStr);
			if (DomNm.IsPrefix(".")) DomNm = DomNm.GetSubStr(1, TInt::Mx);
			if (SpamDomSet.IsKey(DomNm)) continue;
			Qt.TmUrlCntV.Add(RecV[j]);
		}
	}
}

// Get time range from the file name, Zarya convention
void GetZaryaTm(TChA& FNm, TSecTm& MnTm, TSecTm& MxTm) {
	// Parse the begin (MnTm) and end (MxTm) timestamps out of a Zarya file
	// name. The name is expected to contain two stamps shaped like
	// "YYYY-MM-DDTHH" (ten characters of date before each 'T', two hour
	// digits after it); the fixed offsets below index into that layout.
	// NOTE(review): assumes both 'T' markers exist in FNm; SearchCh
	// returning -1 on a malformed name would make the offsets negative --
	// confirm inputs always follow the convention.
	int p = FNm.SearchCh('T');
	TChA YearStr, MonthStr, DateStr, HourStr;
	int Year, Month, Date, Hour;
	// First stamp (range start), around the first 'T'.
	YearStr = FNm.GetSubStr(p-10, p-7); MonthStr = FNm.GetSubStr(p-5, p-4); DateStr = FNm.GetSubStr(p-2, p-1); HourStr = FNm.GetSubStr(p+1, p+2);
	Year = atoi(YearStr.CStr()); Month = atoi(MonthStr.CStr()); Date = atoi(DateStr.CStr()); Hour = atoi(HourStr.CStr());
	MnTm = TSecTm(Year, Month, Date, Hour);
	// Second stamp (range end), around the next 'T'.
	p = FNm.SearchCh('T', p+1);
	YearStr = FNm.GetSubStr(p-10, p-7); MonthStr = FNm.GetSubStr(p-5, p-4); DateStr = FNm.GetSubStr(p-2, p-1); HourStr = FNm.GetSubStr(p+1, p+2);
	Year = atoi(YearStr.CStr()); Month = atoi(MonthStr.CStr()); Date = atoi(DateStr.CStr()); Hour = atoi(HourStr.CStr());
	MxTm = TSecTm(Year, Month, Date, Hour);
}

// Construct quote base from spinn3r data on Zarya
void TQuoteBs::ConstructQtBsZarya(TStr InFNm, TStr Pref, TStr MediaUrlFNm, TSecTm MinTm, TSecTm MaxTm, int MinWrdLen, int MinMemeFq, int HSize, int UrlSize) {
	// Two-phase, OpenMP-parallel construction of the quote base from
	// spinn3r/Zarya files:
	//   Phase 1: per file, count quote occurrences (by MD5 signature) and
	//            record each post URL with its publication time, skipping
	//            facebook/twitter posts and posts outside the file's own
	//            time window. Remembers which record indices survived
	//            (VIdxVV) so phase 2 can skip the rest.
	//   Phase 2: re-read the files, keep only posts containing at least one
	//            frequent quote (>= MinMemeFq) inside [MinTm, MaxTm], and
	//            add them to the quote base, crediting each URL only once.
	// Shared hash tables are updated inside omp critical sections.
	TChAV filelist;
	GetFileList(InFNm, filelist);

	printf("Start constructing quote base...\n");
  int NSkip = 0, fileCnt = 0;
	THash<TMd5Sig, TInt> MemeCntH(Mega(HSize), true);
  //THashSet<TMd5Sig> SeenUrlSet(Mega(UrlSize), true);
	// URL signature -> publication time; used for URL de-duplication.
	THash<TMd5Sig, TSecTm> SeenUrlSet(Mega(UrlSize), true);
	TVec<TIntV> VIdxVV(filelist.Len());

	// For parallel processing compatibility
	TZipIn::IsZipExt("rar");

#if 1
	FILE *FLog = fopen("FileLog.txt", "w");
	// Read files
	#pragma omp parallel for schedule(dynamic)
	for (int i = 0; i < filelist.Len(); i++) {
		TMemesDataLoader Memes;
		TVec<TMd5SigV> QtStrVV;
		TMd5SigV UrlSigV;
		TSecTmV PubTmV;
		TIntV IdxV;
		int idx = -1;
		// Compute local time range 
		TSecTm FileMnTm, FileMxTm; 
		GetZaryaTm(filelist[i], FileMnTm, FileMxTm);

		#pragma omp critical
		Memes.LoadFile(filelist[i]);

		while (Memes.LoadNext()) {    
			idx++;
			if (strstr(Memes.PostUrlStr.CStr(), "facebook.com") != NULL) continue;
			if (strstr(Memes.PostUrlStr.CStr(), "twitter.com") != NULL) continue;
			// Drop posts whose timestamp falls outside the file's window
			// (with a 30-minute slack on each side).
			if (Memes.PubTm.GetAbsSecs() < FileMnTm.GetAbsSecs() - 0.5 * 3600 || Memes.PubTm.GetAbsSecs() > FileMxTm.GetAbsSecs() + 0.5 * 3600) continue;
			UrlSigV.Add(TMd5Sig(Memes.PostUrlStr));
			int len = QtStrVV.Len();
			QtStrVV.Add(TMd5SigV());
			IdxV.Add(idx);
			PubTmV.Add(Memes.PubTm);
			for (int m = 0; m < Memes.MemeV.Len(); m++) {
				if (Memes.MemeV[m].CountCh('?') <= Memes.MemeV[m].Len()/2) {					// Filter non-English quote
					TStr qtStr = Memes.MemeV[m];
					QuoteFilter(qtStr);
					QtStrVV[len].Add(TMd5Sig(qtStr));
				}
			}
		}
		// Merge this file's results into the shared tables.
		#pragma omp critical 
		{
			for (int p = 0; p < QtStrVV.Len(); p++) {
				// Skip a URL already seen with an earlier publication time.
				if (SeenUrlSet.IsKey(UrlSigV[p]) && SeenUrlSet.GetDat(UrlSigV[p]) < PubTmV[p]) {NSkip++;continue;}
				//SeenUrlSet.AddKey(UrlSigV[p]);
				SeenUrlSet.AddDat(UrlSigV[p]) = PubTmV[p];
				VIdxVV[i].Add(IdxV[p]);
				for (int j = 0; j < QtStrVV[p].Len(); j++)
					MemeCntH.AddDat(QtStrVV[p][j]) += 1;
			}
			printf("1: Complete %d file out of %d files HashSize = %d UrlSize = %d\n", ++fileCnt, filelist.Len(), MemeCntH.Len(), SeenUrlSet.Len());
			fprintf(FLog, "1: Complete %d file out of %d files HashSize = %d UrlSize = %d\n", fileCnt, filelist.Len(), MemeCntH.Len(), SeenUrlSet.Len());
			fprintf(FLog, "%s Completed\n", filelist[i].CStr());
		}
	}
	fclose(FLog);

  printf("Done \nAll quotes: %d\n", MemeCntH.Len());
  printf("  skip %d urls, keep %d\n", NSkip, SeenUrlSet.Len());
	// Checkpoint phase-1 results so the #else branch can resume from them.
	TFOut FOut(Pref+"-MemeFq.strHash");
	MemeCntH.Save(FOut); VIdxVV.Save(FOut);
	TFOut FOut2(Pref+"-UrlTm.bin");
	SeenUrlSet.Save(FOut2);
#else
	TFIn FIn(Pref+"-MemeFq.strHash"); MemeCntH.Load(FIn); VIdxVV.Load(FIn);
	TFIn FIn2(Pref+"-UrlTm.bin"); SeenUrlSet.Load(FIn2);
#endif
	// Find the frequent quotes
  THashSet<TMd5Sig> FqMemeSet;
  for (int i = 0; i < MemeCntH.Len(); i++) {
	if (MemeCntH[i] >= MinMemeFq) {
		FqMemeSet.AddKey(MemeCntH.GetKey(i)); }
  }
	MemeCntH.Clr(true);
	printf("Number of frequent quotes: %d\n", FqMemeSet.Len());

	StatsH.AddDat("Links added to quote base") = 0; StatsH.AddDat("Urls added to quote base") = 0;
	NSkip = 0; 	fileCnt = 0;
	// Add frequent quote containing memes into quote base
	#pragma omp parallel for schedule(dynamic)
	for (int i = 0; i < filelist.Len(); i++) {
		if (i >= VIdxVV.Len()) continue;
		TMemesDataLoader Memes;
		TVec<TChAV> MemeVV;
		TVec<TChAV> LinkVV;
		TChAV PostUrlStrV;
		TSecTmV PubTmV;
		TMd5SigV UrlSigV; 
		TIntV& VIdxV = VIdxVV[i];
		int idx = -1, k = 0;

		#pragma omp critical
		Memes.LoadFile(filelist[i]);

		while (Memes.LoadNext()) {
			idx++; 
			int ClrCnt = 0;
			for (int m = 0; m < Memes.MemeV.Len(); m++) { // delete non-frequent memes
				TStr qtStr = Memes.MemeV[m];
				QuoteFilter(qtStr);
				Memes.MemeV[m] = qtStr;
				if (! FqMemeSet.IsKey(TMd5Sig(Memes.MemeV[m]))) {
					Memes.MemeV[m].Clr();
					ClrCnt++;
				}
			}
			// Keep the post only if at least one quote survived and the
			// timestamp lies inside the requested global time range.
			if (Memes.MemeV.Len() > ClrCnt && Memes.PubTm >= MinTm && Memes.PubTm <= MaxTm) {
				UrlSigV.Add(TMd5Sig(Memes.PostUrlStr));
				CleanURL(Memes.PostUrlStr);
				MemeVV.Add(Memes.MemeV); PostUrlStrV.Add(Memes.PostUrlStr); PubTmV.Add(Memes.PubTm);
				TChAV LinkV;
				for (int j = 0; j < Memes.LinkV.Len(); j++) 
					if (CleanURL(Memes.LinkV[j]))
						LinkV.Add(Memes.LinkV[j]);
				LinkVV.Add(LinkV);
			}
			
			// Fast-forward (without parsing) to the next record index that
			// survived phase 1.
			if (k >= VIdxV.Len()) break;
			while (idx < VIdxV[k]-1) {if (!Memes.LoadNextSkip()) break; idx++;} k++;
		}
		#pragma omp critical
		{ printf("Start critical\n"); //??? DEBUG
			for (int p = 0; p < MemeVV.Len(); p++) { 
				// Only accept the (URL, time) pair recorded in phase 1; then
				// zero the stored time so the same URL is never added twice.
				if (!SeenUrlSet.IsKey(UrlSigV[p]) || SeenUrlSet.GetDat(UrlSigV[p]).GetAbsSecs() != PubTmV[p].GetAbsSecs()) continue;
				SeenUrlSet.GetDat(UrlSigV[p]) = TSecTm(0);
				StatsH.GetDat("Urls added to quote base") += 1; StatsH.GetDat("Links added to quote base") += LinkVV[p].Len();
				AddQuote(MemeVV[p], LinkVV[p], PostUrlStrV[p], PubTmV[p], MinWrdLen); 
			}
			printf("\n2: Complete %d out of %d files\n", ++fileCnt, filelist.Len());
		}
	}

	SetUrlTy(MediaUrlFNm, utMedia);  // set url type
  printf("SAVE: %d quotes\n", Len());
	{TFOut FOut(TStr::Fmt("%s-w%dmfq%d.QtBs", Pref.CStr(), MinWrdLen, MinMemeFq)); Save(FOut);}
	printf("Meme2QtBs DONE!\n");
	printf("Total Urls = %d, Total Links = %d\n", StatsH.GetDat("Urls added to quote base").Val, StatsH.GetDat("Links added to quote base").Val);//???TEST
}

// Construct quote base from a list of quotes, each line in the input file contains one quote
void TQuoteBs::ConstructQtBsQtOnly(TStr InFNm, TStr Pref, TStr MediaUrlFNm, int MinWrdLen, int MinMemeFq, int HSize) {
	// Build the quote base from a plain-text file containing one quote per
	// line. Pass 1 counts quote occurrences; pass 2 adds only the quotes
	// that occur at least MinMemeFq times, each under a synthetic URL with
	// a fixed publication time (this input format has no timestamps).
	// NOTE(review): Pref and MediaUrlFNm are unused here, kept for
	// interface parity with the other ConstructQtBs* overloads.

	// First pass, count quotes
	PSIn SInPt;
	if (TZipIn::IsZipExt(InFNm.GetFExt())) {
		SInPt = TZipIn::New(InFNm); }
	else {
		SInPt = TFIn::New(InFNm); 
	}	
	TSIn& SIn = *SInPt;
	TChA CurLn;
	THash<TMd5Sig, TInt> MemeCntH(Mega(HSize), true);
	while (!SIn.Eof()) {
		SIn.GetNextLn(CurLn);	if (CurLn.Empty()) continue;
		TStr QtStr = CurLn;
		QuoteFilter(QtStr);
		// BUG FIX: count every occurrence. The previous bare AddDat() left
		// each count default-initialized (0), so the MinMemeFq threshold
		// below could never trigger.
		MemeCntH.AddDat(TMd5Sig(QtStr)) += 1;
	}
	printf("Adding to MemeCntH DONE! Size = %d\n", MemeCntH.Len());

	// Keep only the quotes that reach the minimum frequency.
  THashSet<TMd5Sig> FqMemeSet;
  for (int i = 0; i < MemeCntH.Len(); i++) {
	if (MemeCntH[i] >= MinMemeFq) {
		FqMemeSet.AddKey(MemeCntH.GetKey(i)); }
  }
	
	// Second pass, add quotes to quote base
	if (TZipIn::IsZipExt(InFNm.GetFExt())) {
		SInPt = TZipIn::New(InFNm); }
	else {
		SInPt = TFIn::New(InFNm); 
	}	
	TSIn& SIn2 = *SInPt;
	CurLn.Clr();
	TSecTm DefaultTm(2010,1,1);
	int UrlStrNb = 0;
	while (!SIn2.Eof()) {
		SIn2.GetNextLn(CurLn);	if (CurLn.Empty()) continue;
		TStr QtStr = CurLn;
		QuoteFilter(QtStr);
		// BUG FIX: FqMemeSet was computed but never consulted, so quotes
		// below MinMemeFq were still added. Enforce the threshold here
		// (UrlStrNb still advances to keep line/URL correspondence).
		if (!FqMemeSet.IsKey(TMd5Sig(QtStr))) { UrlStrNb++; continue; }
		TChAV MemeV; MemeV.Add(QtStr);
		AddQuote(MemeV, TChAV(), TStr::Fmt("http://%d.com/%d.html", UrlStrNb, UrlStrNb), DefaultTm, MinWrdLen);
		UrlStrNb++;
	}
	ComputeMnMxTm();
}

// Construct quote base from a list of timestamped quotes, format of each line is "<time>\t<quote>"
void TQuoteBs::ConstructQtBsQtTime(TStr InFNm, TStr Pref, TStr MediaUrlFNm, int MinWrdLen, int MinMemeFq, int HSize) {
	// Build the quote base from a file of "<epoch-seconds>\t<quote>" lines.
	// Pass 1 counts quote occurrences; pass 2 adds only quotes occurring at
	// least MinMemeFq times, each under a synthetic URL stamped with the
	// line's timestamp.
	// NOTE(review): Pref and MediaUrlFNm are unused here, kept for
	// interface parity with the other ConstructQtBs* overloads.

	// First pass, count quotes
	PSIn SInPt;
	if (TZipIn::IsZipExt(InFNm.GetFExt())) {
		SInPt = TZipIn::New(InFNm); }
	else {
		SInPt = TFIn::New(InFNm); 
	}	
	TSIn& SIn = *SInPt;
	TChA CurLn;
	THash<TMd5Sig, TInt> MemeCntH(Mega(HSize), true);
	while (!SIn.Eof()) {
		SIn.GetNextLn(CurLn);	if (CurLn.Empty()) continue;
		int TabPos = CurLn.SearchCh('\t'); 
		// BUG FIX: skip malformed lines without a timestamp field.
		if (TabPos < 0) continue;
		TStr QtStr = CurLn.GetSubStr(TabPos+1, CurLn.Len()-1);
		QuoteFilter(QtStr);
		// BUG FIX: count every occurrence. The previous bare AddDat() left
		// each count default-initialized (0), so the MinMemeFq threshold
		// below could never trigger.
		MemeCntH.AddDat(TMd5Sig(QtStr)) += 1;
	}
	printf("Adding to MemeCntH DONE! Size = %d\n", MemeCntH.Len());

	// Keep only the quotes that reach the minimum frequency.
  THashSet<TMd5Sig> FqMemeSet;
  for (int i = 0; i < MemeCntH.Len(); i++) {
	if (MemeCntH[i] >= MinMemeFq) {
		FqMemeSet.AddKey(MemeCntH.GetKey(i)); }
  }
	
	// Second pass, add quotes to quote base
	if (TZipIn::IsZipExt(InFNm.GetFExt())) {
		SInPt = TZipIn::New(InFNm); }
	else {
		SInPt = TFIn::New(InFNm); 
	}	
	TSIn& SIn2 = *SInPt;
	CurLn.Clr();
	int UrlStrNb = 0;
	while (!SIn2.Eof()) {
		SIn2.GetNextLn(CurLn);	if (CurLn.Empty()) continue;
		int TabPos = CurLn.SearchCh('\t'); 
		// BUG FIX: previously a tab-less line caused CurLn[-1] = 0 below
		// (out-of-bounds write); skip such lines instead.
		if (TabPos < 0) continue;
		TStr QtStr = CurLn.GetSubStr(TabPos+1, CurLn.Len()-1);
		QuoteFilter(QtStr);
		// BUG FIX: FqMemeSet was computed but never consulted, so quotes
		// below MinMemeFq were still added. Enforce the threshold here
		// (UrlStrNb still advances to keep line/URL correspondence).
		if (!FqMemeSet.IsKey(TMd5Sig(QtStr))) { UrlStrNb++; continue; }
		TChAV MemeV; MemeV.Add(QtStr);
		// Terminate the line at the tab so atoi parses only the timestamp.
		CurLn[TabPos] = 0; 
		TSecTm PubTm(atoi(CurLn.CStr()));
		AddQuote(MemeV, TChAV(), TStr::Fmt("http://%d.com/%d.html", UrlStrNb, UrlStrNb), PubTm, MinWrdLen);
		UrlStrNb++;
	}
	ComputeMnMxTm();
}

// Cluster quotes in the quote base
void TQuoteBs::ClusterQuotes(int MinQtFq,int MnWrdLen, int MxWrdLen, TStr& BlackListFNm, TStr& Pref, bool IsShglReady, bool IsNetReady, double BktThresh, double MxTmDelay, double MxTmDev, double RefineThresh) {
	// Cluster near-duplicate quotes in this quote base:
	//  1. shingle-hash quotes into buckets (or load cached buckets when
	//     IsShglReady), dropping bad buckets via the blacklist/threshold,
	//  2. refine the quote base itself,
	//  3. build and score a quote-to-quote cluster network from bucket
	//     co-occurrence (or load the cached network when IsNetReady),
	//  4. refine the clustering, assign clusters, and save the clustered
	//     quote base to <Pref>-clust.QtBs.
	// Per-stage wall-clock timings are accumulated in TmRecH.
	TStrHash<TStr> TmRecH;
	RegisterTime("Start", true, TmRecH);

	PClustNet Net = TClustNet::New();
	THash<TMd5Sig, TIntV> ShglQtIdVH;
	THash<TInt, TIntV> QtToWordIdVH;
	TStrHash<TInt> WordIdH;
	
	TStr ShglFN(Pref+".Shgl");
	if (!IsShglReady) { // Whether hash table already exists
		HashingShgls(MinQtFq, MnWrdLen, MxWrdLen, ShglQtIdVH, QtToWordIdVH, WordIdH); RegisterTime("Hashing Shingles", false, TmRecH);
		ElimBadBkt(BlackListFNm, BktThresh, ShglQtIdVH, QtToWordIdVH); RegisterTime("Eliminate Bad Bucket", false, TmRecH);
		// Save Shingle hash table
		TFOut ShglOut(ShglFN); ShglQtIdVH.Save(ShglOut); QtToWordIdVH.Save(ShglOut); WordIdH.Save(ShglOut);
	} else { // If existed, load it directly
		printf("Loading Shingles hash table\n");
		TFIn ShglIn(ShglFN); ShglQtIdVH.Load(ShglIn); QtToWordIdVH.Load(ShglIn); WordIdH.Load(ShglIn); RegisterTime("Loading Shingle Hash Table", false, TmRecH);
		printf("Loading complete\n");
	}

	RefineQtBs(BlackListFNm, MinQtFq, MnWrdLen, MxWrdLen);

	TStr NetFN(Pref+".ClustNet");
	if (!IsNetReady) { // Whether cluster net aleady exists
		BktAddLink(Net, ShglQtIdVH, QtToWordIdVH);	RegisterTime("Bucket Add Links", false, TmRecH);
		printf("Start Evaluation of links\n");
		EvaluateLinks(Net, MxTmDelay, MxTmDev);	 RegisterTime("Evaluate Links", false, TmRecH);
		printf("Complete Evaluation of links\n");
		{ printf("save clust net\n"); TFOut FOut(NetFN); Net->Save(FOut); }
	} else { // If exists, load the cluster net directly
		printf("Loading ClustNet\n");
		TFIn NetIn(NetFN); 
		Net = TClustNet::Load(NetIn);	RegisterTime("Loading Cluster Net", false, TmRecH);
		printf("Loading complete\n");
	}
	RefineCluster(Net, RefineThresh, QtToWordIdVH); RegisterTime("Refine clustering result", false, TmRecH);

	//for (int q = 0; q < QuoteH.Len(); q++) 
	//	if (QuoteH[q].GetUrls() > 30) Net->AddNode(q);

	// Print the info of the cluster net
	TSnap::PrintInfo(Net);  

	// Set cluster for quote base 
	TVec<TIntV> ClustV;
  printf("Get clusters\n");
  Net->GetClusters(ClustV);
  printf("Set cluster\n");
  CreateClusters(ClustV); // set clusters
	printf("CLUSTERING DONE!\n");
	{ TFOut FOut(Pref+"-clust.QtBs"); Save(FOut); }
	ShowAllTime(TmRecH);
}

// Output quote clusters
void TQuoteBs::DumpQuoteClusters(int MinWrdLen, int MinClustFq, bool SkipUrl, bool FlashDisp, TStr Pref) {
	// Write cluster-level output files under the given prefix: the top
	// ~100 quotes by frequency, per-cluster listings (with and without
	// URLs), and merged-cluster listings.
	// NOTE(review): SkipUrl and FlashDisp are currently unused; the
	// flash-display export below is commented out.
	printf("Dumping quote cluster information\n");
  // Save top clusters by frequency
  TIntV QtIdV;
  // Keep only the leading ids: delete from index min(100, Len-1) to the end.
  GetQtIdVByFq(QtIdV, MinWrdLen, MinClustFq, false); QtIdV.Del(TMath::Mn(100, QtIdV.Len()-1), QtIdV.Len()-1);
  SaveQuotes(QtIdV, Pref+"-quotes.txt");
  SaveClusters(Pref, true); SaveClusters(Pref+"Url_Ori", false);

	AddMergedClusters();
	SaveMergedClusters(Pref+"Url_Clust");

	// Save top quote clusters for flash display
//  const TUrlTy CntUrlTy = utUndef;
//  GetQtIdVByFq(QtIdV, MinWrdLen, MinClustFq, true, "", "", CntUrlTy);
//	SaveForFlashAll(QtIdV, Pref+"_Top", tmu4Hour, MnTm, MxTm, CntUrlTy);
	printf("DUMP DONE!\n");
}

void TQuoteBs::AnalyzeQuoteTrend(TStr Pref, const TTmUnit TmUnit, const int TmCnt, int TopN){
	// Run the meme-trend analysis pipeline over this quote base: pick the
	// quote-type label from the input file name, build a TMemeTrend, dump
	// trend curves/statistics, and run the graph/prediction experiments.
	// Many alternative experiments are left commented out below.
	// NOTE(review): reads the global command-line Env to recover the input
	// quote-base file name; the "-i:" argument must be present.
	TStr InQtBsNm = Env.GetIfArgPrefixStr("-i:", "", "Input quote base file name");
	TStr QtType;
	// Classify the quote base by its file-name suffix (variant / centroid /
	// cluster); QtType stays empty for any other name.
	if (InQtBsNm.IsSuffix("30.QtBs")) {
		QtType = TStr("Variant");
	} else if (InQtBsNm.IsSuffix("Centroid.QtBs")) {
		QtType = TStr("Centr");
	} else if (InQtBsNm.IsSuffix("Cluster.QtBs")) {
		QtType = TStr("Clust");
	}

	TMemeTrend MT(this, Pref+QtType, true);
	// Warm up the function-local static blog-domain table before any
	// parallel regions use GetDomNmFromUrl (see its definition).
	MT.GetDomNmFromUrl("For Parallel Compatibility"); 

	//MT.ComputeAllFeatures();
	MT.TESTDataSetStats();


	int VBegDomNum = 30;

	// Meme trend
	TIntPrV QtVarPrV;
	//MT.PlotTopClusterVariants(TopN, 50, "TopVariants_", TmUnit, TmCnt);
	MT.GetTrendQtPrV(QtVarPrV, 30, TopN, TmUnit, TmCnt); 
	MT.PlotTrendQtPrV(QtVarPrV, "QtTrendVar", TmUnit, TmCnt);
	MT.DumpTrendQt(QtVarPrV, "STATS_QtTrend.txt", true); MT.DumpTrendQt(QtVarPrV, "STATS_QtTrendUrl.txt", false);
	MT.DumpFeatureStats(QtVarPrV, TmUnit, TmCnt);

	// Feature properties
//	MT.PlotPopVSFeatureVal(VBegDomNum, true);
//	MT.PlotFeatureValDistribution(VBegDomNum, true);

	// Machine Learning Task
//	MT.DumpClassificationTaskData(VBegDomNum, true);
//	MT.DumpDifferentiationTaskData(QtVarPrV, VBegDomNum, true);
//	MT.DumpNaiveBayesTaskData(VBegDomNum);

	// Feature evolution
//	MT.FeatureEvolution(60);

	// Graph properties
	MT.RewireCoMGraph();
	MT.CoMentionLinkG();

	// Test functions
	MT.GetDuplicateDoms();
	//MT.InvestigateBadQuote(VBegDomNum);
	MT.DomainPrediction(QtVarPrV, VBegDomNum);
}

void TQuoteBs::DumpClusterQuoteBs(TStr Pref) {
	// Create two derived quote bases from the clustering:
	//  - "<Pref>-Centroid": per cluster, the member quote with the most
	//    URLs, kept only if that member has >= 30 URLs;
	//  - "<Pref>-Cluster": the merged cluster-level quotes (type qtCentr)
	//    with >= 30 URLs, re-rooted as standalone quotes.
	printf("Creating two artificial quote base for quote cluster and quote centroid\n");
	TIntV CentroidV;
	for (int i = 0; i < ClustQtIdVH.Len(); i++) {
		TIntV& QtV = ClustQtIdVH[i];
		// Pick the cluster member with the largest URL count.
		int QtNRec = 0; double BestVal = -1;
		for (int j = 0; j < QtV.Len(); j++) {
			if (BestVal < QuoteH[QtV[j]].GetUrls()) {
				BestVal = QuoteH[QtV[j]].GetUrls();
				QtNRec = QtV[j];
			}					
		}
		// Skip clusters whose best member is still too small.
		if (QuoteH[QtNRec].GetUrls() < 30) continue;
		CentroidV.Add(QtNRec);
	}
	GetSampleQtBs(CentroidV, Pref+TStr("-Centroid"));

	AddMergedClusters();
	TIntV ClustV;
	for (int i = 0; i < QuoteH.Len(); i++) {
		TQuote& Qt = QuoteH[i];
		if (Qt.GetTy() != qtCentr) continue;
		if (Qt.GetUrls() < 30) continue;
		ClustV.Add(i);
		// Detach the merged quote from its cluster so it acts as a root.
		Qt.QtCIdTy = TQuote::TQtIdTy(-1, qtRoot);
	}
	printf("Total Clusters : %d\n", ClustV.Len());
	GetSampleQtBs(ClustV, Pref+TStr("-Cluster"));
}


/////////////////////////////////////////////////
// Meme Trend
void TMemeTrend::SetDefaultValues() {
	// Initialize all tunable members of TMemeTrend to their defaults and
	// populate the feature-name/interval tables.

	// Domain blacklist consulted when building the hyperlink graph.
	DomBlackListFNm = TStr("urldom_blacklist.txt");

	// Variant/co-mention detection parameters.
	VSameCount = 5;
	VMinMultiple = 3;
	VMaxDifference = 0.3;
	VCoMThresh = 0.75;

	// Popularity thresholds (low / high).
	LoThresh = 80;
	HiThresh = 240;

	// By default, neither media-only nor blog-only domain filtering is on.
	IsUseOnlyMediaDom = 0;
	IsUseOnlyBlogDom = 0;

	// Build the feature string/interval vectors.
	GetFeatureStrV();
}

TStr TMemeTrend::GetDomNmFromUrl(TStr UrlStr) {
	// Map a URL to a canonical domain name: strip leading '.', query
	// strings, and a "feeds." prefix; then, if the domain ends with any
	// known blog-hosting domain (loaded once from blog_domain.txt),
	// collapse it to that hosting domain.
	// NOTE(review): the function-local static is lazily initialized on
	// first call; callers warm it up once before entering parallel regions
	// (see the GetDomNmFromUrl("For Parallel Compatibility") call in
	// AnalyzeQuoteTrend) since concurrent first calls would race.
	static THashSet<TChA> BlogDomain;
	if (BlogDomain.Len() == 0) {
		printf("Loading blog domain info file\n");
		TStr BlogFNm("blog_domain.txt"); TStr Ln;
		for (TFIn FIn(BlogFNm); FIn.GetNextLn(Ln);) {
			if (Ln.GetTrunc().Len() > 0)
				BlogDomain.AddKey(Ln.GetTrunc());
		}
		printf("%d blog domains loaded in total\n", BlogDomain.Len());
	}
	TChA DomStr = TStrUtil::GetDomNm2(UrlStr);
	// Drop a leading dot, anything after '?', and a "feeds." prefix.
	if (DomStr.IsPrefix(".")) DomStr = DomStr.GetSubStr(1, TInt::Mx);
	int QPos = DomStr.SearchCh('?');
	if (QPos != -1)
		DomStr = DomStr.GetSubStr(0, QPos-1);
	if (DomStr.IsPrefix("feeds.")) DomStr = DomStr.GetSubStr(6, TInt::Mx);
	// Linear suffix scan over all blog domains; first match wins.
	for (int i = 0; i < BlogDomain.Len(); i++) 
		if (DomStr.IsSuffix(BlogDomain[i]))
			return BlogDomain[i];
	return DomStr;
}

void TMemeTrend::GetFeatureStrV() {
	// Populate the parallel vectors FeatureStrV (feature names) and IntvlV
	// (histogram bucket width per feature), grouped by feature family, and
	// dump the 1-based feature index list to INFO_feature.txt.
	// The two vectors must stay index-aligned: every Add() here is paired.

	// PROJ 
	FeatureStrV.Add("ProjNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("ProjEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("ProjTriadNum"); IntvlV.Add(100);
	FeatureStrV.Add("ProjCompNum"); IntvlV.Add(3);
	FeatureStrV.Add("ProjGccNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("ProjGccEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("ProjGccSize"); IntvlV.Add(0.05);
	FeatureStrV.Add("ProjMaxDeg"); IntvlV.Add(3);
	FeatureStrV.Add("ProjD0NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("ProjD1NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("ProjDensity"); IntvlV.Add(0.05);
	FeatureStrV.Add("ProjClusterCoeff"); IntvlV.Add(0.05);
	FeatureStrV.Add("ProjEntExcessDeg"); IntvlV.Add(200);
	
	// NONNET
	FeatureStrV.Add("NonNetMBRatio"); IntvlV.Add(0.05);
	FeatureStrV.Add("NonNetReportTime"); IntvlV.Add(0.2);
	FeatureStrV.Add("NonNetWordNum"); IntvlV.Add(3);
	FeatureStrV.Add("NonNetCharNum"); IntvlV.Add(20);

	// EXT
	FeatureStrV.Add("ExtLinkNodeNum"); IntvlV.Add(1);
	FeatureStrV.Add("ExtLinkEdgeNum"); IntvlV.Add(2);
	FeatureStrV.Add("ExtCoMNodeNum"); IntvlV.Add(0.5);
	FeatureStrV.Add("ExtCoMEdgeNum"); IntvlV.Add(1);	
	
	// COM
	FeatureStrV.Add("CoMNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("CoMEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("CoMTriadNum"); IntvlV.Add(100);
	FeatureStrV.Add("CoMCompNum"); IntvlV.Add(3);
	FeatureStrV.Add("CoMGccNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("CoMGccEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("CoMGccSize"); IntvlV.Add(0.05);
	FeatureStrV.Add("CoMMaxDeg"); IntvlV.Add(3);
	FeatureStrV.Add("CoMD0NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("CoMD1NodeNum"); IntvlV.Add(2);
	// FIX: was "FeatureStrV.Add(...), IntvlV.Add(...)" using the comma
	// operator -- behaviorally identical but inconsistent with every other
	// line; normalized to a semicolon.
	FeatureStrV.Add("CoMDensity"); IntvlV.Add(0.05);
	FeatureStrV.Add("CoMClusterCoeff"); IntvlV.Add(0.05);
	FeatureStrV.Add("CoMEntExcessDeg"); IntvlV.Add(200);

	// DEG
	FeatureStrV.Add("DegLinkIn"); IntvlV.Add(0.5); 
	FeatureStrV.Add("DegLinkOut"); IntvlV.Add(0.5);
	FeatureStrV.Add("DegCoMIn"); IntvlV.Add(0.5); 
	FeatureStrV.Add("DegCoMOut"); IntvlV.Add(0.5);
	FeatureStrV.Add("DegMixIn"); IntvlV.Add(0.5); 
	FeatureStrV.Add("DegMixOut"); IntvlV.Add(0.5);
	FeatureStrV.Add("DegMixExtIn"); IntvlV.Add(0.5); 
	FeatureStrV.Add("DegMixExtOut"); IntvlV.Add(0.5);
	

	// CONN
	FeatureStrV.Add("ConnNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("ConnEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("ConnTriadNum"); IntvlV.Add(50);
	FeatureStrV.Add("ConnCompNum"); IntvlV.Add(3);
	FeatureStrV.Add("ConnGccNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("ConnGccEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("ConnGccSize"); IntvlV.Add(0.05);
	FeatureStrV.Add("ConnMaxDeg"); IntvlV.Add(3);
	FeatureStrV.Add("ConnD0NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("ConnD1NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("ConnDensity"); IntvlV.Add(0.05);
	FeatureStrV.Add("ConnClusterCoeff"); IntvlV.Add(0.05);

	// MIX
	FeatureStrV.Add("MixNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("MixEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("MixTriadNum"); IntvlV.Add(100);
	FeatureStrV.Add("MixCompNum"); IntvlV.Add(1);
	FeatureStrV.Add("MixGccNodeNum"); IntvlV.Add(5);
	FeatureStrV.Add("MixGccEdgeNum"); IntvlV.Add(20);
	FeatureStrV.Add("MixGccSize"); IntvlV.Add(0.05);
	FeatureStrV.Add("MixMaxDeg"); IntvlV.Add(3);
	FeatureStrV.Add("MixD0NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("MixD1NodeNum"); IntvlV.Add(2);
	FeatureStrV.Add("MixDensity"); IntvlV.Add(0.005);
	FeatureStrV.Add("MixClusterCoeff"); IntvlV.Add(0.02);
	FeatureStrV.Add("MixInCompCoeff"); IntvlV.Add(5);
	FeatureStrV.Add("MixOutCompCoeff"); IntvlV.Add(20);
	
	// TIME
	FeatureStrV.Add("TimeSpan"); IntvlV.Add(0.1);

	// Dump a 1-based "index\tname" listing for reference.
	FILE* F = fopen("INFO_feature.txt", "w");
	// FIX: guard against fopen failure -- fprintf on a NULL stream is UB.
	if (F != NULL) {
		for (int i = 0; i < FeatureStrV.Len(); i++) 
			fprintf(F, "%d\t\t%s\n", i+1, FeatureStrV[i].CStr());
		fclose(F);
	}
}

TMemeTrend::TMemeTrend(PQuoteBs _QtBs, TStr _Pref, bool IsDataReady) {
	// Build (or load) all domain graphs and pre-computed statistics for the
	// given quote base.
	//   IsDataReady == true : load the raw graphs from <Pref>-DomGraph.bin,
	//     derive the filtered graphs, then recompute the stats (the cached
	//     <Pref>-Stats.bin loads are commented out below).
	//   IsDataReady == false: compute everything from scratch and save both
	//     the graph file and the stats file.
	printf("Initializing meme trend class...\n");
	QtBs = _QtBs;
	Pref = _Pref;
	SetDefaultValues();

	TStr GFNm = TStr::Fmt("%s-DomGraph.bin", Pref.CStr());
	TStr StatsFNm = TStr::Fmt("%s-Stats.bin", Pref.CStr());
	if (IsDataReady) {
		printf("Loading domain graph stats\n");
		{
			TFIn FIn(GFNm);
			// Load returns a smart pointer; dereference copies the network
			// into the member.
			RawLinkDomNet = *(TNodeEDatNet<TInt, TInt>::Load(FIn));
			RawCoMDomNet = *(TNodeEDatNet<TInt, TInt>::Load(FIn)); 
			DomIdH.Load(FIn); DomStrH.Load(FIn);
			// Derive the filtered/mixed/undirected graphs from the raw ones.
			GetDomLinkGraph();
			GetDomCoMGraph();
			PrintDomNetInfo(LinkDomNet);
			PrintDomNetInfo(CoMDomNet);
			GetDomMixGraph();
			DomUNGraph = PUNGraph::New(); GetDomUNGraph();
		}
		printf("Loading pre-computed stats\n");
		{
			// NOTE(review): StatsFNm is opened but the cached loads are
			// commented out; the stats are recomputed instead.
			TFIn FIn(StatsFNm);
			//DomTmH.Load(FIn); 
			//LinkTriadH.Load(FIn); CoMTriadH.Load(FIn); 
			//MixInDegH.Load(FIn); MixOutDegH.Load(FIn); 
			//MixExtInDegH.Load(FIn); MixExtOutDegH.Load(FIn); 
			//InCompH.Load(FIn); OutCompH.Load(FIn);
			ComputeQtReportTime();
			ComputeTriadH(LinkDomNet, LinkTriadH);
			ComputeTriadH(CoMDomNet, CoMTriadH);
			ComputeMixDegH();
			ComputeCompH();
			printf("DomTmH Len = %d\n", DomTmH.Len());
			printf("LinkTriadH Len = %d, CoMTriadH Len = %d\n", LinkTriadH.Len(), CoMTriadH.Len());
			printf("MixInDegH Len = %d, MixOutDegH Len = %d\n", MixInDegH.Len(), MixOutDegH.Len());
			printf("MixExtIn Len = %d, MixExtOut Len = %d\n", MixExtInDegH.Len(), MixExtOutDegH.Len());
			printf("InCompH Len = %d, OutCompH Len = %d\n", InCompH.Len(), OutCompH.Len());
		}
	} else {
		printf("Computing domain graph and pre-computed stats\n");
		// Build every graph from the quote base, then all the stats.
		GetRawDomGraph();
		GetDomLinkGraph();
		GetDomCoMGraph();
		GetDomMixGraph();
		DomUNGraph = PUNGraph::New(); GetDomUNGraph();

		ComputeQtReportTime();
		ComputeTriadH(LinkDomNet, LinkTriadH);
		ComputeTriadH(CoMDomNet, CoMTriadH);
		ComputeMixDegH();
		ComputeCompH();
		// Cache the raw graphs and the computed stats for future runs.
		{
			TFOut FOut(GFNm); 
			RawLinkDomNet.Save(FOut); 
			RawCoMDomNet.Save(FOut); 
			DomIdH.Save(FOut); DomStrH.Save(FOut);
		}
		{
			TFOut FOut(StatsFNm); 
			DomTmH.Save(FOut); 
			LinkTriadH.Save(FOut); CoMTriadH.Save(FOut);
			MixInDegH.Save(FOut); MixOutDegH.Save(FOut); 
			MixExtInDegH.Save(FOut); MixExtOutDegH.Save(FOut);
			InCompH.Save(FOut); OutCompH.Save(FOut);
		}
	}
}

// Graph Construction and Pre-stats computing

void TMemeTrend::GetRawDomGraph() {
	// Build the two raw domain networks from the quote base:
	//  - RawCoMDomNet: node weight = #quotes a domain mentions; an edge
	//    (a,b) weighted by how many quotes both a and b mention (edges are
	//    added in both directions). Domains with fewer than 10 mentions
	//    are deleted afterwards.
	//  - RawLinkDomNet: node weight = #incoming hyperlinks; edge (a,b)
	//    weighted by how many hyperlinks go from a's posts to b.
	// Also assigns each new domain an id (DomIdH) and a display name
	// (DomStrH) prefixed "M_" (media) or "B_" (blog).
	printf("Computing Raw Domain Graph... \n");
	printf("Computing Raw CoMention Graph \n");
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		if (i % 100000 == 0) printf("%d out of %d Completed\n", i, QtBs->QuoteH.Len());
		const TQuote& Qt = QtBs->QuoteH[i];
		// Skip artificial cluster-centroid quotes.
		if (Qt.GetTy() == qtCentr) continue;

		TIntSet DomIdSet;
		for (int j = 0; j < Qt.TmUrlCntV.Len(); j++) {
			TStr UrlStr = QtBs->GetStr(Qt.TmUrlCntV[j].UrlId()); 
			TStr DomStr = GetDomNmFromUrl(UrlStr);
			TMd5Sig DomSig(DomStr);
			if (!DomIdH.IsKey(DomSig)) {
				// First time we see this domain: register id, display name,
				// and a node in both raw networks.
				DomIdH.AddDatId(DomSig);
				if (QtBs->GetUrlTy(Qt.TmUrlCntV[j].UrlId())==utMedia)
					DomStr = TStr("M_") + DomStr;
				else 
					DomStr = TStr("B_") + DomStr;
				DomStrH.AddDat(DomIdH.GetDat(DomSig)) = DomStr;
				RawCoMDomNet.AddNode(DomIdH.GetDat(DomSig), TInt(0)); // CoMention Graph
				RawLinkDomNet.AddNode(DomIdH.GetDat(DomSig), TInt(0)); // Link Graph
			}
			int DomId = DomIdH.GetDat(DomSig);
			DomIdSet.AddKey(DomId);			
		}
		// Credit each distinct domain once per quote...
		for (int idx = 0; idx < DomIdSet.Len(); idx++)
			RawCoMDomNet.GetNDat(DomIdSet[idx]) += 1;

		// ...and connect every ordered pair of co-mentioning domains.
		for (int idx1 = 0; idx1 < DomIdSet.Len(); idx1++) 
			for (int idx2 = 0; idx2 < DomIdSet.Len(); idx2++) {
				if (idx1 == idx2) continue;
				if (!RawCoMDomNet.IsEdge(DomIdSet[idx1], DomIdSet[idx2])) 
					RawCoMDomNet.AddEdge(DomIdSet[idx1], DomIdSet[idx2], TInt(0));
				RawCoMDomNet.GetEDat(DomIdSet[idx1], DomIdSet[idx2]) += 1;
			}
	}

	// Delete nodes that have too few mentions
	printf("Deleting domains that have too few mentions\n");
	TIntV DelNodeV;
	TNodeEDatNet<TInt, TInt>::TNodeI NI;
	for (NI = RawCoMDomNet.BegNI(); NI < RawCoMDomNet.EndNI(); NI++) {
		if (NI.GetDat() < 10)
			DelNodeV.Add(NI.GetId());
	}
	for (int i = 0; i < DelNodeV.Len(); i++) 
		RawCoMDomNet.DelNode(DelNodeV[i]);

	printf("Computing Raw Link Graph... \n");
	int TotLinks = 0;
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		if (i % 100000 == 0) printf("%d out of %d Completed\n", i, QtBs->QuoteH.Len());
		const TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;

		for (int j = 0; j < Qt.TmUrlCntV.Len(); j++) {
			TStr UrlStr = QtBs->GetStr(Qt.TmUrlCntV[j].UrlId());
			TStr DomStr = GetDomNmFromUrl(UrlStr);
			TMd5Sig DomSig(DomStr);
			int DomId = DomIdH.GetDat(DomSig);
			//RawLinkDomNet.GetNDat(DomId) += 1;
			// Follow every outgoing hyperlink from this post's URL.
			if (!QtBs->UrlLkH.IsKey(Qt.TmUrlCntV[j].UrlId())) continue;
			TIntSet& LinkIdSet = QtBs->UrlLkH.GetDat(Qt.TmUrlCntV[j].UrlId());
			for (int k = 0; k < LinkIdSet.Len(); k++) {
				TStr LinkStr = QtBs->GetStr(LinkIdSet[k]);
				TStr LinkDomStr = GetDomNmFromUrl(LinkStr);
				TMd5Sig LinkDomSig(LinkDomStr);
				// Only count links into domains we already registered above.
				if (!DomIdH.IsKey(LinkDomSig)) continue;
				int LinkDomId = DomIdH.GetDat(LinkDomSig);
				RawLinkDomNet.GetNDat(LinkDomId) += 1;
				if (!RawLinkDomNet.IsEdge(DomId, LinkDomId))
					RawLinkDomNet.AddEdge(DomId, LinkDomId);
				RawLinkDomNet.GetEDat(DomId, LinkDomId) += 1;
				TotLinks += 1;
			}
		}
	}
	printf("Total hyperlinks in the domain graph = %d\n", TotLinks);
}

void TMemeTrend::GetDomLinkGraph() {
	// Derive the filtered hyperlink graph (LinkDomNet) from the raw one:
	// drop blacklisted domains (one domain per line in DomBlackListFNm,
	// matched via MD5 signature), then copy edges between surviving nodes.
	printf("Get hyperlink graph from raw graph\n");
	TIntSet BanIdSet;
	TFIn FIn(DomBlackListFNm);
	for (TStr Ln; FIn.GetNextLn(Ln);) {
		TMd5Sig DomSig(Ln);
		if (!DomIdH.IsKey(DomSig)) continue;
		int DomId = DomIdH.GetDat(DomSig);
		BanIdSet.AddKey(DomId);
	}

	for (TNodeEDatNet<TInt, TInt>::TNodeI NI = RawLinkDomNet.BegNI(); NI < RawLinkDomNet.EndNI(); NI++) {
		if (BanIdSet.IsKey(NI.GetId())) continue;
		LinkDomNet.AddNode(NI.GetId(), 1);
	}

	for (TNodeEDatNet<TInt, TInt>::TEdgeI EI = RawLinkDomNet.BegEI(); EI < RawLinkDomNet.EndEI(); EI++)  {
		if (!LinkDomNet.IsNode(EI.GetSrcNId()) || !LinkDomNet.IsNode(EI.GetDstNId())) continue;
		// NOTE(review): raw edges are only created when incremented, so the
		// weight is always >= 1 and this test keeps every edge -- presumably
		// a placeholder for a configurable minimum edge weight; confirm.
		if (EI.GetDat().Val >= 1) 
			LinkDomNet.AddEdge(EI.GetSrcNId(), EI.GetDstNId(), 1);
	}
}

void TMemeTrend::GetDomCoMGraph() {
	printf("Get CoMention graph from raw graph\n");
	TNodeEDatNet<TInt, TInt>::TNodeI NI;
	for (NI = RawCoMDomNet.BegNI(); NI < RawCoMDomNet.EndNI(); NI++)
		CoMDomNet.AddNode(NI.GetId(), 1);

	TNodeEDatNet<TInt, TInt>::TEdgeI EI;
	for (EI = RawCoMDomNet.BegEI(); EI < RawCoMDomNet.EndEI(); EI++) {
		int SrcDomId = EI.GetSrcNId();
		int DstDomId = EI.GetDstNId();
		if (EI.GetDat().Val / double(RawCoMDomNet.GetNDat(SrcDomId).Val) >= VCoMThresh)
			CoMDomNet.AddEdge(SrcDomId, DstDomId, 1);
	}
}

void TMemeTrend::GetDomMixGraph() {
	printf("Get weighted hyperlink graph from hyperlink and comention graphs\n");
	TNodeEDatNet<TInt, TInt>::TNodeI NI;
	for (NI = LinkDomNet.BegNI(); NI < LinkDomNet.EndNI(); NI++)
		MixDomNet.AddNode(NI.GetId(), 1.0);

	TNodeEDatNet<TInt, TInt>::TEdgeI EI;
	for (EI = LinkDomNet.BegEI(); EI < LinkDomNet.EndEI(); EI++) {
		int SrcId = EI.GetSrcNId();
		int DstId = EI.GetDstNId();
		double EdgeWeight = 0;
		if (RawCoMDomNet.IsEdge(SrcId, DstId)) {
				EdgeWeight = RawCoMDomNet.GetEDat(SrcId, DstId).Val / double(RawCoMDomNet.GetNDat(DstId).Val);		
		}
		MixDomNet.AddEdge(SrcId, DstId, EdgeWeight);
	}
}

void TMemeTrend::GetDomUNGraph() {
	printf("Computing undirected hyperlink graph\n");
	TNodeEDatNet<TInt, TInt>::TNodeI NI;
	for (NI = LinkDomNet.BegNI(); NI < LinkDomNet.EndNI(); NI++)
		DomUNGraph->AddNode(NI.GetId());
	TNodeEDatNet<TInt, TInt>::TEdgeI EI;
	for (EI = LinkDomNet.BegEI(); EI < LinkDomNet.EndEI(); EI++) 
		DomUNGraph->AddEdge(EI.GetSrcNId(), EI.GetDstNId());
	TCnComV CnComV;
	TSnap::GetWccs(DomUNGraph, CnComV);
	for (int i = 0; i < CnComV.Len(); i++) 
		for (int j = 0; j < CnComV[i].NIdV.Len(); j++)
			CcH.AddDat(CnComV[i].NIdV[j]) = i;
	printf("Printing information about undirected hyperlink graph\n");
	TSnap::PrintInfo(DomUNGraph);
}

void TMemeTrend::ComputeQtReportTime() {
	// Compute each domain's average report time: the mean offset (in days)
	// between the domain's mention of a quote and the quote's median
	// mention time, over all non-centroid quotes with small time deviation.
	// Negative values mean the domain is typically early. Results go to
	// DomTmH (sorted ascending) and are dumped to STATS_ReportTime.txt.
	printf("Computing average domain report time\n");
	THash<TInt, TInt> DomCntH;
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		if (i % 100000 == 0) printf("%d out of %d Completed\n", i, QtBs->QuoteH.Len());
		TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		// Skip quotes whose mentions are too spread out in time.
		if (Qt.GetTmDev() > 7) continue;

		TIntSet DomIdSet;
		for (int j = 0; j < Qt.TmUrlCntV.Len(); j++) {
			TStr UrlStr = QtBs->GetStr(Qt.TmUrlCntV[j].UrlId()); 
			TStr DomStr = GetDomNmFromUrl(UrlStr);
			TMd5Sig DomSig(DomStr);
			if (!DomIdH.IsKey(DomSig)) continue;
			int DomId = DomIdH.GetDat(DomSig);
			if (!LinkDomNet.IsNode(DomId)) continue;
			// Count a domain at most once per quote (first mention wins).
			if (DomIdSet.IsKey(DomId)) continue;
			DomIdSet.AddKey(DomId);
			// Offset from the quote's median mention time, in days.
			double TmDif = (double(Qt.TmUrlCntV[j].Tm().GetAbsSecs()) - double(Qt.GetTmMed().GetAbsSecs())) / 3600 / 24;
			DomTmH.AddDat(DomId) += TmDif;
			DomCntH.AddDat(DomId) += 1;	
		}
	}
	// Turn accumulated sums into per-domain averages.
	for (int i = 0; i < DomTmH.Len(); i++) {
		int DomId = DomTmH.GetKey(i);
		DomTmH[i] = DomTmH[i].Val / DomCntH.GetDat(DomId).Val;
	}
	DomTmH.SortByDat(true);

	// TODO(review): fopen result is not checked; fprintf on NULL is UB if
	// the file cannot be created.
	FILE* F = fopen("STATS_ReportTime.txt", "w");
	for (int i = 0; i < DomTmH.Len(); i++) {
		fprintf(F, "%d  %s : %f\n", DomCntH.GetDat(DomTmH.GetKey(i)).Val, DomStrH.GetDat(DomTmH.GetKey(i)).CStr(), DomTmH[i].Val);
	}
	fclose(F);

	printf("DomTmH Len = %d\n", DomTmH.Len());
}

void TMemeTrend::ComputeTriadH(TNodeEDatNet<TInt, TInt>& DomNet, TIntFltH& TriadH) {
	// For every node of DomNet, count connected pairs among its
	// in-neighbors (an edge in either direction closes the pair), i.e. the
	// number of triangles closed through the node's in-neighborhood.
	// Counts are accumulated into TriadH keyed by node id.
	printf("Computing TriadH....\n");
	TNodeEDatNet<TInt, TInt>::TNodeI NI;
	for (NI = DomNet.BegNI(); NI < DomNet.EndNI(); NI++) {
		// Collect the distinct in-neighbors of this node.
		TIntSet DomIdSet;
		for (int e = 0; e < NI.GetInDeg(); e++) 
			DomIdSet.AddKey(NI.GetInNId(e));
		int Cnt = 0;
		// Count unordered neighbor pairs linked in at least one direction.
		for (int idx1 = 0; idx1 < DomIdSet.Len(); idx1++)
			for (int idx2 = idx1+1; idx2 < DomIdSet.Len(); idx2++) 
				if (DomNet.IsEdge(DomIdSet[idx1], DomIdSet[idx2]) || DomNet.IsEdge(DomIdSet[idx2], DomIdSet[idx1]))
					Cnt += 1;
		TriadH.AddDat(NI.GetId()) += Cnt;
	}
	printf("TriadH Len = %d\n", TriadH.Len());
}

void TMemeTrend::ComputeMixDegH() {
	// Compute weighted degrees over the mixed graph:
	//  - MixInDegH / MixOutDegH: per-domain sum of incident edge weights
	//    (in and out, respectively);
	//  - MixExtInDegH / MixExtOutDegH: per-domain sum of its neighbors'
	//    weighted degrees (a second-order, "external" degree).
	printf("Computing MixDegH...\n");
	for (TNodeEDatNet<TFlt, TFlt>::TNodeI NI = MixDomNet.BegNI(); NI < MixDomNet.EndNI(); NI++) {
		int DomId = NI.GetId();
		MixInDegH.AddDat(DomId) = 0;
		MixOutDegH.AddDat(DomId) = 0;
		for (int e = 0; e < NI.GetInDeg(); e++)
			MixInDegH.AddDat(DomId) += NI.GetInEDat(e).Val;
		for (int e = 0; e < NI.GetOutDeg(); e++) 
			MixOutDegH.AddDat(DomId) += NI.GetOutEDat(e).Val;
	}
	printf("Computing MixExtDegH...\n");
	// Second pass (after all first-order degrees exist): sum neighbors'
	// weighted degrees.
	for (TNodeEDatNet<TFlt, TFlt>::TNodeI NI = MixDomNet.BegNI(); NI < MixDomNet.EndNI(); NI++) {
		int DomId = NI.GetId();
		MixExtInDegH.AddDat(DomId);
		MixExtOutDegH.AddDat(DomId);
		for (int e = 0; e < NI.GetInDeg(); e++)
			MixExtInDegH.AddDat(DomId) += MixInDegH.GetDat(NI.GetInNId(e));
		for (int e = 0; e < NI.GetOutDeg(); e++) 
			MixExtOutDegH.AddDat(DomId) += MixOutDegH.GetDat(NI.GetOutNId(e));
	}
	printf("MixInDegH Len = %d, MixOutDegH Len = %d\n", MixInDegH.Len(), MixOutDegH.Len());
	printf("MixExtIn Len = %d, MixExtOut Len = %d\n", MixExtInDegH.Len(), MixExtOutDegH.Len());
}

void TMemeTrend::ComputeCompH() {
	// Compute "competition" coefficients (hyperlink Jaccard similarity) for
	// domain pairs that co-mention heavily (RawCoMDomNet edge weight >= 200)
	// but are NOT directly hyperlinked in LinkDomNet.  Results are stored under
	// both orientations of each pair in InCompH / OutCompH.
	InCompH.Clr(false);
	OutCompH.Clr(false);
	printf("Computing competition coefficients\n");
	TIntPrSet EdgeSet;
	for (TNodeEDatNet<TInt,TInt>::TEdgeI EI = RawCoMDomNet.BegEI(); EI < RawCoMDomNet.EndEI(); EI++) {
		if (EI.GetDat() < 200) continue; // TEST
		if (LinkDomNet.IsEdge(EI.GetSrcNId(), EI.GetDstNId()) || LinkDomNet.IsEdge(EI.GetDstNId(), EI.GetSrcNId())) continue;
		// Keep each unordered pair once: skip if the reverse orientation is in.
		if (EdgeSet.IsKey(TIntPr(EI.GetDstNId(), EI.GetSrcNId()))) continue;
		EdgeSet.AddKey(TIntPr(EI.GetSrcNId(), EI.GetDstNId()));
	}

	// Similarities go into plain vectors (one fixed slot per pair), so the
	// parallel loop needs no locking.
	TFltV InSimV(EdgeSet.Len()), OutSimV(EdgeSet.Len());
	#pragma omp parallel for
	for (int i = 0; i < EdgeSet.Len(); i++) {
		int SrcId = EdgeSet[i].Val1;
		int DstId = EdgeSet[i].Val2;
		double InSim, OutSim;
		ComputeLinkJaccardSim(InSim, OutSim, SrcId, DstId);
		InSimV[i] = InSim; OutSimV[i] = OutSim;
	}

	// Fold the results into the (non-thread-safe) hashes sequentially.
	for (int i = 0; i < EdgeSet.Len(); i++) {
		int SrcId = EdgeSet[i].Val1;
		int DstId = EdgeSet[i].Val2;
		double InSim = InSimV[i].Val, OutSim = OutSimV[i].Val;
		InCompH.AddDat(TIntPr(SrcId, DstId)) = InSim;
		InCompH.AddDat(TIntPr(DstId, SrcId)) = InSim;
		OutCompH.AddDat(TIntPr(SrcId, DstId)) = OutSim;
		OutCompH.AddDat(TIntPr(DstId, SrcId)) = OutSim;
	}
	printf("Total # of competition pair = %d \n", EdgeSet.Len());
	printf("InCompH Len = %d, OutCompH Len = %d\n", InCompH.Len(), OutCompH.Len());
}

void TMemeTrend::ComputeLinkJaccardSim(double& InSim, double& OutSim, int SrcId, int DstId) {
	InSim = 0; OutSim = 0;
	if (!LinkDomNet.IsNode(SrcId) || !LinkDomNet.IsNode(DstId))	return;

	const TNodeEDatNet<TInt, TInt>::TNodeI SrcNI = LinkDomNet.GetNI(SrcId), DstNI = LinkDomNet.GetNI(DstId);
	TIntSet InSet;
	for (int e = 0; e < SrcNI.GetInDeg(); e++)
		InSet.AddKey(SrcNI.GetInNId(e));
	double InOverlap = 0;
	for (int e = 0; e < DstNI.GetInDeg(); e++) 
		if (InSet.IsKey(DstNI.GetInNId(e))) 
			InOverlap += 1;
	if (DstNI.GetInDeg() + SrcNI.GetInDeg() == 0) 
		InSim = 0;
	else 
		InSim = InOverlap / (SrcNI.GetInDeg() + DstNI.GetInDeg() - InOverlap);

	TIntSet OutSet;
	for (int e = 0; e < SrcNI.GetOutDeg(); e++)
		OutSet.AddKey(SrcNI.GetOutNId(e));
	double OutOverlap = 0;
	for (int e = 0; e < DstNI.GetOutDeg(); e++) 
		if (OutSet.IsKey(DstNI.GetOutNId(e))) 
			OutOverlap += 1;
	if (DstNI.GetOutDeg() + SrcNI.GetOutDeg() == 0) 
		OutSim = 0;
	else 
		OutSim = OutOverlap / (SrcNI.GetOutDeg() + DstNI.GetOutDeg() - OutOverlap);
}

void TMemeTrend::ComputeCoMJaccardSim(double& InSim, double& OutSim, int SrcId, int DstId) {
	InSim = 0; OutSim = 0;
	if (!RawCoMDomNet.IsNode(SrcId) || !RawCoMDomNet.IsNode(DstId))	return;

	const TNodeEDatNet<TInt, TInt>::TNodeI SrcNI = RawCoMDomNet.GetNI(SrcId), DstNI = RawCoMDomNet.GetNI(DstId);
	TIntSet InSet;
	for (int e = 0; e < SrcNI.GetInDeg(); e++)
		InSet.AddKey(SrcNI.GetInNId(e));
	double InOverlap = 0;
	for (int e = 0; e < DstNI.GetInDeg(); e++) 
		if (InSet.IsKey(DstNI.GetInNId(e))) 
			InOverlap += 1;
	if (DstNI.GetInDeg() + SrcNI.GetInDeg() == 0) 
		InSim = 0;
	else 
		InSim = InOverlap / (SrcNI.GetInDeg() + DstNI.GetInDeg() - InOverlap);

	TIntSet OutSet;
	for (int e = 0; e < SrcNI.GetOutDeg(); e++)
		OutSet.AddKey(SrcNI.GetOutNId(e));
	double OutOverlap = 0;
	for (int e = 0; e < DstNI.GetOutDeg(); e++) 
		if (OutSet.IsKey(DstNI.GetOutNId(e))) 
			OutOverlap += 1;
	if (DstNI.GetOutDeg() + SrcNI.GetOutDeg() == 0) 
		OutSim = 0;
	else 
		OutSim = OutOverlap / (SrcNI.GetOutDeg() + DstNI.GetOutDeg() - OutOverlap);
}

// Feature computing

// Build a symmetric NxN adjacency matrix for the domains in DomIdV.  A cell
// holds the larger of the two directed edge weights, normalized by the smaller
// endpoint node weight; 1e10 marks "no edge" (and the diagonal).
void TMemeTrend::GetSubGraphFromDomNet(TVec<TFltV>& DomAdj, const TIntV& DomIdV, const TNodeEDatNet<TFlt, TFlt>& DomNet) {
	const int N = DomIdV.Len();
	const double NoEdge = 1e10;
	DomAdj.Reserve(N);
	for (int r = 0; r < N; r++) { DomAdj.Add(TFltV(N)); }
	for (int r = 0; r < N; r++) {
		const int SrcDomId = DomIdV[r];
		DomAdj[r][r] = NoEdge;
		for (int c = r+1; c < N; c++) {
			const int DstDomId = DomIdV[c];
			// Strongest of the two directed weights; -1 means no edge found.
			double W = -1;
			if (DomNet.IsEdge(SrcDomId, DstDomId)) { W = DomNet.GetEDat(SrcDomId, DstDomId).Val; }
			if (DomNet.IsEdge(DstDomId, SrcDomId) && W < DomNet.GetEDat(DstDomId, SrcDomId)) {
				W = DomNet.GetEDat(DstDomId, SrcDomId); }
			double Cell = NoEdge;
			if (W >= 0) {
				Cell = W / TMath::Mn(DomNet.GetNDat(SrcDomId).Val, DomNet.GetNDat(DstDomId).Val); }
			DomAdj[r][c] = Cell;
			DomAdj[c][r] = Cell;
		}
	}
}

// Integer-net overload of GetSubGraphFromDomNet: identical matrix construction
// over a TNodeEDatNet<TInt, TInt> (weights promoted to double).
void TMemeTrend::GetSubGraphFromDomNet(TVec<TFltV>& DomAdj, const TIntV& DomIdV, const TNodeEDatNet<TInt, TInt>& DomNet) {
	const int N = DomIdV.Len();
	const double NoEdge = 1e10;
	DomAdj.Reserve(N);
	for (int r = 0; r < N; r++) { DomAdj.Add(TFltV(N)); }
	for (int r = 0; r < N; r++) {
		const int SrcDomId = DomIdV[r];
		DomAdj[r][r] = NoEdge;
		for (int c = r+1; c < N; c++) {
			const int DstDomId = DomIdV[c];
			// Strongest of the two directed weights; -1 means no edge found.
			double W = -1;
			if (DomNet.IsEdge(SrcDomId, DstDomId)) { W = DomNet.GetEDat(SrcDomId, DstDomId).Val; }
			if (DomNet.IsEdge(DstDomId, SrcDomId) && W < DomNet.GetEDat(DstDomId, SrcDomId)) {
				W = DomNet.GetEDat(DstDomId, SrcDomId); }
			double Cell = NoEdge;
			if (W >= 0) {
				Cell = W / TMath::Mn(DomNet.GetNDat(SrcDomId).Val, DomNet.GetNDat(DstDomId).Val); }
			DomAdj[r][c] = Cell;
			DomAdj[c][r] = Cell;
		}
	}
}

// Compute edge number
double TMemeTrend::ComputeQtEdgeNum(const TVec<TFltV>& DomAdj) {
	int N = DomAdj.Len();
	double edgeNum = 0;
	for (int i = 0; i < N; i++)
		for (int j = i+1; j < N; j++) {
			if (DomAdj[i][j] < 1e9)
				edgeNum += 1;
		}
	return edgeNum;
}

// Compute triad number
double TMemeTrend::ComputeQtTriadNum(const TVec<TFltV>& DomAdj) {
	int N = DomAdj.Len();
	double triadNum = 0;
	for (int i = 0; i < N; i++)
		for (int j = i+1; j < N; j++)  {
			if (DomAdj[i][j] > 1e9) continue;
			for (int k = j+1; k < N; k++) {
				if (DomAdj[i][k] > 1e9) continue;
				if (DomAdj[j][k] > 1e9) continue;
				triadNum += 1;
			}
		}
	return triadNum;
}

// Compute Gcc related statistics
void TMemeTrend::ComputeQtGcc(const TVec<TFltV>& DomAdj, double& compNum, double& gccNodeNum, double& gccEdgeNum) {
	PUNGraph G = PUNGraph::New();
	for (int i = 0; i < DomAdj.Len(); i++)
		G->AddNode(i);
	for (int i = 0; i < DomAdj.Len(); i++)
		for (int j = i+1; j < DomAdj.Len(); j++) 
			if (DomAdj[i][j] > 0 && DomAdj[i][j] < 1e9)
				G->AddEdge(i,j);
	TCnComV CnComV;
	TSnap::GetWccs(G, CnComV);

	compNum = CnComV.Len();
	gccNodeNum = CnComV[0].NIdV.Len();
	gccEdgeNum = 0;
	for (int i = 0; i < CnComV[0].NIdV.Len(); i++) {
		for (int j = i+1; j < CnComV[0].NIdV.Len(); j++) {
			if (G->IsEdge(CnComV[0].NIdV[i], CnComV[0].NIdV[j]))
				gccEdgeNum += 1;
		}
	}
}

// Compute degree distribution related statistics
void TMemeTrend::ComputeQtDeg(const TVec<TFltV>& DomAdj, double& maxDeg, double& d0NodeNum, double& d1NodeNum) {
	int N = DomAdj.Len();
	maxDeg = 0; d0NodeNum = 0; d1NodeNum = 0;
	for (int i = 0; i < N; i++) {
		int deg = 0;
		for (int j = 0; j < N; j++) {
			if (j == i) continue;
			if (DomAdj[i][j] < 1e9)
				deg += 1;
		}
		if (deg > maxDeg) maxDeg = deg;
		if (deg == 0) d0NodeNum += 1;
		if (deg == 1) d1NodeNum += 1;
	}
}

void TMemeTrend::ComputeQtInOutDeg(const TNodeEDatNet<TInt, TInt>& DomNet, const TIntV& DomIdV, double& AvgInDeg, double& AvgOutDeg) {
	AvgInDeg = 0; AvgOutDeg = 0; 
	for (int i = 0; i < DomIdV.Len(); i++) {
		int DomId = DomIdV[i];
		if (!DomNet.IsNode(DomId)) continue;
		double InDeg = DomNet.GetNI(DomId).GetInDeg();
		double OutDeg = DomNet.GetNI(DomId).GetOutDeg();
		AvgInDeg += log(InDeg + 1);
		AvgOutDeg += log(OutDeg + 1);
	}
	AvgInDeg /= DomIdV.Len();
	AvgOutDeg /= DomIdV.Len();
}

// Compute statistics for each quote
double TMemeTrend::ComputeQtDensity(const TVec<TFltV>& DomAdj) {
	double density = 0; 
	int N = DomAdj.Len(); 
	for (int i = 0; i < N; i++) 
		for (int j = i+1; j < N; j++) {
			if (DomAdj[i][j] < 1e9)
				density += DomAdj[i][j];
		}
	density /= N*(N-1)/2;
	return density;
}

// Weighted clustering coefficient averaged over all nodes: per node, the ratio
// of closed weighted triads to weighted wedges centered at it (weights taken
// from the adjacency matrix; entries > 1e9 mean "no edge").
double TMemeTrend::ComputeQtClusterCoeff(const TVec<TFltV>& DomAdj) {
	const int N = DomAdj.Len();
	double CoeffSum = 0;
	for (int i = 0; i < N; i++) {
		double WedgeW = 0;
		double TriW = 0;
		for (int j = 0; j < N; j++) {
			if (DomAdj[i][j] > 1e9) { continue; }
			for (int k = j+1; k < N; k++) {
				if (DomAdj[i][k] > 1e9) { continue; }
				const double PairW = DomAdj[i][j] * DomAdj[i][k];
				WedgeW += PairW;
				// Closed triad only when the third side also exists.
				if (DomAdj[j][k] <= 1e9) { TriW += PairW * DomAdj[j][k]; }
			}
		}
		if (WedgeW > 0) { CoeffSum += TriW / WedgeW; }
	}
	return CoeffSum / N;
}

double TMemeTrend::ComputeQtMBRatio(const TIntV& DomIdV) {
	double MediaCnt = 0;
	for (int i = 0; i < DomIdV.Len(); i++) {
		int DomId = DomIdV[i];
		if (DomStrH.GetDat(DomId).IsPrefix("M_"))
			MediaCnt++;
	}
	return MediaCnt / DomIdV.Len();
}

// Average DomTmH "report time" over the domains that have one recorded;
// 0 when none of the given domains appears in DomTmH.
double TMemeTrend::ComputeQtReportTime(const TIntV& DomIdV) {
	double TmSum = 0;
	int TmCnt = 0;
	for (int i = 0; i < DomIdV.Len(); i++) {
		const int DomId = DomIdV[i];
		if (!DomTmH.IsKey(DomId)) { continue; }
		TmSum += DomTmH.GetDat(DomId);
		TmCnt++;
	}
	return (TmCnt == 0) ? 0 : TmSum / TmCnt;
}

double TMemeTrend::ComputeQtWordNum(int QtN) {
	TQuote& Qt = QtBs->GetQt(QtN);
	char* CStr = Qt.GetStr().CStr();
	int WrdCnt = 1;
	for (const char *c = CStr; *c; c++) {
		if (TCh::IsWs(*c)) 
			WrdCnt++;
	}
	return double(WrdCnt);	
}

void TMemeTrend::ComputeQtExt(double& ExtNodeNum, double& ExtEdgeNum, const TIntV& DomIdV, const TNodeEDatNet<TInt, TInt>& DomNet, const TIntFltH& TriadH) {
	TIntSet ExtDomIdSet(DomIdV);
	ExtEdgeNum = 0;
	for (int i = 0; i < DomIdV.Len(); i++) {
		if (!DomNet.IsNode(DomIdV[i])) continue;
		TNodeEDatNet<TInt,TInt>::TNodeI NI = DomNet.GetNI(DomIdV[i]);
		for (int e = 0; e < NI.GetInDeg(); e++) {
			int DomId = NI.GetInNId(e);
			ExtDomIdSet.AddKey(DomId);
		}
		ExtEdgeNum += TriadH.GetDat(DomIdV[i]).Val;
	}
	ExtNodeNum = ExtDomIdSet.Len();
}

// Sum the precomputed weighted degree statistics (see ComputeMixDegH) over the
// domains of DomIdV that exist in MixDomNet.
void TMemeTrend::ComputeQtMixDeg(double& MixInDeg, double& MixOutDeg, double& MixExtInDeg, double& MixExtOutDeg, const TIntV& DomIdV) {
	MixInDeg = 0;
	MixOutDeg = 0;
	// BUG FIX: the external-degree outputs were never zeroed, so they kept
	// accumulating on top of the caller's uninitialized doubles.
	MixExtInDeg = 0;
	MixExtOutDeg = 0;
	for (int i = 0; i < DomIdV.Len(); i++) {
		if (!MixDomNet.IsNode(DomIdV[i])) continue;
		MixInDeg += MixInDegH.GetDat(DomIdV[i]).Val;
		MixOutDeg += MixOutDegH.GetDat(DomIdV[i]).Val;
		MixExtInDeg += MixExtInDegH.GetDat(DomIdV[i]);
		MixExtOutDeg += MixExtOutDegH.GetDat(DomIdV[i]);
	}
}

// Entropy-style statistic over the degree distribution of the adjacency matrix
// (entries < 1e9 count as edges).
double TMemeTrend::ComputeQtEntExcessDeg(const TVec<TFltV>& DomAdj) {
	TIntFltH DegCntH;	// degree -> number of nodes with that degree
	for (int i = 0; i < DomAdj.Len(); i++) {
		const TFltV& DataV = DomAdj[i];
		int deg = 0;
		for (int j = 0; j < DataV.Len(); j++)
			if (DataV[j] < 1e9) 
				deg += 1;
		DegCntH.AddDat(deg) += 1;
	}

	// NOTE(review): TotDeg sums the node counts BEFORE they are rescaled to
	// cnt*deg below, so TotDeg equals the node count and AvgDeg is always 1.
	// If the intent was to normalize by total/average degree, the accumulation
	// and the rescaling should be swapped -- confirm before changing, since
	// trained models may depend on the current values.
	double TotDeg = 0;
	for (int i = 0; i < DegCntH.Len(); i++) {
		TotDeg += DegCntH[i];
		DegCntH[i] = DegCntH[i].Val * DegCntH.GetKey(i).Val;
	}

	double EntExcessDeg = 0;
	double AvgDeg = TotDeg / DomAdj.Len(); 
	for (int i = 0; i < DegCntH.Len(); i++) {
		double f = DegCntH[i].Val / AvgDeg + 1e-6;	// 1e-6 keeps log(f) finite
		EntExcessDeg += - f * log(f);
	}
	return EntExcessDeg;
}

// Time span (seconds) between the 10th- and 90th-percentile mentions of quote
// QtN.  Assumes GetUrls() does not exceed TmUrlCntV.Len() -- matches original.
double TMemeTrend::ComputeTimeSpan(int QtN) {
	TQuote& Qt = QtBs->QuoteH[QtN];
	const int Len = Qt.GetUrls();
	// Robustness: a quote with no mentions would index an empty vector.
	if (Len == 0) { return 0; }
	const double BegSecs = Qt.TmUrlCntV[int(Len * 0.1)].Tm().GetAbsSecs();
	const double EndSecs = Qt.TmUrlCntV[int(Len * 0.9)].Tm().GetAbsSecs();
	return EndSecs - BegSecs;
}

void TMemeTrend::ComputeQtCompetitionCoeff(double& InCompCoeff, double& OutCompCoeff, const TIntV& DomIdV) {
	InCompCoeff = 0;
	int Tot = 0;
	for (int i = 0; i < DomIdV.Len(); i++) {
		for (int j = i+1; j < DomIdV.Len(); j++) {
			TIntPr Key = TIntPr(DomIdV[i], DomIdV[j]);
			if (InCompH.IsKey(Key)) {
				InCompCoeff += InCompH.GetDat(Key);
				Tot += 1;
			}
		}
	}
	OutCompCoeff = 0;
	for (int i = 0; i < DomIdV.Len(); i++) {
		for (int j = i+1; j < DomIdV.Len(); j++) {
			TIntPr Key = TIntPr(DomIdV[i], DomIdV[j]);
			if (OutCompH.IsKey(Key))
				OutCompCoeff += OutCompH.GetDat(Key);
		}
	}
}

// Expand DomIdV into a connected set of domains: take the weakly-connected
// components of the DomUNGraph subgraph induced on DomIdV and, whenever a
// component shares a community id (CcH) with an earlier component, BFS through
// the full DomUNGraph to splice in a path reconnecting it to the nodes already
// collected.  Newly added "connector" domains are tallied in TMPConnCntH.
void TMemeTrend::ComputeConnDomIdSet(TIntSet& ConnDomIdSet, const TIntV& DomIdV) {
	PUNGraph SubG = PUNGraph::New();
	ConnDomIdSet.Clr(false);
	for (int i = 0; i < DomIdV.Len(); i++) 
		SubG->AddNode(DomIdV[i]);

	// Induced subgraph: keep only DomUNGraph edges between quote domains.
	for (int i = 0; i < DomIdV.Len(); i++) 
		for (int j = i+1; j < DomIdV.Len(); j++) {
			if (DomUNGraph->IsEdge(DomIdV[i], DomIdV[j]))
				SubG->AddEdge(DomIdV[i], DomIdV[j]);
		}
	TCnComV CnComV;
	TSnap::GetWccs(SubG, CnComV);

	// Compute connection graph
	for (int i = 0; i < CnComV.Len(); i++) {
		// BFS only if this component shares a community with an earlier one
		// (community membership is looked up via the component's first node).
		bool DoBFS = false;
		for (int j = 0; j < i; j++) {
			IAssert(CcH.IsKey(CnComV[i].NIdV[0]));
			IAssert(CcH.IsKey(CnComV[j].NIdV[0]));
			if (CcH.GetDat(CnComV[i].NIdV[0]) == CcH.GetDat(CnComV[j].NIdV[0])) {
				DoBFS = true;
				break;
			}
		}

		if (!DoBFS) {
			ConnDomIdSet.AddKeyV(CnComV[i].NIdV);
			continue;
		}
		// BFS over the full DomUNGraph, seeded with the whole component.
		// FatherH records each node's BFS predecessor (-1 for seeds) so the
		// connecting path can be walked back once a collected node is reached.
		TIntH FatherH; TIntV Queue; TUNGraph::TNodeI NI;
		for (int j = 0; j < CnComV[i].NIdV.Len(); j++)
			FatherH.AddDat(CnComV[i].NIdV[j]) = -1;
		Queue.AddV(CnComV[i].NIdV);
		int OpenIdx = -1;
		while (OpenIdx < Queue.Len() - 1) {
			OpenIdx += 1;
			NI = DomUNGraph->GetNI(Queue[OpenIdx]);
			int DomIdRec = -1;
			for (int e = 0; e < NI.GetDeg(); e++) {
				int DstDomId = NI.GetOutNId(e);
				if (FatherH.IsKey(DstDomId)) continue;
				FatherH.AddDat(DstDomId) = Queue[OpenIdx];
				Queue.Add(DstDomId);
				if (ConnDomIdSet.IsKey(DstDomId)) {
					DomIdRec = DstDomId;
					break;
				}
			}
			if (DomIdRec >= 0) {
				// Walk the predecessor chain back to a seed, adding every path
				// node; fresh connector nodes are counted in TMPConnCntH.
				while (FatherH.GetDat(DomIdRec) >= 0) {
					if (!ConnDomIdSet.IsKey(DomIdRec))
						TMPConnCntH.AddDat(DomIdRec) += 1;
					ConnDomIdSet.AddKey(DomIdRec);
					DomIdRec = FatherH.GetDat(DomIdRec);
				}
				ConnDomIdSet.AddKeyV(CnComV[i].NIdV);
				break;
			}
		}
		// NOTE(review): this holds only when the BFS broke out after finding a
		// connection; if the queue drains without one, the assert fires.
		// Presumably same-community components are always reachable -- confirm.
		IAssert(OpenIdx < Queue.Len() - 1);

	}
}

// Append the full feature vector for quote QtN to FeatureV, computed over its
// first MaxN domains (DomIdV is truncated in place; MaxN <= 0 keeps all).
// Features are appended in a FIXED order, grouped as: hyperlink graph (PROJ),
// non-graphical (NONNET), fringe (EXT), co-mention graph (COM), degree
// distribution (DEG), connection graph (CONN), weighted graph (MIX), and TIME.
// The order must stay aligned with the external FeatureStrV labels.
void TMemeTrend::ComputeFeatureVector(TFltV& FeatureV, const int QtN, TIntV& DomIdV, int MaxN) {
	if (MaxN > 0 && MaxN < DomIdV.Len()) {
		DomIdV.Del(MaxN, DomIdV.Len() - 1);
	}
//printf("1\t");
	TVec<TFltV> LinkDomAdj;
	GetSubGraphFromDomNet(LinkDomAdj, DomIdV, LinkDomNet);

	// Hyperlink graph features PROJ
	{
		double NodeNum = DomIdV.Len();
		FeatureV.Add(NodeNum);

		double EdgeNum = ComputeQtEdgeNum(LinkDomAdj);
		FeatureV.Add(EdgeNum);

		double TriadNum = ComputeQtTriadNum(LinkDomAdj);
		FeatureV.Add(TriadNum);

		double CompNum, GccNodeNum, GccEdgeNum, GccSize;
		ComputeQtGcc(LinkDomAdj, CompNum, GccNodeNum, GccEdgeNum);
		GccSize = GccNodeNum / LinkDomAdj.Len();
		FeatureV.Add(CompNum);
		FeatureV.Add(GccNodeNum);
		FeatureV.Add(GccEdgeNum);
		FeatureV.Add(GccSize);

		double MaxDeg, D0NodeNum, D1NodeNum;
		ComputeQtDeg(LinkDomAdj, MaxDeg, D0NodeNum, D1NodeNum);
		FeatureV.Add(MaxDeg);
		FeatureV.Add(D0NodeNum);
		FeatureV.Add(D1NodeNum);

		double Density = ComputeQtDensity(LinkDomAdj);
		FeatureV.Add(Density);

		double ClusterCoeff = ComputeQtClusterCoeff(LinkDomAdj);
		FeatureV.Add(ClusterCoeff);

		double EntExcessDeg = ComputeQtEntExcessDeg(LinkDomAdj);
		FeatureV.Add(EntExcessDeg);
	}
//printf("2\t");	
	// Non-graphical features NONNET 
	{
		double MBRatio = ComputeQtMBRatio(DomIdV);
		FeatureV.Add(MBRatio);

		double ReportTime = ComputeQtReportTime(DomIdV);
		FeatureV.Add(ReportTime);

		// CharNum approximates the non-whitespace character count: string
		// length minus the (WordNum - 1) separator characters.
		double WordNum = ComputeQtWordNum(QtN);
		double CharNum = QtBs->GetQt(QtN).GetStr().Len() - WordNum + 1;
		FeatureV.Add(WordNum);
		FeatureV.Add(CharNum);
	}
//printf("3\t");
	// Fringe graph features EXT
	{
		// NOTE(review): log(0) = -inf if a fringe count is 0 -- presumably the
		// >= 5 domain requirement upstream keeps these positive; confirm.
		double ExtLinkNodeNum, ExtLinkEdgeNum;
		ComputeQtExt(ExtLinkNodeNum, ExtLinkEdgeNum, DomIdV, LinkDomNet, LinkTriadH);
		FeatureV.Add(log(ExtLinkNodeNum));
		FeatureV.Add(log(ExtLinkEdgeNum));
//printf("3.5\t");	
		double ExtCoMNodeNum, ExtCoMEdgeNum;
		ComputeQtExt(ExtCoMNodeNum, ExtCoMEdgeNum, DomIdV, CoMDomNet, CoMTriadH);
		FeatureV.Add(log(ExtCoMNodeNum));
		FeatureV.Add(log(ExtCoMEdgeNum));
	}
//printf("4\t");
	// CoMention graph features	COM
	TVec<TFltV> CoMDomAdj, RawCoMDomAdj;
	GetSubGraphFromDomNet(CoMDomAdj, DomIdV, CoMDomNet);
	GetSubGraphFromDomNet(RawCoMDomAdj, DomIdV, RawCoMDomNet);
	{
		double NodeNum = DomIdV.Len();
		FeatureV.Add(NodeNum);

		double EdgeNum = ComputeQtEdgeNum(CoMDomAdj);
		FeatureV.Add(EdgeNum);

		double TriadNum = ComputeQtTriadNum(CoMDomAdj);
		FeatureV.Add(TriadNum);

		double CompNum, GccNodeNum, GccEdgeNum, GccSize;
		ComputeQtGcc(CoMDomAdj, CompNum, GccNodeNum, GccEdgeNum);
		GccSize = GccNodeNum / CoMDomAdj.Len();
		FeatureV.Add(CompNum);
		FeatureV.Add(GccNodeNum);
		FeatureV.Add(GccEdgeNum);
		FeatureV.Add(GccSize);

		double MaxDeg, D0NodeNum, D1NodeNum;
		ComputeQtDeg(CoMDomAdj, MaxDeg, D0NodeNum, D1NodeNum);
		FeatureV.Add(MaxDeg);
		FeatureV.Add(D0NodeNum);
		FeatureV.Add(D1NodeNum);

		double CoMDensity = ComputeQtDensity(RawCoMDomAdj); // Use Raw Graph
		FeatureV.Add(CoMDensity);

		double CoMClusterCoeff = ComputeQtClusterCoeff(RawCoMDomAdj); // Use Raw Graph
		FeatureV.Add(CoMClusterCoeff);

		double CoMEntExcessDeg = ComputeQtEntExcessDeg(CoMDomAdj);
		FeatureV.Add(CoMEntExcessDeg);
	}
//printf("5\t");
	// Degree distribution features DEG
	{
		double AvgInDeg, AvgOutDeg;
		ComputeQtInOutDeg(LinkDomNet, DomIdV, AvgInDeg, AvgOutDeg);
		FeatureV.Add(AvgInDeg);
		FeatureV.Add(AvgOutDeg);

		double CoMAvgInDeg, CoMAvgOutDeg;
		ComputeQtInOutDeg(CoMDomNet, DomIdV, CoMAvgInDeg, CoMAvgOutDeg);
		FeatureV.Add(CoMAvgInDeg);
		FeatureV.Add(CoMAvgOutDeg);

		// NOTE(review): log(0) = -inf if a quote has no MixDomNet presence.
		double MixInDeg, MixOutDeg, MixExtInDeg, MixExtOutDeg;
		ComputeQtMixDeg(MixInDeg, MixOutDeg, MixExtInDeg, MixExtOutDeg, DomIdV);
		FeatureV.Add(log(MixInDeg));
		FeatureV.Add(log(MixOutDeg));
		FeatureV.Add(log(MixExtInDeg));
		FeatureV.Add(log(MixExtOutDeg));
	}
//printf("6\t");
	// Connection Graph Features CONN
	TIntSet ConnDomIdSet;
	ComputeConnDomIdSet(ConnDomIdSet, DomIdV);

	TVec<TFltV> ConnDomAdj; 
	TIntV ConnDomIdV;
	ConnDomIdSet.GetKeyV(ConnDomIdV);
	GetSubGraphFromDomNet(ConnDomAdj, ConnDomIdV, LinkDomNet);
	{
		double ConnNodeNum = ConnDomIdV.Len();
		FeatureV.Add(ConnNodeNum);

		double ConnEdgeNum = ComputeQtEdgeNum(ConnDomAdj);
		FeatureV.Add(ConnEdgeNum);

		double ConnTriadNum = ComputeQtTriadNum(ConnDomAdj);
		FeatureV.Add(ConnTriadNum);

		double ConnCompNum, ConnGccNodeNum, ConnGccEdgeNum, ConnGccSize;
		ComputeQtGcc(ConnDomAdj, ConnCompNum, ConnGccNodeNum, ConnGccEdgeNum);
		ConnGccSize = ConnGccNodeNum / ConnDomAdj.Len();
		FeatureV.Add(ConnCompNum);
		FeatureV.Add(ConnGccNodeNum);
		FeatureV.Add(ConnGccEdgeNum);
		FeatureV.Add(ConnGccSize);

		double ConnMaxDeg, ConnD0NodeNum, ConnD1NodeNum;
		ComputeQtDeg(ConnDomAdj, ConnMaxDeg, ConnD0NodeNum, ConnD1NodeNum);
		FeatureV.Add(ConnMaxDeg);
		FeatureV.Add(ConnD0NodeNum);
		FeatureV.Add(ConnD1NodeNum);

		double ConnDensity = ComputeQtDensity(ConnDomAdj);
		FeatureV.Add(ConnDensity);

		double ConnClusterCoeff = ComputeQtClusterCoeff(ConnDomAdj);
		FeatureV.Add(ConnClusterCoeff);
	}
//printf("7\t");
	// Weighted hyperlink graph MIX
	{
		TVec<TFltV> MixDomAdj;
		GetSubGraphFromDomNet(MixDomAdj, DomIdV, MixDomNet);
		double MixNodeNum = DomIdV.Len();
		FeatureV.Add(MixNodeNum);

		double MixEdgeNum = ComputeQtEdgeNum(MixDomAdj);
		FeatureV.Add(MixEdgeNum);

		double MixTriadNum = ComputeQtTriadNum(MixDomAdj);
		FeatureV.Add(MixTriadNum);

		double MixCompNum, MixGccNodeNum, MixGccEdgeNum, MixGccSize;
		ComputeQtGcc(MixDomAdj, MixCompNum, MixGccNodeNum, MixGccEdgeNum);
		MixGccSize = MixGccNodeNum / MixDomAdj.Len();
		FeatureV.Add(MixCompNum);
		FeatureV.Add(MixGccNodeNum);
		FeatureV.Add(MixGccEdgeNum);
		FeatureV.Add(MixGccSize);

		double MixMaxDeg, MixD0NodeNum, MixD1NodeNum;
		ComputeQtDeg(MixDomAdj, MixMaxDeg, MixD0NodeNum, MixD1NodeNum);
		FeatureV.Add(MixMaxDeg);
		FeatureV.Add(MixD0NodeNum);
		FeatureV.Add(MixD1NodeNum);

		double MixDensity = ComputeQtDensity(MixDomAdj);
		FeatureV.Add(MixDensity);

		double MixClusterCoeff = ComputeQtClusterCoeff(MixDomAdj);
		FeatureV.Add(MixClusterCoeff);

		double MixInCompCoeff, MixOutCompCoeff;
		ComputeQtCompetitionCoeff(MixInCompCoeff, MixOutCompCoeff, DomIdV);
		FeatureV.Add(MixInCompCoeff);
		FeatureV.Add(MixOutCompCoeff);
	}
//printf("8\t");
	// Time feature TIME
	{
		// Days between the 20th- and 80th-percentile mentions, capped at 5.
		int BegIdx = int (DomIdV.Len() * 0.2);
		int EndIdx = int (DomIdV.Len() * 0.8);
		double TimeSpan = (QtBs->QuoteH[QtN].TmUrlCntV[EndIdx].Tm().GetAbsSecs() - QtBs->QuoteH[QtN].TmUrlCntV[BegIdx].Tm().GetAbsSecs()) / 3600.0 / 24.0; 
		if (TimeSpan > 5) TimeSpan = 5;
		FeatureV.Add(TimeSpan);
	}
//printf("\n");
}

// Collect the distinct domain ids mentioning quote QtN, in order of first
// mention.  If MaxN >= 0 the scan stops after MaxN distinct domains; if
// MaxN < 0 it instead stops TmCnt TmUnit-s after the quote's first mention.
// Only domains known to DomIdH and present in LinkDomNet are returned; the
// IsUseOnlyBlogDom / IsUseOnlyMediaDom switches further filter by URL type.
void TMemeTrend::GetQtDomIdV(TIntV& DomIdV, int QtN, int MaxN, const TTmUnit& TmUnit, int TmCnt) {
	DomIdV.Clr(false);
	TIntSet DomIdSet;
	TQuote& Qt = QtBs->GetQt(QtN);
	const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
	TSecTm EndTm = Qt.TmUrlCntV[0].Tm();
	EndTm += TmUnitSecs * TmCnt;
	int TotDom = 0;
	for (int i = 0; i < Qt.TmUrlCntV.Len(); i++) {
		TSecTm Tm = Qt.TmUrlCntV[i].Tm();
		if (MaxN < 0 && Tm > EndTm) break;		
		if (MaxN >= 0 && TotDom >= MaxN) break;
		TStr DomStr = GetDomNmFromUrl(QtBs->GetStr(Qt.TmUrlCntV[i].UrlId()));
		if (DomIdH.IsKey(TMd5Sig(DomStr))) {
			int DomId = DomIdH.GetDat(TMd5Sig(DomStr));
			if (LinkDomNet.IsNode(DomId) && !DomIdSet.IsKey(DomId)) {
				// NOTE(review): TotDom counts a domain even when the media/blog
				// filters below reject it, so MaxN caps the domains *seen*,
				// not the domains kept -- confirm this is intended.
				TotDom += 1;
				if (QtBs->GetUrlTy(Qt.TmUrlCntV[i].UrlId()) == utMedia && IsUseOnlyBlogDom) continue;
				if (QtBs->GetUrlTy(Qt.TmUrlCntV[i].UrlId()) != utMedia && IsUseOnlyMediaDom) continue;
				DomIdSet.AddKey(DomId);
				DomIdV.Add(DomId);
			}
		}
	}	
	if (DomIdSet.Len() == 0) {
		printf("Warning : DomIdSet Len = 0\n");
	}
}

// Compute a feature vector for every eligible quote (non-centroid, >= 5 URLs,
// >= 5 resolvable domains) from its first VBegDomNum domains, keyed by quote
// index in FeatureVH, and save the result to a "<Pref>-Feature...bin" file.
void TMemeTrend::ComputeAllFeatureVectors(int VBegDomNum, THash<TInt, TFltV>& FeatureVH) {
	printf("Computing feature vectors for all quotes with %d beginning mentions\n", VBegDomNum);
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		// BUG FIX: the progress check used "i++", incrementing the loop counter
		// a second time -- every other quote was skipped and the last iteration
		// could index one past the end of QuoteH.
		if (i % 100 == 0) printf("%d out of %d Completed\n", i, QtBs->QuoteH.Len());
		TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		//if (Qt.GetTmDev() > 7) continue;
		if (Qt.GetUrls() < 5) continue;
		TIntV DomIdV;
		GetQtDomIdV(DomIdV, i, VBegDomNum);
		if (DomIdV.Len() < 5) continue; 
		TFltV FeatureV;
		ComputeFeatureVector(FeatureV, i, DomIdV, VBegDomNum);
		FeatureVH.AddDat(i) = FeatureV;
	}

	TStr FNm = TStr::Fmt("%s-Feature%d%d%d%d-%d.bin", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
	{TFOut FOut(FNm);	FeatureVH.Save(FOut);}
}

void TMemeTrend::ComputeAllFeatures() {
	for (int h = 1; h < 20; h++) {
		printf("Computing features for all memes for the first %d hours\n", h);
		TStr FNm = TStr::Fmt("%s-Features-%dHour.bin", Pref.CStr(), h);
		THash<TInt, TFltV> FeatureVH;
		for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
			if (i % 10000 == 0) printf("%d out of %d completed\n", i, QtBs->QuoteH.Len());
			TIntV DomIdV;
			GetQtDomIdV(DomIdV, i, -1, tmu1Hour, h);
			if (DomIdV.Len() < 5) continue;
			TFltV FeatureV;
			ComputeFeatureVector(FeatureV, i, DomIdV, -1);
			FeatureVH.AddDat(i) = FeatureV;
		}
		TFOut FOut(FNm);
		FeatureVH.Save(FOut);
	}
	printf("DONE\n");
}

// Feature properties
void TMemeTrend::PlotPopVSFeatureVal(int VBegDomNum, bool IsFeatureReady) {
	THash<TInt, TFltV> FeatureVH;
	if (IsFeatureReady) {
		TStr FNm = TStr::Fmt("%s-Feature%d%d%d%d-%d.bin", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
		{TFIn FIn(FNm); FeatureVH.Load(FIn);}
	} else {
		ComputeAllFeatureVectors(VBegDomNum, FeatureVH);
	}

	printf("Start computing log popularity versus feature value plot\n");
  THash<TInt, TFltFltH> FSumH, FSqrH;
	THash<TInt, TFltFltH> FCntH;

	for (int i = 0; i < FeatureVH.Len(); i++) {
		int QtN = FeatureVH.GetKey(i);
		TFltV& FeatureV = FeatureVH[i];

		// Log popularity plot
		double Pop = QtBs->QuoteH[QtN].GetUrls();

		for (int j = 0; j < FeatureV.Len(); j++) {
			double BinVal = int(FeatureV[j].Val / IntvlV[j].Val + 0.5) * IntvlV[j].Val;
			TFltFltH& SubSumH = FSumH.AddDat(j), &SubSqrH = FSqrH.AddDat(j), &SubCntH = FCntH.AddDat(j);
			SubSumH.AddDat(BinVal) += Pop;
			SubSqrH.AddDat(BinVal) += Pop * Pop;
			SubCntH.AddDat(BinVal) += 1;
		}
	}
	FSumH.SortByKey(false);
	FSqrH.SortByKey(false);
	FCntH.SortByKey(false);
	for (int i = 0; i < FSumH.Len(); i++) {
		IAssert(FSumH.GetKey(i) == FSqrH.GetKey(i));
		IAssert(FSumH.GetKey(i) == FCntH.GetKey(i));
		TFltFltH& SubSumH = FSumH[i], &SubSqrH = FSqrH[i], &SubCntH = FCntH[i];	
		SubSumH.SortByKey(false);
		SubSqrH.SortByKey(false);
		SubCntH.SortByKey(false);
	}
	for (int i = 0; i < FSumH.Len(); i++) {
		TFltFltH& SubSumH = FSumH[i], &SubSqrH = FSqrH[i], &SubCntH = FCntH[i];
		for (int j = 0; j < SubSumH.Len(); j++) {
			IAssert(SubSumH.GetKey(j) == SubSqrH.GetKey(j));
			IAssert(SubSumH.GetKey(j) == SubCntH.GetKey(j));
			SubSumH[j] /= SubCntH[j].Val;
			double sqrdev = SubSqrH[j].Val / SubCntH[j].Val - (SubSumH[j].Val * SubSumH[j].Val) + 1e-4;
			IAssert(sqrdev >= 0);
			SubSqrH[j] = sqrt(sqrdev);
		}
	}
	// Generate log popularity versus feature value plot
	for (int j = 0; j < FeatureStrV.Len(); j++) {
		TFltPrV XYV;
		TFltV DeltaV;
		TFltFltH& SubSumH = FSumH.GetDat(j), &SubSqrH = FSqrH.GetDat(j), &SubCntH = FCntH.GetDat(j);
		for (int i = 0; i < SubSumH.Len(); i++) {
			if (SubCntH[i] < 10) continue;
			XYV.Add(TFltPr(SubSumH.GetKey(i).Val, SubSumH[i].Val));
			DeltaV.Add(SubSqrH[i].Val);
		}
		TStr FNm = TStr::Fmt("%s-PopFVal%d-%s", Pref.CStr(), VBegDomNum, FeatureStrV[j].ToLc().CStr());
		TGnuPlot Gp(FNm, FeatureStrV[j]); 
		Gp.AddErrBar(XYV, DeltaV);
		Gp.AddPlot(XYV);
		Gp.SetScale(gpsAuto);
		Gp.SavePng();
	}
}

// For each feature, plot the binned value distribution of three quote groups:
// high-volume (popularity > HiThresh), low-volume (popularity < LoThresh), and
// a random baseline built from 20000 synthetic domain sets sampled from the
// empirical domain-mention distribution.  Feature vectors are loaded from disk
// when IsFeatureReady, else recomputed.
void TMemeTrend::PlotFeatureValDistribution(int VBegDomNum, bool IsFeatureReady) {
	THash<TInt, TFltV> FeatureVH;
	if (IsFeatureReady) {
		TStr FNm = TStr::Fmt("%s-Feature%d%d%d%d-%d.bin", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
		{TFIn FIn(FNm); FeatureVH.Load(FIn);}
	} else {
		ComputeAllFeatureVectors(VBegDomNum, FeatureVH);
	}

	// One (bin value -> count) hash per feature, for each of the three groups.
	int FeatureLen = FeatureStrV.Len();
	TVec<THash<TFlt, TFlt> > HiValCntHV(FeatureLen, FeatureLen), LoValCntHV(FeatureLen, FeatureLen), RndValCntHV(FeatureLen, FeatureLen);
	double HiCnt = 0, LoCnt = 0, RndCnt = 0;

	for (int i = 0; i < FeatureVH.Len(); i++) {
		int QtN = FeatureVH.GetKey(i);
		TFltV& FeatureV = FeatureVH[i];

		// Log popularity plot
		double Pop = QtBs->QuoteH[QtN].GetUrls();

		// Probability Density stats
		// Quotes with popularity in [LoThresh, HiThresh] fall in neither group.
		if (Pop > HiThresh) {
			HiCnt += 1;
			for (int j = 0; j < FeatureV.Len(); j++) {
				double BinNum = int(FeatureV[j] / IntvlV[j] + 0.5) * IntvlV[j];
				HiValCntHV[j].AddDat(BinNum) += 1;
			}
		} else if (Pop < LoThresh) {
			LoCnt += 1;
			for (int j = 0; j < FeatureV.Len(); j++) {
				double BinNum = int(FeatureV[j] / IntvlV[j] + 0.5) * IntvlV[j];
				LoValCntHV[j].AddDat(BinNum) += 1;
			}
		}
	}

	// Random baseline
	// Count how often each (LinkDomNet-resolvable) domain is mentioned across
	// all non-centroid, recent quotes.
	printf("Computing random baseline stats\n");
	THash<TInt, TFlt> DomCntH; double TotCnt = 0;
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		if (Qt.GetTmDev() > 7) continue;
		for (int j = 0; j < Qt.TmUrlCntV.Len(); j++) {
			TStr UrlStr = QtBs->GetStr(Qt.TmUrlCntV[j].UrlId());
			TStr DomStr = GetDomNmFromUrl(UrlStr);
			TMd5Sig DomSig(DomStr);
			if (!DomIdH.IsKey(DomSig)) continue;
			int DomId = DomIdH.GetDat(DomSig);
			if (!LinkDomNet.IsNode(DomId)) continue;
			DomCntH.AddDat(DomId) += 1;
			TotCnt += 1;
		}
	}

	// Cumulative distribution, for random domain id
	// Convert the counts in place into a CDF over hash slots (last entry ~ 1).
	for (int i = 0; i < DomCntH.Len(); i++) {
		DomCntH[i] = DomCntH[i] / TotCnt;
		if (i > 0) DomCntH[i] = DomCntH[i] + DomCntH[i-1];
	}
	printf("DomCntH Len = %d\n", DomCntH.Len());
	IAssert(fabs(DomCntH[DomCntH.Len()-1].Val - 1) < 1e-6);
	TRnd RndS; RndS.Randomize();
	for (int i = 0; i < 20000; i++) {
		if (i % 2000 == 0) printf("%d out of %d completed\n", i, 20000);
		// Draw VBegDomNum distinct domains by inverse-CDF sampling: binary
		// search for the first CDF slot >= a uniform draw.
		TIntSet DomIdSet;	
		while (DomIdSet.Len() < VBegDomNum) {
			double s = RndS.GetUniDev();
			int lo = 0, hi = DomCntH.Len() - 1;
			while (lo < hi) {
				int mid = (lo + hi) / 2;
				IAssert(mid >= 0 && mid < DomCntH.Len() - 1);
				if (DomCntH[mid] >= s) hi = mid; else lo = mid+1;
			}
			IAssert(lo < DomCntH.Len() && lo >= 0);
			DomIdSet.AddKey(DomCntH.GetKey(lo));
		}
		TIntV DomIdV; 
		for (int j = 0; j < DomIdSet.Len(); j++) 
			DomIdV.Add(DomIdSet[j]);
		// NOTE(review): the quote index passed here is random -- presumably only
		// the graph features of the sampled DomIdV matter for the baseline, but
		// the NONNET/TIME features will come from an unrelated quote; confirm.
		TFltV FeatureV;
		ComputeFeatureVector(FeatureV, int(RndS.GetUniDev()*QtBs->QuoteH.Len()), DomIdV, VBegDomNum); 

		RndCnt += 1;
		for (int j = 0; j < FeatureV.Len(); j++) {
			double BinNum = int(FeatureV[j] / IntvlV[j] + 0.5) * IntvlV[j];
			RndValCntHV[j].AddDat(BinNum) += 1;
		}
	}
	// Normalize counts to per-group frequencies and emit one plot per feature.
	for (int j = 0; j < FeatureStrV.Len(); j++) {
		for (int i = 0; i < HiValCntHV[j].Len(); i++)
			HiValCntHV[j][i] = HiValCntHV[j][i].Val / HiCnt;
		for (int i = 0; i < LoValCntHV[j].Len(); i++)
			LoValCntHV[j][i] = LoValCntHV[j][i].Val / LoCnt;
		for (int i = 0; i < RndValCntHV[j].Len(); i++)
			RndValCntHV[j][i] = RndValCntHV[j][i].Val / RndCnt;
		TStr FNm = TStr::Fmt("%s-FValPdf%d-%s", Pref.CStr(), VBegDomNum, FeatureStrV[j].ToLc().CStr());
		TGnuPlot::PlotValCntH(HiValCntHV[j], "High Vol", LoValCntHV[j], "Low Vol", RndValCntHV[j], "Random", FNm, FeatureStrV[j], "Value", "Probability Density"); 
	}
}

// Machine learning tasks
// Build and dump data for the binary classification task (high- vs low-volume
// quotes). Writes three files: a feature-based regression input (ML_*-FReg*),
// a human-readable per-quote feature dump (STATS_FeatureRegress.txt), and a
// domain-indicator regression input (ML_*-DReg*).
// VBegDomNum: number of initial domains used when computing feature vectors.
// IsFeatureReady: when true, load precomputed feature vectors from disk
//   instead of recomputing them.
// NOTE(review): fopen() results are not checked for NULL anywhere below.
void TMemeTrend::DumpClassificationTaskData(int VBegDomNum, bool IsFeatureReady) {
	THash<TInt, TFltV> FeatureVH;	// quote id -> feature vector
	if (IsFeatureReady) {
		TStr FNm = TStr::Fmt("%s-Feature%d%d%d%d-%d.bin", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
		{TFIn FIn(FNm); FeatureVH.Load(FIn);}
	} else {
		ComputeAllFeatureVectors(VBegDomNum, FeatureVH);
	}

	// Label quotes: 1 = high volume (>= HiThresh urls), 0 = low (<= LoThresh).
	// Quotes strictly between the two thresholds get no label at all.
	TIntH LabelH;
	for (int i = 0; i < FeatureVH.Len(); i++) {
		int QtN = FeatureVH.GetKey(i);
		int Pop = QtBs->QuoteH[QtN].GetUrls();
		if (Pop >= HiThresh) LabelH.AddDat(QtN) = 1;
		if (Pop <= LoThresh) LabelH.AddDat(QtN) = 0;
	}
		
	// Dump feature stats to MATLAB for regression
	TRnd Rnd; Rnd.Randomize();
	// Sort by label so negatives (0) occupy indices [0, NegSampleNum) and
	// positives (1) occupy [NegSampleNum, Len). All index-based sampling
	// below relies on this layout.
	LabelH.SortByDat(true); 
	int PosSampleNum = 0, NegSampleNum = 0;
	for (int i = 0; i < LabelH.Len(); i++) {
		if (LabelH[i] == 1) PosSampleNum += 1; else NegSampleNum += 1;
	}
	printf("Pos sample number = %d, Neg sample number = %d\n", PosSampleNum, NegSampleNum);

	// Sample at most 1000 positives and up to 5x as many negatives.
	int PSampleNum = TMath::Mn(1000, PosSampleNum);
	int NSampleNum = TMath::Mn(PSampleNum*5, NegSampleNum);
	TIntV LabelV; TVec<TFltV> FeatureVV; TIntV QtIdV;
	// Uniformly sample NSampleNum distinct negative indices.
	TIntSet NegSet;
	while (NegSet.Len() < NSampleNum) {
		NegSet.AddKey(int(Rnd.GetUniDev()*NegSampleNum));
	}
	for (int i = 0; i < NegSet.Len(); i++) {
		int QtN = LabelH.GetKey(NegSet[i]);
		IAssert(LabelH.GetDat(QtN) == 0);
		LabelV.Add(0);
		TFltV& FeatureV = FeatureVH.GetDat(QtN);
		FeatureVV.Add(FeatureV);
		QtIdV.Add(QtN);
	}
	// Sample PSampleNum distinct positive indices (offset by NegSampleNum).
	// When positives are scarce (target >= half the pool) switch to a
	// round-robin acceptance scheme to avoid a long rejection-sampling loop.
	TIntSet PosSet;
	if (PSampleNum*2 < PosSampleNum) {
		while (PosSet.Len() < PSampleNum) {
			PosSet.AddKey(NegSampleNum + int(Rnd.GetUniDev()*PosSampleNum));
		}
	} else {
		int idx = 0;
		while (PosSet.Len() < PSampleNum) {
			if (Rnd.GetUniDev() * PosSampleNum < PSampleNum)
				PosSet.AddKey(NegSampleNum + idx);
			idx = (idx+1) % PosSampleNum;
		}
	}
	IAssert(PosSet.Len() == PSampleNum);
	IAssert(NegSet.Len() == NSampleNum);
	for (int i = 0; i < PosSet.Len(); i++) {
		int QtN = LabelH.GetKey(PosSet[i]);
		IAssert(LabelH.GetDat(QtN) == 1);
		LabelV.Add(1);
		TFltV& FeatureV = FeatureVH.GetDat(QtN);
		FeatureVV.Add(FeatureV);
		QtIdV.Add(QtN);
	}

	// Logistic regression based on feature
	// File layout: header (neg count, pos count, #features), one row of
	// labels, one feature row per sample, then one row of quote ids.
	TStr FNm1 = TStr::Fmt("ML_%s-FReg%d%d%d%d-%d.txt", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
	FILE* FML1 = fopen(FNm1.CStr(), "w");

	fprintf(FML1, "%d\t%d\t%d\n", NSampleNum, PSampleNum, FeatureStrV.Len());
	for (int i = 0; i < LabelV.Len(); i++) 
		fprintf(FML1, "%d\t", LabelV[i].Val);
	fprintf(FML1, "\n");
	for (int i = 0; i < FeatureVV.Len(); i++) {
		for (int j = 0; j < FeatureVV[i].Len(); j++) {
			fprintf(FML1, "%f\t", FeatureVV[i][j].Val);
		}
		fprintf(FML1, "\n");
	}
	for (int i = 0; i < QtIdV.Len(); i++)
		fprintf(FML1, "%d\t", QtIdV[i].Val);
	fclose(FML1);

	// Human-readable dump: each sampled quote with its mentions and features.
	FILE* FSTATS1 = fopen("STATS_FeatureRegress.txt", "w");
	for (int i = 0; i < QtIdV.Len(); i++) {
		int QtN = QtIdV[i];
		TQuote& Qt = QtBs->QuoteH[QtN];
		fprintf(FSTATS1, "%d\t%s\t%d\n", Qt.GetUrls(), Qt.GetStr().CStr(), QtN);
		for (int u = 0; u < Qt.GetUrls(); u++) {
			fprintf(FSTATS1, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(), 
			QtBs->GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", QtBs->GetStr(Qt.TmUrlCntV[u].UrlId()));
		}
		for (int j = 0; j < FeatureVV[i].Len(); j++) 
			fprintf(FSTATS1, "%d %s = %.02lf\n", j+1, FeatureStrV[j].CStr(), FeatureVV[i][j].Val); 
	}
	fclose(FSTATS1);
	
	// Logistic regression based on domain
	// Features are 0/1 indicators: "did domain d mention this quote".
	TStr FNm2 = TStr::Fmt("ML_%s-DReg%d%d%d%d-%d.txt", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
	FILE* FML2 = fopen(FNm2.CStr(), "w");
	TIntH MLDomCntH;	// domain id -> # sampled quotes it appears in
	for (int i = 0; i < PSampleNum; i++) {
		int QtN1 = LabelH.GetKey(PosSet[i]); 
		TIntV DomIdV1;
		GetQtDomIdV(DomIdV1, QtN1, VBegDomNum);
		for (int j = 0; j < DomIdV1.Len(); j++)
			MLDomCntH.AddDat(DomIdV1[j]) += 1;
	}
	for (int i = 0; i < NSampleNum; i++) {
		int QtN2 = LabelH.GetKey(NegSet[i]);
		TIntV DomIdV2;
		GetQtDomIdV(DomIdV2, QtN2, VBegDomNum);
		for (int j = 0; j < DomIdV2.Len(); j++) 
			MLDomCntH.AddDat(DomIdV2[j]) += 1;
	}

	// Keep only domains that occur in at least 2 sampled quotes.
	TIntSet MLDomSet;
	for (int i = 0; i < MLDomCntH.Len(); i++)
		if (MLDomCntH[i] > 1) MLDomSet.AddKey(MLDomCntH.GetKey(i));
	fprintf(FML2, "%d\t%d\t%d\n", NSampleNum, PSampleNum, MLDomSet.Len());
	// Label row: positives first here (note this is the opposite order of the
	// feature rows below, which write positives first as well).
	for (int i = 0; i < PSampleNum; i++) 
		fprintf(FML2, "1\t");
	for (int i = 0; i < NSampleNum; i++)
		fprintf(FML2, "0\t");
	fprintf(FML2, "\n");

	for (int i = 0; i < PosSet.Len(); i++) {
		int QtN = LabelH.GetKey(PosSet[i]);
		TIntV DomIdV;
		GetQtDomIdV(DomIdV, QtN, VBegDomNum); 
		TIntSet DomIdSet(DomIdV);
		for (int j = 0; j < MLDomSet.Len(); j++) 
			if (DomIdSet.IsKey(MLDomSet[j])) 
				fprintf(FML2, "1\t");
			else 
				fprintf(FML2, "0\t");
		fprintf(FML2, "\n");
	}
	for (int i = 0; i < NegSet.Len(); i++) {
		int QtN = LabelH.GetKey(NegSet[i]);
		TIntV DomIdV;
		GetQtDomIdV(DomIdV, QtN, VBegDomNum);
		TIntSet DomIdSet(DomIdV);
		for (int j = 0; j < MLDomSet.Len(); j++) 
			if (DomIdSet.IsKey(MLDomSet[j])) 
				fprintf(FML2, "1\t");
			else 
				fprintf(FML2, "0\t");
		fprintf(FML2, "\n");
	}
	fclose(FML2);
	printf("DONE\n");
}

// Dump data for the differentiation task: for each quote pair write the two
// quotes' raw feature vectors (ML_*-DifTask-*) and the signed per-feature
// differences in both directions (ML_*-DifReg-*).
// QtPrV: pairs of quote ids to compare.
// VBegDomNum: number of initial domains used when computing features.
// IsFeatureReady: when true, seed FeatureVH from the precomputed binary file
//   (entries for the paired quotes are recomputed and overwritten below).
void TMemeTrend::DumpDifferentiationTaskData(const TIntPrV& QtPrV, int VBegDomNum, bool IsFeatureReady) {
	THash<TInt, TFltV> FeatureVH;
	if (IsFeatureReady) {
		TStr FNm = TStr::Fmt("%s-Feature%d%d%d%d-%d.bin", Pref.CStr(), LoThresh, HiThresh, IsUseOnlyBlogDom, IsUseOnlyMediaDom, VBegDomNum);
		{TFIn FIn(FNm); FeatureVH.Load(FIn);}
	} else {
		ComputeAllFeatureVectors(VBegDomNum, FeatureVH);
	}

	printf("Discrimination Task\n");
	TStr FNm = TStr::Fmt("ML_%s-DifTask-%d%.0f%.0f.txt", Pref.CStr(), VSameCount, VMinMultiple, VMaxDifference*10);

	FILE* F = fopen(FNm.CStr(), "w");
	fprintf(F, "%d\n", QtPrV.Len());

	for (int i = 0; i < QtPrV.Len(); i++) {
		int QtN1 = QtPrV[i].Val1; int QtN2 = QtPrV[i].Val2;
		TIntV DomIdV1, DomIdV2;
		TFltV FeatureV1, FeatureV2;
		// BUG FIX: the domain lists must be built from the quote ids QtN1/QtN2.
		// The original passed the pair index i to both calls, so every feature
		// vector was computed from the wrong quote's domain list.
		GetQtDomIdV(DomIdV1, QtN1, VBegDomNum);
		GetQtDomIdV(DomIdV2, QtN2, VBegDomNum);
		ComputeFeatureVector(FeatureV1, QtN1, DomIdV1, VBegDomNum);
		ComputeFeatureVector(FeatureV2, QtN2, DomIdV2, VBegDomNum);
		// Cache the fresh vectors; the difference file below reads them back.
		FeatureVH.AddDat(QtN1) = FeatureV1;
		FeatureVH.AddDat(QtN2) = FeatureV2;
		for (int j = 0; j < FeatureStrV.Len(); j++) {
			fprintf(F, "%f\t", FeatureV1[j].Val);
		}
		fprintf(F, "\n");
		for (int j = 0; j < FeatureStrV.Len(); j++) {
			fprintf(F, "%f\t", FeatureV2[j].Val);
		}
		fprintf(F, "\n");
	}
	fclose(F);


	FNm = TStr::Fmt("ML_%s-DifReg-%d%.0f%.0f.txt", Pref.CStr(), VSameCount, VMinMultiple, VMaxDifference*10);
	FILE* F2 = fopen(FNm.CStr(), "w");
	fprintf(F2, "%d\t%d\n", QtPrV.Len(), FeatureStrV.Len());

	for (int i = 0; i < QtPrV.Len(); i++) {
		int QtN1 = QtPrV[i].Val1; int QtN2 = QtPrV[i].Val2;
		TFltV& FeatureV1 = FeatureVH.GetDat(QtN1);
		TFltV& FeatureV2 = FeatureVH.GetDat(QtN2);
		// Emit both orientations of the difference (V1-V2 then V2-V1).
		for (int j = 0; j < FeatureStrV.Len(); j++) {
			fprintf(F2, "%f\t", FeatureV1[j].Val - FeatureV2[j].Val);
		}
		fprintf(F2, "\n");
		for (int j = 0; j < FeatureStrV.Len(); j++) {
			fprintf(F2, "%f\t", FeatureV2[j].Val - FeatureV1[j].Val);
		}
		fprintf(F2, "\n");
	}
	fclose(F2);
}

// Naive-Bayes style analysis: for every domain, estimate the (Laplace-
// smoothed) probability of appearing in a high- vs low-volume quote and
// dump the per-domain log-likelihood ratio, sorted descending, to
// STATS_*Bayes*.txt.
// VBegDomNum: minimum number of domains a quote must have to be counted.
void TMemeTrend::DumpNaiveBayesTaskData(int VBegDomNum) {
	// Domain id -> count of high/low volume quotes it participates in.
	TIntFltH HDomCntH, LDomCntH;
	double HCnt = 0, LCnt = 0;	// total # of high / low volume quotes

	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		if (Qt.GetTmDev() > 7) continue;
		if (Qt.GetUrls() < VBegDomNum) continue;
		// Skip quotes in the ambiguous middle volume band.
		if (Qt.GetUrls() > LoThresh && Qt.GetUrls() < HiThresh) continue;
		TIntV DomIdV;
		GetQtDomIdV(DomIdV, i, VBegDomNum);
		if (DomIdV.Len() < VBegDomNum) continue;
		bool Vol = 0;
		if (Qt.GetUrls() >= HiThresh) Vol = 1;
		if (Vol == 1)
			HCnt += 1;
		else
			LCnt += 1;
		// The "+= 0" branch forces both hashes to contain the same key set,
		// which the keys-match assertion below depends on.
		for (int j = 0; j < DomIdV.Len(); j++) {
			if (Vol == 1) {
				HDomCntH.AddDat(DomIdV[j].Val) += 1;
				LDomCntH.AddDat(DomIdV[j].Val) += 0;
			} else {
				HDomCntH.AddDat(DomIdV[j].Val) += 0;
				LDomCntH.AddDat(DomIdV[j].Val) += 1;
			}
		}
	}
	HDomCntH.SortByKey(true);
	LDomCntH.SortByKey(true);

	// (log-ratio, domain id) pairs for domains with enough support.
	TFltIntPrV LogRV;
	for (int i = 0; i < HDomCntH.Len(); i++) {
		IAssert(HDomCntH.GetKey(i) == LDomCntH.GetKey(i));
		if (HDomCntH[i] + LDomCntH[i] < 50) continue;
		int DomId = HDomCntH.GetKey(i);
		if (!LinkDomNet.IsNode(DomId)) continue;
		if (LinkDomNet.GetNI(DomId).GetInDeg() < 10) continue;
		if (LinkDomNet.GetNI(DomId).GetOutDeg() < 10) continue;
		// Laplace smoothing (+1 / +2); note this overwrites the raw counts
		// in place — the dump below reverses the transform to recover them.
		HDomCntH[i] = (HDomCntH[i].Val + 1) / (HCnt + 2);
		LDomCntH[i] = (LDomCntH[i].Val + 1) / (LCnt + 2);
		LogRV.Add(TFltIntPr(log(HDomCntH[i].Val/LDomCntH[i].Val), HDomCntH.GetKey(i).Val));
	}
	printf("LogRV Len = %d\n", LogRV.Len());
	LogRV.Sort(false);	// most "high-volume-predictive" domains first
	
	TStr FNm = TStr::Fmt("STATS_%sBayes%d.txt", Pref.CStr(), VBegDomNum);
	FILE* FBayes = fopen(FNm.CStr(), "w");
	fprintf(FBayes, "HCnt = %.0f, LCnt = %.0f\n", HCnt, LCnt);
	for (int i = 0; i < LogRV.Len(); i++) {
		int DomId = LogRV[i].Val2;
		// Undo the smoothing to print the raw high/low counts per domain.
		fprintf(FBayes, "%.2f\t%.1f\t%.1f\t\t%s\n", LogRV[i].Val1.Val, HDomCntH.GetDat(DomId).Val*(HCnt+2)-1, LDomCntH.GetDat(DomId).Val*(LCnt+2)-1, DomStrH.GetDat(DomId).CStr());
	}
	fclose(FBayes);
	
}

// Feature evolution
void  TMemeTrend::EvoComputeCompCoeff(double& CpIn, double& CpOut, TIntV& DomIdV, int N) {
	CpIn = 0;
	for (int i = 0; i < N; i++) {
		for (int j = i+1; j < N; j++) {
			TIntPr Key = TIntPr(DomIdV[i], DomIdV[j]);
			if (InCompH.IsKey(Key))
				CpIn += InCompH.GetDat(Key);
		}
	}
	CpOut = 0;
	for (int i = 0; i < N; i++) {
		for (int j = i+1; j < N; j++) {
			TIntPr Key = TIntPr(DomIdV[i], DomIdV[j]);
			if (OutCompH.IsKey(Key))
				CpOut += OutCompH.GetDat(Key);
		}
	}
	if (N > 2) {
		CpIn /= N*(N-1)/2;
		CpOut /= N*(N-1)/2;
	}
}

// Projection-graph density over the first N domains of DomIdV: for every
// unordered pair take the smaller of the two directed link-graph edge
// weights (when any edge exists) and average over all pairs.
// PCC (clustering coefficient) is not implemented and is always set to 0.
void TMemeTrend::EvoComputePDenCC(double& PDen, double& PCC, TIntV& DomIdV, int N) {
	PDen = 0;
	for (int a = 0; a < N; a++) {
		for (int b = a+1; b < N; b++) {
			const int SrcDom = DomIdV[a];
			const int DstDom = DomIdV[b];
			double MnWgt = 1e9;	// sentinel: no edge seen yet
			if (LinkDomNet.IsEdge(SrcDom, DstDom) && LinkDomNet.GetEDat(SrcDom, DstDom).Val < MnWgt) {
				MnWgt = LinkDomNet.GetEDat(SrcDom, DstDom);
			}
			if (LinkDomNet.IsEdge(DstDom, SrcDom) && LinkDomNet.GetEDat(DstDom, SrcDom).Val < MnWgt) {
				MnWgt = LinkDomNet.GetEDat(DstDom, SrcDom);
			}
			if (MnWgt != 1e9) {	// at least one direction had an edge
				PDen += MnWgt;
			}
		}
	}
	if (N >= 2) {
		PDen /= N*(N-1)/2;	// average over the number of unordered pairs
	}
	PCC = 0;
	// Todo complete PCC
}

// Co-mention graph density over the first N domains of DomIdV: for each
// unordered pair with a co-mention edge, accumulate the edge weight
// normalized by the smaller of the two node weights, then average over
// all pairs. CCC (clustering coefficient) is not implemented (always 0).
void TMemeTrend::EvoComputeCDenCC(double& CDen, double& CCC, TIntV& DomIdV, int N) {
	CDen = 0;
	for (int i = 0; i < N; i++) 
		for (int j = i+1; j < N; j++){
			// Only the (i, j) direction is checked here; assumes the co-mention
			// edge is stored in that orientation — TODO confirm.
			if (RawCoMDomNet.IsEdge(DomIdV[i], DomIdV[j]) && RawCoMDomNet.GetEDat(DomIdV[i], DomIdV[j]).Val < 1e9) {
				CDen += RawCoMDomNet.GetEDat(DomIdV[i], DomIdV[j]).Val / double(TMath::Mn(RawCoMDomNet.GetNDat(DomIdV[i]).Val, RawCoMDomNet.GetNDat(DomIdV[j]).Val));
			}
		}
	if (N >= 2) {
		CDen /= N*(N-1)/2;	// average over the number of unordered pairs
	}
	CCC = 0;
	// TODO: complete CCC (clustering coefficient of the co-mention graph)
}

// Average mix-graph in/out degree over the first N domains of DomIdV.
// BUG FIX: the original read the degree tables via AddDat(), which silently
// INSERTED a default-0 entry for every unseen domain id — mutating the
// global MixInDegH/MixOutDegH hashes as a side effect of a read. Missing
// ids now contribute 0 without being inserted; the numeric result is
// unchanged.
void TMemeTrend::EvoComputeMixDeg(double& DegIn, double& DegOut, TIntV& DomIdV, int N) {
	DegIn = 0; 
	DegOut = 0;
	for (int i = 0; i < N; i++) {
		if (MixInDegH.IsKey(DomIdV[i])) { DegIn += MixInDegH.GetDat(DomIdV[i]); }
		if (MixOutDegH.IsKey(DomIdV[i])) { DegOut += MixOutDegH.GetDat(DomIdV[i]); }
	}
	if (N > 0) {
		DegIn /= N;
		DegOut /= N;
	}
}

// Plot a feature's evolution with error bars: SumH/SqrH hold running sums of
// values and squared values keyed by #domains; this converts them in place
// to mean and standard deviation, then plots mean with stddev error bars.
// TotCnt: number of quotes accumulated into the sums.
void TMemeTrend::PlotFeatureEvolution(TIntFltH& SumH, TIntFltH& SqrH, double TotCnt, TStr FNm, TStr Desc) {
	for (int i = 0; i < SumH.Len(); i++) {
		IAssert(SumH.GetKey(i) == SqrH.GetKey(i));
		SumH[i] /= TotCnt;	// mean
		// Variance = E[x^2] - E[x]^2.
		double Var = SqrH[i].Val / TotCnt - SumH[i].Val * SumH[i].Val;
		// BUG FIX: floating-point rounding can push a zero variance slightly
		// negative, which tripped the original IAssert(SqrH[i] >= 0). Clamp
		// tiny negatives to 0; a genuinely negative variance still asserts.
		if (Var < 0) {
			IAssert(Var > -1e-9);
			Var = 0;
		}
		SqrH[i] = sqrt(Var);	// standard deviation
	}

	TFltPrV XYV;	// (#domains, mean)
	TFltV DeltaV;	// per-point stddev for error bars
	for (int i = 0; i < SumH.Len(); i++) {
		XYV.Add(TFltPr(SumH.GetKey(i).Val, SumH[i].Val));
		DeltaV.Add(SqrH[i].Val);
	}

	TGnuPlot Gp(FNm, Desc); 
	Gp.AddErrBar(XYV, DeltaV);
	Gp.AddPlot(XYV);
	Gp.SetScale(gpsAuto);
	Gp.SavePng();
}

// Overload: plot high- and low-volume feature means on one chart
// (no error bars); thin wrapper over TGnuPlot::PlotValCntH.
void TMemeTrend::PlotFeatureEvolution(TIntFltH& HiSumH, TIntFltH& LoSumH, TStr FNm, TStr Desc) {
	TGnuPlot::PlotValCntH(HiSumH, "High Vol", LoSumH, "Low Vol", FNm, Desc, "# of domains", "Feature value");
}

void TMemeTrend::FeatureEvolution(int VBegDomNum) {
	printf("Examining feature evolution\n");

	TIntFltH HiPDenSumH, HiPDenSqrH, HiPCCSumH, HiPCCSqrH;
	TIntFltH HiCDenSumH, HiCDenSqrH, HiCCCSumH, HiCCCSqrH;
	TIntFltH HiDegInSumH, HiDegInSqrH;
	TIntFltH HiCpInSumH, HiCpInSqrH, HiCpOutSumH, HiCpOutSqrH;
	{ // High Vol
		double TotCnt = 0;
		for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
			if (i++ % 10000 == 0) printf("%d out of %d Completed\n", i, QtBs->QuoteH.Len());
			TQuote& Qt = QtBs->QuoteH[i];
			if (Qt.GetTy() == qtCentr) continue;
			if (Qt.GetTmDev() > 7) continue;
			if (Qt.GetUrls() < HiThresh) continue;
			if (Qt.GetUrls() < VBegDomNum) continue;
			TIntV DomIdV;
			GetQtDomIdV(DomIdV, i, VBegDomNum);
			TotCnt += 1;
			for (int j = 2; j < DomIdV.Len(); j++) {
				double PDen, PCC, CDen, CCC, DegIn, DegOut, CpIn, CpOut;
				EvoComputePDenCC(PDen, PCC, DomIdV, j);
				EvoComputeCDenCC(CDen, CCC, DomIdV, j);
				EvoComputeMixDeg(DegIn, DegOut, DomIdV, j);
				EvoComputeCompCoeff(CpIn, CpOut, DomIdV, j);
				HiPDenSumH.AddDat(j) += PDen; HiPDenSqrH.AddDat(j) += PDen * PDen;
				HiCDenSumH.AddDat(j) += CDen; HiCDenSqrH.AddDat(j) += CDen * CDen;
				HiDegInSumH.AddDat(j) += DegIn; HiDegInSqrH.AddDat(j) += DegIn * DegIn;
				HiCpInSumH.AddDat(j) += CpIn; HiCpInSqrH.AddDat(j) += CpIn * CpIn;
				HiCpOutSumH.AddDat(j) += CpOut; HiCpOutSqrH.AddDat(j) += CpOut * CpOut;
			}
		}

		HiPDenSumH.SortByKey(true); HiPDenSqrH.SortByKey(true);
		HiCDenSumH.SortByKey(true); HiCDenSqrH.SortByKey(true);
		HiDegInSumH.SortByKey(true); HiDegInSqrH.SortByKey(true);
		HiCpInSumH.SortByKey(true); HiCpInSqrH.SortByKey(true);
		HiCpOutSumH.SortByKey(true); HiCpOutSqrH.SortByKey(true);
		PlotFeatureEvolution(HiPDenSumH, HiPDenSqrH, TotCnt, "TEST_EvoHiPDen", "Projection graph density");
		PlotFeatureEvolution(HiCDenSumH, HiCDenSqrH, TotCnt, "TEST_EvoHiCDen", "CoMention graph density");
		PlotFeatureEvolution(HiDegInSumH, HiDegInSqrH, TotCnt, "TEST_EvoHiDegIn", "Average Mix Graph In Degrees");
		PlotFeatureEvolution(HiCpInSumH, HiCpInSqrH, TotCnt, "TEST_EvoHiCpIn", "Subscriber competition coefficient");
		PlotFeatureEvolution(HiCpOutSumH, HiCpOutSqrH, TotCnt, "TEST_EvoHiCpOut", "Source competition coefficient");
	}
	TIntFltH LoPDenSumH, LoPDenSqrH, LoPCCSumH, LoPCCSqrH;
	TIntFltH LoCDenSumH, LoCDenSqrH, LoCCCSumH, LoCCCSqrH;
	TIntFltH LoDegInSumH, LoDegInSqrH;
	TIntFltH LoCpInSumH, LoCpInSqrH, LoCpOutSumH, LoCpOutSqrH;
	{ // Low Vol
		double TotCnt = 0;
		for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
			if (i++ % 10000 == 0) printf("%d out of %d Completed\n", i, QtBs->QuoteH.Len());
			TQuote& Qt = QtBs->QuoteH[i];
			if (Qt.GetTy() == qtCentr) continue;
			if (Qt.GetTmDev() > 7) continue;
			if (Qt.GetUrls() < VBegDomNum) continue;
			if (Qt.GetUrls() > LoThresh) continue;
			TIntV DomIdV;
			GetQtDomIdV(DomIdV, i, VBegDomNum);
			if (DomIdV.Len() < VBegDomNum) continue;
			TotCnt += 1;
			for (int j = 2; j < DomIdV.Len(); j++) {
				double PDen, PCC, CDen, CCC, DegIn, DegOut, CpIn, CpOut;
				EvoComputePDenCC(PDen, PCC, DomIdV, j);
				EvoComputeCDenCC(CDen, CCC, DomIdV, j);
				EvoComputeMixDeg(DegIn, DegOut, DomIdV, j);
				EvoComputeCompCoeff(CpIn, CpOut, DomIdV, j);
				LoPDenSumH.AddDat(j) += PDen; LoPDenSqrH.AddDat(j) += PDen * PDen;
				LoCDenSumH.AddDat(j) += CDen; LoCDenSqrH.AddDat(j) += CDen * CDen;
				LoDegInSumH.AddDat(j) += DegIn; LoDegInSqrH.AddDat(j) += DegIn * DegIn;
				LoCpInSumH.AddDat(j) += CpIn; LoCpInSqrH.AddDat(j) += CpIn * CpIn;
				LoCpOutSumH.AddDat(j) += CpOut; LoCpOutSqrH.AddDat(j) += CpOut * CpOut;
			}
		}

		LoPDenSumH.SortByKey(true); LoPDenSqrH.SortByKey(true);
		LoCDenSumH.SortByKey(true); LoCDenSqrH.SortByKey(true);
		LoDegInSumH.SortByKey(true); LoDegInSqrH.SortByKey(true);
		LoCpInSumH.SortByKey(true); LoCpInSqrH.SortByKey(true);
		LoCpOutSumH.SortByKey(true); LoCpOutSqrH.SortByKey(true);
		PlotFeatureEvolution(LoPDenSumH, LoPDenSqrH, TotCnt, "TEST_EvoLoPDen", "Projection graph density");
		PlotFeatureEvolution(LoCDenSumH, LoCDenSqrH, TotCnt, "TEST_EvoLoCDen", "CoMention graph density");
		PlotFeatureEvolution(LoDegInSumH, LoDegInSqrH, TotCnt, "TEST_EvoLoDegIn", "Average Mix Graph In Degrees");
		PlotFeatureEvolution(LoCpInSumH, LoCpInSqrH, TotCnt, "TEST_EvoLoCpIn", "Subscriber competition coefficient");
		PlotFeatureEvolution(LoCpOutSumH, LoCpOutSqrH, TotCnt, "TEST_EvoLoCpOut", "Source competition coefficient");
	}
	PlotFeatureEvolution(HiPDenSumH, LoPDenSumH, "TEST_EvoCmpPDen", "Projection graph density");
	PlotFeatureEvolution(HiCDenSumH, LoCDenSumH, "TEST_EvoCmpCDen", "CoMention graph density");
	PlotFeatureEvolution(HiDegInSumH, LoDegInSumH, "TEST_EvoCmpDegIn", "Average Mix Graph In Degrees");
	PlotFeatureEvolution(HiCpInSumH, LoCpInSumH, "TEST_EvoCmpCpIn", "SubScriber competition coefficient");
	PlotFeatureEvolution(HiCpOutSumH, LoCpOutSumH, "TEST_EvoCmpCpOut", "Source competition coefficient");
}

// Meme Trend
// Compute signature of the quote
// Compute the smoothed time signature of quote Qt: count mentions from
// domains present in the link-domain network, binned by TmUnit over TmCnt
// bins starting at BegTime (or at the quote's first mention when BegTime is
// unset), then smooth via TQuote::GetSmoothFqOt and trim samples before the
// start bin.
void TMemeTrend::ComputeQtSig(TQuote& Qt, TQuote::TTmFltPrV& SmoothFqOtV, const TTmUnit& TmUnit, int TmCnt, TSecTm BegTime) {	
	if (Qt.TmUrlCntV.Len() == 0)
		return;

	const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
	Qt.TmUrlCntV.Sort(true);
	THash<TSecTm, TFlt> TmCntH;
	TQuote::TTmFltPrV RawFqOtV;

	TSecTm BegTm;
	if (BegTime.GetAbsSecs() == 0)
		BegTm = Qt.TmUrlCntV[0].Tm().Round(TmUnit);
	else 
		BegTm = BegTime.Round(TmUnit);
	TSecTm EndTm(BegTm.GetAbsSecs() + TmCnt * TmUnitSecs);
	// Pre-fill every bin so empty bins appear as explicit zeros.
	for (TSecTm Tm = BegTm; Tm < EndTm; Tm += TmUnitSecs) 
		TmCntH.AddDat(Tm) = 0;

	for (int i = 0; i < Qt.TmUrlCntV.Len(); i++) {
		TStr UrlStr = QtBs->GetStr(Qt.TmUrlCntV[i].UrlId());
		TStr DomStr = GetDomNmFromUrl(UrlStr);
		// BUG FIX: the original called DomIdH.GetDat() without checking the
		// key first, which fails for domains missing from DomIdH (and hashed
		// the domain name twice). Guard with IsKey and keep the original
		// behavior of skipping a zero domain id.
		const TMd5Sig DomSig(DomStr);
		if (!DomIdH.IsKey(DomSig)) continue;
		const int DomId = DomIdH.GetDat(DomSig).Val;
		if (DomId == 0) continue;
		if (!LinkDomNet.IsNode(DomId)) continue;
		const TSecTm Tm = Qt.TmUrlCntV[i].Tm().Round(TmUnit);
		if (Tm < BegTm) continue;
		TmCntH.AddDat(Tm) += 1;
	}
	
	TmCntH.SortByKey();
	TmCntH.GetKeyDatPrV(RawFqOtV);
	TQuote::GetSmoothFqOt(SmoothFqOtV, RawFqOtV, TmUnit, 6*3600/TmUnitSecs, 3, QtBs->MnTm, QtBs->MxTm);

	// Drop smoothed samples that precede the start bin.
	int IdxRec = 0;
	for (int i = 0; i < SmoothFqOtV.Len(); i++) {
		if (SmoothFqOtV[i].Val1 >= BegTm) {
			IdxRec = i;
			break;
		}
	}
	if (IdxRec > 0) {
		SmoothFqOtV.Del(0, IdxRec-1);
	}
	//SmoothFqOtV = RawFqOtV; // no smoothing
}

// Compute signature of the quote
// Convenience overload: compute the signature of the quote with id QtN
// by delegating to the TQuote& version above.
void TMemeTrend::ComputeQtSig(int QtN, TQuote::TTmFltPrV& SmoothFqOtV, const TTmUnit& TmUnit, int TmCnt, TSecTm BegTime) {	
	ComputeQtSig(QtBs->QuoteH[QtN], SmoothFqOtV, TmUnit, TmCnt, BegTime);
}

// Convert data to the output type
// Convert a (time, value) signature into (bin-index, value) pairs, where the
// bin index is measured in TmUnit steps from the first sample's time.
// Samples falling beyond bin 100 are dropped; output is sorted ascending.
void TMemeTrend::GetTmFltDataPr(TFltPrV& Data, TQuote::TTmFltPrV& SigV, const TTmUnit& TmUnit) {
	const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
	const TSecTm& StartTm = SigV[0].Val1;
	for (int s = 0; s < SigV.Len(); s++) {
		const int BinIdx = (SigV[s].Val1.GetAbsSecs() - StartTm.GetAbsSecs()) / TmUnitSecs;
		if (BinIdx <= 100) {	// cap the plotted window at 100 bins
			Data.Add(TFltPr(double(BinIdx), SigV[s].Val2));
		}
	}
	Data.Sort(true);
}

// Plot pair clusters on the same plot
// Plot the two quotes' volume time series on one chart (saved as a PNG) and
// append both quotes' full mention lists to STATS_TopClustTrend.txt.
void TMemeTrend::PlotClusterPair(TStr& OutFNm, int QtN1, int QtN2, TQuote::TTmFltPrV& SigV1, TQuote::TTmFltPrV& SigV2, const TTmUnit& TmUnit) {
	TStr Desc = TStr::Fmt("Red Volume: %d; Blue Volume: %d", QtBs->GetQt(QtN1).GetUrls(), QtBs->GetQt(QtN2).GetUrls());
	TGnuPlot Gp(OutFNm, Desc);
	TFltPrV PlotData1, PlotData2;
	GetTmFltDataPr(PlotData1, SigV1, TmUnit);
	GetTmFltDataPr(PlotData2, SigV2, TmUnit);
	Gp.AddPlot(PlotData1, gpwLinesPoints, QtBs->GetQt(QtN1).GetStr());
	Gp.AddPlot(PlotData2, gpwLinesPoints, QtBs->GetQt(QtN2).GetStr());
	Gp.SetXYLabel("TmUnit", "Count");
	Gp.SavePng();	

	// Append the raw mention lists of both quotes, first quote first.
	FILE* F = fopen("STATS_TopClustTrend.txt", "a");
	fprintf(F, "\n\n%s\n", OutFNm.CStr());
	const int QtNV[2] = { QtN1, QtN2 };
	for (int q = 0; q < 2; q++) {
		const TQuote& Qt = QtBs->GetQt(QtNV[q]);
		fprintf(F, "\n%d\t%s\n", Qt.GetUrls(), Qt.GetStr().CStr());
		for (int u = 0; u < Qt.GetUrls(); u++) {
			fprintf(F, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(), 
				QtBs->GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", QtBs->GetStr(Qt.TmUrlCntV[u].UrlId()));
		}
	}
	fclose(F);
}

// Write an HTML page (FN.html) showing the plotted pair image plus two
// side-by-side columns listing each quote's mentions (time, site type,
// domain). Both quotes' timelines are shifted so each starts at the common
// reference time TmRef (2000-01-01), and mentions past TmCnt units are cut.
void TMemeTrend::DumpClusterPairHTML(TStr& FN, int QtN1, int QtN2, const TTmUnit& TmUnit, const int& TmCnt) {
	TSecTm TmRef(2000,1,1);	// common zero point for both timelines
	const int TmUnitSecs = TTmInfo::GetTmUnitSecs(TmUnit);
	TSecTm EndTm(TmRef.GetAbsSecs() + TmCnt * TmUnitSecs);
	const TQuote& Qt1 = QtBs->GetQt(QtN1);
	const TQuote& Qt2 = QtBs->GetQt(QtN2);
	// Offsets that map each quote's first mention onto TmRef.
	// Assumes TmUrlCntV is sorted so [0] is the earliest mention — TODO confirm.
	uint TmAdj1 = Qt1.TmUrlCntV[0].Tm().GetAbsSecs() - TmRef.GetAbsSecs();
	uint TmAdj2 = Qt2.TmUrlCntV[0].Tm().GetAbsSecs() - TmRef.GetAbsSecs(); 

	TStr HTMLFN = FN + TStr(".html");
	FILE* F = fopen(HTMLFN.CStr(), "w");
	fprintf(F, "<html>\n<head>\n");
	fprintf(F, "<link rel=\"stylesheet\" type=\"text/css\" href=\"QtTrend.css\" />\n");
	fprintf(F, "</head>\n\n<body>\n");

	fprintf(F, "<img src=\"%s.png\" id=\"image\"/>\n", FN.CStr());
	fprintf(F, "<div id=\"container\">\n<p>\n");
	fprintf(F, "Quote 1 (Red)   : %s <br />\n", QtBs->QuoteH[QtN1].GetStr().CStr());
	fprintf(F, "Quote 2 (Green) : %s <br />\n", QtBs->QuoteH[QtN2].GetStr().CStr());
	fprintf(F, "</p>\n<div class=\"col\">\n");
	// Column 1: quote 1's mentions, times shifted to the reference origin.
	fprintf(F, "Time&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Type&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Domain Name <br />\n");
	for (int i = 0; i < Qt1.TmUrlCntV.Len(); i++) {
		TSecTm Tm = Qt1.TmUrlCntV[i].Tm();
		Tm -= TmAdj1;
		if (Tm > EndTm) break;	// past the plotted window; list is time-sorted
		TStr Type = (QtBs->GetUrlTy(Qt1.TmUrlCntV[i].UrlId())==utMedia?"M":"B");
		TStr DomainNm =	TStrUtil::GetDomNm2(QtBs->GetStr(Qt1.TmUrlCntV[i].UrlId()));
		fprintf(F, "%s&nbsp;&nbsp;%s&nbsp;&nbsp;%s <br />\n", Tm.GetStr().GetSubStr(7, 18).CStr(), Type.CStr(), DomainNm.CStr());
	}
	fprintf(F, "</div>\n<div class=\"col\">\n");
	// Column 2: same layout for quote 2.
	fprintf(F, "Time&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Type&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Domain Name <br />\n");
	for (int i = 0; i < Qt2.TmUrlCntV.Len(); i++) {
		TSecTm Tm = Qt2.TmUrlCntV[i].Tm();
		Tm -= TmAdj2;
		if (Tm > EndTm) break;
		TStr Type = (QtBs->GetUrlTy(Qt2.TmUrlCntV[i].UrlId())==utMedia?"M":"B");
		TStr DomainNm =	TStrUtil::GetDomNm2(QtBs->GetStr(Qt2.TmUrlCntV[i].UrlId()));
		fprintf(F, "%s&nbsp;&nbsp;%s&nbsp;&nbsp;%s <br />\n", Tm.GetStr().GetSubStr(7, 18).CStr(), Type.CStr(), DomainNm.CStr());
	}
	fprintf(F, "</div>\n</div>\n</body>\n</html>\n");
	fclose(F);
}

// Plot quote pair trend comparison
// For up to the first 100 quote pairs: compute both quotes' signatures, plot
// them together and dump the per-pair HTML page; then write an index page
// (HTML_QuoteTrend.html) linking to every pair.
// BUG FIX: the index file handle was never closed, leaking the FILE* and
// risking unflushed buffered output; fclose(F) added.
void TMemeTrend::PlotTrendQtPrV(TIntPrV& QtPrV, TStr Pref, const TTmUnit& TmUnit, int TmCnt) {	
	for (int i = 0; i < TMath::Mn(100,QtPrV.Len()); i++) {
		TStr FN = TStr::Fmt("%s_%d", Pref.CStr(), i);
		int QtN1 = QtPrV[i].Val1, QtN2 = QtPrV[i].Val2;
		TQuote::TTmFltPrV SigV1;	ComputeQtSig(QtN1, SigV1, TmUnit, TmCnt);
		TQuote::TTmFltPrV SigV2;	ComputeQtSig(QtN2, SigV2, TmUnit, TmCnt);
		PlotClusterPair(FN, QtN1, QtN2, SigV1, SigV2, TmUnit);
		DumpClusterPairHTML(FN, QtN1, QtN2, TmUnit, TmCnt);
	}
	FILE* F = fopen("HTML_QuoteTrend.html", "w");
	fprintf(F, "<html>\n\n<body>\n");
	for (int i = 0; i < QtPrV.Len(); i++) {
		fprintf(F, "<p>\n");
		// NOTE(review): the link target hard-codes the "QtTrendVar_" prefix
		// regardless of the Pref argument used for the plots — confirm.
		fprintf(F, "<a href=\"data/QtTrendVar_%d.html\"> Quote Pair %d </a> <br />\n", i, i);
		fprintf(F, "Quote 1 : %s <br />\n", QtBs->GetQt(QtPrV[i].Val1).GetStr().CStr());
		fprintf(F, "Quote 2 : %s <br />\n", QtBs->GetQt(QtPrV[i].Val2).GetStr().CStr());
		fprintf(F, "</p>\n");		
	}
	fprintf(F, "</body>\n</html>\n");
	fclose(F);
}

// Plot Top variants of the quote cluster
// For each of the top TopNClust quote clusters (by cluster frequency), plot
// the merged cluster signature against its TopNVar highest-volume variants
// on one chart, then write an HTML index page embedding the images.
// TopNClust is capped at 100.
void TMemeTrend::PlotTopClusterVariants(int TopNClust, int TopNVar, TStr Pref, const TTmUnit& TmUnit, int TmCnt) {
	if (TopNClust > 100) TopNClust = 100;
	printf("Plot Top Clusters versus its variants\n");
	// (cluster frequency, cluster id), sorted descending by frequency.
	TIntPrV FqIdV;
	for (int i = 0; i < QtBs->ClustQtIdVH.Len(); i++) {
		int CId = QtBs->ClustQtIdVH.GetKey(i);
		FqIdV.Add(TIntPr(QtBs->GetClustFq(CId), CId));
	}
	FqIdV.Sort(false);
	for (int i = 0; i < TMath::Mn(FqIdV.Len(), TopNClust); i++) {
		TIntV& ClustV = QtBs->ClustQtIdVH.GetDat(FqIdV[i].Val2);
		// Rank this cluster's variants by URL count, descending.
		TIntPrV FqQtIdV;
		for (int j = 0; j < ClustV.Len(); j++) 
			FqQtIdV.Add(TIntPr(QtBs->GetQtN(ClustV[j]).GetUrls(), ClustV[j]));
		FqQtIdV.Sort(false);
		TStr OutFNm = TStr::Fmt("%s%d", Pref.CStr(), i);
		TGnuPlot Gp(OutFNm, "Comparing cluster with top variant");		
		TQuote CentrQt;
		QtBs->GetMergedClustQt(FqIdV[i].Val2, CentrQt, false);
		CentrQt.TmUrlCntV.Sort(true);
		// Pick the plot start: 5 days before the 100th mention (or the last
		// mention for small clusters), then snap forward to the first actual
		// mention after that point.
		TSecTm BegTm;
		if (CentrQt.TmUrlCntV.Len() > 100)
			BegTm = CentrQt.TmUrlCntV[100].Tm();
		else 
			BegTm = CentrQt.TmUrlCntV[CentrQt.TmUrlCntV.Len()-1].Tm();
		BegTm -= 3600 * 24 * 5;
		for (int j = 0; j < CentrQt.TmUrlCntV.Len(); j++) {
			if (CentrQt.TmUrlCntV[j].Tm() > BegTm) {
				BegTm = CentrQt.TmUrlCntV[j].Tm();
				break;
			}
		}

		// Plot the merged cluster signature first, then each top variant.
		TFltPrV Data; TQuote::TTmFltPrV SigV;
		ComputeQtSig(CentrQt, SigV, TmUnit, TmCnt, BegTm); GetTmFltDataPr(Data, SigV, TmUnit);
		Gp.AddPlot(Data, gpwLinesPoints, QtBs->GetQt(FqIdV[i].Val2).GetStr());
		for (int j = 0; j < TMath::Mn(FqQtIdV.Len(), TopNVar); j++) {
			TFltPrV Data2; TQuote::TTmFltPrV SigV2;
			ComputeQtSig(FqQtIdV[j].Val2, SigV2, TmUnit, TmCnt, BegTm); GetTmFltDataPr(Data2, SigV2, TmUnit);
			Gp.AddPlot(Data2, gpwLinesPoints, QtBs->GetQt(FqQtIdV[j].Val2).GetStr());
		}
		Gp.SetXYLabel(TStr::Fmt("TmUnit BegTm = %s", BegTm.GetStr().CStr()), "Count");
		Gp.SavePng();	
	}
	printf("Done Top Cluster versus Variants\n");

	// Index page embedding one image per plotted cluster.
	FILE* FHTML = fopen("HTML_TopVariant.html", "w"); 
	fprintf(FHTML, "<html>\n<body>\n");

	for (int i = 0; i < TMath::Mn(FqIdV.Len(), TopNClust); i++) {
		int QtN = FqIdV[i].Val2();
		fprintf(FHTML, "<p>\n");
		fprintf(FHTML, "Quote %d : %s <br />\n", i, QtBs->GetQt(QtN).GetStr().CStr());
		fprintf(FHTML, "<img src=\"data/TopVariant/%s%d.png\" /> <br />\n", Pref.CStr(), i);
		fprintf(FHTML, "</p>\n");
	}
	fprintf(FHTML, "</body>\n</html>\n");

	fclose(FHTML);
}

// Dump quote info for quotes that appear in quote trend analysis
// Dump details of every quote that appears in the trend pairs, sorted by
// descending URL count. When SkipUrls is false each quote's full mention
// list (time, count, site type, URL) is written as well.
void TMemeTrend::DumpTrendQt(TIntPrV& QtVarPrV, TStr OutFNm, bool SkipUrls) {
	// Collect the distinct quote ids referenced by the pairs.
	TIntSet QtSet;
	for (int p = 0; p < QtVarPrV.Len(); p++) {
		QtSet.AddKey(QtVarPrV[p].Val1);
		QtSet.AddKey(QtVarPrV[p].Val2);
	}
	// Order the quotes by URL count, largest first.
	TIntPrV UrlsQtIdV;
	for (int k = 0; k < QtSet.Len(); k++) {
		UrlsQtIdV.Add(TIntPr(QtBs->QuoteH[QtSet[k].Val].GetUrls(), QtSet[k].Val));
	}
	UrlsQtIdV.Sort(false);
	FILE* F = fopen(OutFNm.CStr(), "w");
	for (int c = 0; c < UrlsQtIdV.Len(); c++) {
		const int QtId = UrlsQtIdV[c].Val2();
		const TQuote& Qt = QtBs->GetQt(QtId);
		if (!SkipUrls) { fprintf(F, "\n"); }
		fprintf(F, "\t%d\t%d\t%s\t%d\n", Qt.GetFq(), Qt.GetUrls(), Qt.GetStr().CStr(), QtId);
		if (SkipUrls) { continue; }
		for (int u = 0; u < Qt.GetUrls(); u++) {
			fprintf(F, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(), 
				QtBs->GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", QtBs->GetStr(Qt.TmUrlCntV[u].UrlId()));
		}
	}
	fclose(F);
}

// Get quote pairs that are similar in the beginning phase
// Find quote pairs whose early-phase volume signatures are very similar
// (L1 distance below VMaxDifference after normalization) but whose final
// volumes differ by at least VMinMultiple. Results go to QtPrV, ordered
// (higher-volume quote, lower-volume quote), sorted by volume ratio, capped
// at TopN, with each quote appearing at most VSameCount times.
void TMemeTrend::GetTrendQtPrV(TIntPrV& QtPrV, int MinUrls, int TopN, const TTmUnit& TmUnit, int TmCnt) {
	printf("Start finding quote pair with similar beginning phase\n");
	THash<TInt, TQuote::TTmFltPrV> SigH;	// quote id -> early signature
	THash<TInt, TFlt> VolH;			// quote id -> early-phase volume
	THash<TInt, TFlt> UrlsH;		// quote id -> total URL count
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		// Only clearly high or clearly low volume quotes are candidates.
		if (Qt.GetUrls() > LoThresh && Qt.GetUrls() < HiThresh) continue;
		if (Qt.GetUrls() < MinUrls) continue;
		if (Qt.GetTmDev() > 7) continue;
		TIntV DomIdV;

		TQuote::TTmFltPrV SigV;
		ComputeQtSig(i, SigV, TmUnit, TmCnt);
		// Early-phase volume over the first TmCnt bins.
		// Assumes SigV has at least TmCnt entries — TODO confirm ComputeQtSig
		// guarantees this.
		double Vol = 0;
		for (int k = 0; k < TmCnt; k++)
			Vol += SigV[k].Val2;
		if (Vol < 3 * TmCnt) continue;	// require average >= 3 mentions/bin
		VolH.AddDat(i) = Vol;
		SigH.AddDat(i, SigV);
		UrlsH.AddDat(i) = Qt.GetUrls();
	}
	printf("SigH size = %d\n", SigH.Len());
	
	// All-pairs comparison, parallelized; only the hash insertion is guarded.
	THash<TIntPr, TFlt> ResultH;
	#pragma omp parallel for
	for (int i = 0; i < SigH.Len(); i++)  {
		TQuote::TTmFltPrV& SigV1 = SigH[i];
		int QtN1 = SigH.GetKey(i);
		for (int j = i+1; j < SigH.Len(); j++) {
			TQuote::TTmFltPrV& SigV2 = SigH[j];
			int QtN2 = SigH.GetKey(j);
			// L1 distance over the early bins, normalized by the smaller
			// early-phase volume.
			double dist = 0;
			for (int k = 0; k < TmCnt; k++) {
				// Calculate distance
				dist += fabs(SigV1[k].Val2.Val - SigV2[k].Val2.Val);
			}
			dist = dist / TMath::Mn(VolH.GetDat(QtN1).Val, VolH.GetDat(QtN2).Val);
			if (dist > VMaxDifference) continue;
			// Require the final volumes to differ by at least VMinMultiple.
			double VolDif = UrlsH.GetDat(QtN1).Val / UrlsH.GetDat(QtN2).Val; 
			if (VolDif > 1/VMinMultiple && VolDif < VMinMultiple) continue;
			#pragma omp critical
			{
				// Store as (higher-volume, lower-volume) with ratio >= 1.
				if (VolDif < 1) {
					VolDif = 1 / VolDif;
					ResultH.AddDat(TIntPr(QtN2, QtN1)) = VolDif;
				} else {
					ResultH.AddDat(TIntPr(QtN1, QtN2)) = VolDif;
				}
			}
		}
	}
	ResultH.SortByDat(false);	// largest volume ratio first
	ResultH.GetKeyV(QtPrV);
	if (QtPrV.Len() > TopN)
		QtPrV.Del(TopN, QtPrV.Len()-1);

	printf("Total # of candidate pairs : %d\n", QtPrV.Len());
	TIntH QtCntH;
	// For any given quote, it appears at most VSameCount times. 
	TIntPrV QtPrVTmp = QtPrV;
	QtPrV.Clr(false);
	for (int i = 0; i < QtPrVTmp.Len(); i++) {
		if (QtCntH.AddDat(QtPrVTmp[i].Val1) < VSameCount && QtCntH.AddDat(QtPrVTmp[i].Val2) < VSameCount) {
			QtPrV.Add(QtPrVTmp[i]);
			QtCntH.AddDat(QtPrVTmp[i].Val1) += 1;
			QtCntH.AddDat(QtPrVTmp[i].Val2) += 1;
		}
	}
	printf("Total # of selected pairs : %d\n", QtPrV.Len());
}

// Dump feature values for meme pair
// For each (high, low) quote pair, compute both quotes' feature vectors over
// a common prefix of domains and dump (a) every feature value per pair to
// STATS_ALL.txt and (b) the fraction of pairs where the first quote's
// feature is higher/equal/lower to STATS_Summary.txt and stdout.
// NOTE(review): fopen() results are not checked for NULL.
void TMemeTrend::DumpFeatureStats(const TIntPrV& QtPrV, const TTmUnit& TmUnit, int TmCnt) {
	printf("Dump features stats to file, QtPrV Len = %d\n", QtPrV.Len());
	FILE* FSUM = fopen("STATS_Summary.txt", "w");
	FILE* FALL = fopen("STATS_ALL.txt", "w");

	// Compute feature vectors for both quotes of each pair, using the same
	// number of starting domains (the smaller of the two lists) so the
	// vectors are comparable.
	TVec<TPair<TFltV, TFltV> > QtPrFeatureV;
	for (int i = 0; i < QtPrV.Len(); i++) {		
		int QtN1 = QtPrV[i].Val1, QtN2 = QtPrV[i].Val2;
		TFltV FeatureV1, FeatureV2;
		TIntV DomIdV1, DomIdV2;
		GetQtDomIdV(DomIdV1, QtN1, 30); // test
		GetQtDomIdV(DomIdV2, QtN2, 30); // test
		int BegDomNum = TMath::Mn(DomIdV1.Len(), DomIdV2.Len());
		ComputeFeatureVector(FeatureV1, QtN1, DomIdV1, BegDomNum);
		ComputeFeatureVector(FeatureV2, QtN2, DomIdV2, BegDomNum);
		QtPrFeatureV.Add(TPair<TFltV, TFltV>(FeatureV1, FeatureV2));
	}
	printf("QtFV Len = %d\n", QtPrFeatureV.Len());

	// Dump all features
	for (int i = 0; i < QtPrV.Len(); i++) {
		fprintf(FALL,"Quote %d : %s\n", QtPrV[i].Val1(), QtBs->GetQt(QtPrV[i].Val1()).GetStr().CStr());
		fprintf(FALL,"Quote %d : %s\n", QtPrV[i].Val2(), QtBs->GetQt(QtPrV[i].Val2()).GetStr().CStr());
		for (int j = 0; j < QtPrFeatureV[i].Val1.Len(); j++) {
			fprintf(FALL, "%s 1 = %.02lf\t%s 2 = %.02lf\n", FeatureStrV[j].CStr(), QtPrFeatureV[i].Val1[j].Val, 
				FeatureStrV[j].CStr(), QtPrFeatureV[i].Val2[j].Val);
		}
		fprintf(FALL, "\n");
	}
	fclose(FALL);

	// Dump feature summary
	// For each feature, count in how many pairs quote 1 (high volume) beats,
	// ties, or loses to quote 2; then report the fractions as percentages.
	printf("Dump feature summary\n");
	TFltV PosCount(FeatureStrV.Len()), NegCount(FeatureStrV.Len()), TieCount(FeatureStrV.Len());
	for (int i = 0; i < QtPrV.Len(); i++) {
		for (int j = 0; j < FeatureStrV.Len(); j++) {
			if (QtPrFeatureV[i].Val1[j] > QtPrFeatureV[i].Val2[j]) PosCount[j] += 1;
			if (QtPrFeatureV[i].Val1[j] < QtPrFeatureV[i].Val2[j]) NegCount[j] += 1;
			if (QtPrFeatureV[i].Val1[j] == QtPrFeatureV[i].Val2[j]) TieCount[j] += 1;
		}
	}
	for (int j = 0; j < FeatureStrV.Len(); j++) {
		PosCount[j] = PosCount[j] / QtPrV.Len();
		NegCount[j] = NegCount[j] / QtPrV.Len(); 
		TieCount[j] = TieCount[j] / QtPrV.Len();
		printf("%s\t\tHigh = %%%.2lf\t\tTie = %%%.2lf\t\tLow = %%%.2lf\n", FeatureStrV[j].CStr(), PosCount[j]*100, TieCount[j]*100, NegCount[j]*100);
		fprintf(FSUM, "%s\t\tHigh = %%%.2lf\t\tTie = %%%.2lf\t\tLow = %%%.2lf\n", FeatureStrV[j].CStr(), PosCount[j]*100, TieCount[j]*100, NegCount[j]*100);
	}

	fclose(FSUM);
}

// Graph properties
// TEST Functions
void TMemeTrend::CoMentionLinkG() {
	printf("Test relationship between comention graph and link graph\n");
	TIntPrV EdgeV;
	TNodeEDatNet<TInt, TInt>::TEdgeI EI;
	for (EI = RawCoMDomNet.BegEI(); EI < RawCoMDomNet.EndEI(); EI++) {
		if (EI.GetDstNDat() < 300 || EI.GetSrcNDat() < 300) continue;
		int SrcId = EI.GetSrcNId(), DstId = EI.GetDstNId();
		if (SrcId > DstId) continue;
		if (!LinkDomNet.IsNode(SrcId) || !LinkDomNet.IsNode(DstId)) continue;
		EdgeV.Add(TIntPr(SrcId, DstId));
	}

	TFltV InSimV(EdgeV.Len());
	int Cnt = 0;
	#pragma omp parallel for schedule(dynamic)
	for (int i = 0; i < EdgeV.Len(); i++) {
		if (Cnt++ % 100000 == 0) printf("%d out of %d completed\n", Cnt, EdgeV.Len());
		int SrcId = EdgeV[i].Val1;
		int DstId = EdgeV[i].Val2;
		double InSim, OutSim;
		ComputeCoMJaccardSim(InSim, OutSim, SrcId, DstId);

		if (InSim > 0.95) InSim = 0.96;
		InSimV[i] = InSim;
	}

	THash<TFlt, TFlt> ValCntH, TotCntH, InCompCntH, OutCompCntH;
	THash<TFlt, TFlt> InCompSqrH, OutCompSqrH;
	for (int i = 0; i < EdgeV.Len(); i++) {
		int SrcId = EdgeV[i].Val1;
		int DstId = EdgeV[i].Val2;
		double InSim = InSimV[i];
		double BinNum = int (InSim / 0.05) * 0.05; 
		TotCntH.AddDat(BinNum) += 1;
		if (LinkDomNet.IsEdge(SrcId, DstId) || LinkDomNet.IsEdge(DstId, SrcId))
			ValCntH.AddDat(BinNum) += 1;
		TIntPr Key = TIntPr(SrcId, DstId);
		if (InCompH.IsKey(Key)) {
			InCompCntH.AddDat(BinNum) += InCompH.GetDat(Key).Val;
			OutCompCntH.AddDat(BinNum) += OutCompH.GetDat(Key).Val;
			InCompSqrH.AddDat(BinNum) += InCompH.GetDat(Key).Val * InCompH.GetDat(Key).Val;
			OutCompSqrH.AddDat(BinNum) += OutCompH.GetDat(Key).Val * OutCompH.GetDat(Key).Val;
		}
	}
	
	{
		ValCntH.SortByKey(true);
		TFltPrV XYV;
		TFltV DeltaV;
		for (int i = 0; i < ValCntH.Len(); i++) {
			TFlt BinNum = ValCntH.GetKey(i);
			ValCntH[i] = ValCntH[i].Val / TotCntH.GetDat(BinNum).Val;
			XYV.Add(TFltPr(ValCntH.GetKey(i), ValCntH[i]));
			double Std = sqrt(ValCntH[i].Val * (1 - ValCntH[i].Val) * TotCntH.GetDat(BinNum).Val);
			Std = 0;
			DeltaV.Add(Std);
			printf("BinNum = %.2f\tCnt = %.0f\n", BinNum.Val, TotCntH.GetDat(BinNum).Val);
		}
		TGnuPlot Gp("TEST_COMLinkProb", "Probability of Linking"); 
		Gp.AddErrBar(XYV, DeltaV);
		Gp.AddPlot(XYV);
		Gp.SetXYLabel("Similarity in CoMention Graph", "Probability of Linking");
		Gp.SetScale(gpsAuto);
		Gp.SavePng();
	}

	{
		InCompCntH.SortByKey(true);
		InCompSqrH.SortByKey(true);
		TFltPrV XYV;
		TFltV DeltaV;
		for (int i = 0; i < InCompCntH.Len(); i++) {
			IAssert(InCompCntH.GetKey(i) == InCompSqrH.GetKey(i));
			TFlt BinNum = InCompCntH.GetKey(i);
			InCompCntH[i] = InCompCntH[i].Val / TotCntH.GetDat(BinNum).Val;
			InCompSqrH[i] = InCompSqrH[i].Val / TotCntH.GetDat(BinNum).Val;
			XYV.Add(TFltPr(InCompCntH.GetKey(i),InCompCntH[i]));
			double Std = sqrt(InCompSqrH[i].Val - InCompCntH[i].Val * InCompCntH[i].Val);
			DeltaV.Add(Std);
		}
		TGnuPlot Gp("TEST_COMLinkInComp", "Subscriber Competition Coefficient versus Jaccard Similarity"); 
		Gp.AddErrBar(XYV, DeltaV);
		Gp.AddPlot(XYV);
		Gp.SetXYLabel("Similarity in CoMention Graph", "Subscriber Competition Coefficient");
		Gp.SetScale(gpsAuto);
		Gp.SavePng();
	}
	{
		OutCompCntH.SortByKey(true);
		OutCompSqrH.SortByKey(true);
		TFltPrV XYV;
		TFltV DeltaV;
		for (int i = 0; i < OutCompCntH.Len(); i++) {
			IAssert(OutCompCntH.GetKey(i) == OutCompSqrH.GetKey(i));
			TFlt BinNum = OutCompCntH.GetKey(i);
			OutCompCntH[i] = OutCompCntH[i].Val / TotCntH.GetDat(BinNum).Val;
			OutCompSqrH[i] = OutCompSqrH[i].Val / TotCntH.GetDat(BinNum).Val;
			XYV.Add(TFltPr(OutCompCntH.GetKey(i), OutCompCntH[i]));
			double Std = sqrt(OutCompSqrH[i].Val - OutCompCntH[i].Val * OutCompCntH[i].Val);
			DeltaV.Add(Std);
		}
		TGnuPlot Gp("TEST_COMLinkOutComp", "Source Competition Coefficient versus Jaccard Similarity"); 
		Gp.AddErrBar(XYV, DeltaV);
		Gp.AddPlot(XYV);
		Gp.SetXYLabel("Similarity in CoMention Graph", "Source Competition Coefficient");
		Gp.SetScale(gpsAuto);
		Gp.SavePng();
	}
	TGnuPlot::PlotValCntH(TotCntH,"TEST_COMLinkCnt", "Jaccard Similarity distribution for CoMention graph edges", "Jaccard Similarity in CoMention Graph", "Count");
}

// Estimate the average shortest-path distance (AvgDist) and an approximate
// diameter (D, the largest distance observed) of G by running BFS from a
// random sample of nodes of the first weakly connected component.
// NOTE(review): assumes CnComV[0] is the largest component — confirm that
// TSnap::GetWccs returns components sorted by size.
void TMemeTrend::GetAvgDist(PUNGraph& G, double& AvgDist, double& D) {
	TCnComV CnComV;
	TSnap::GetWccs(G, CnComV);
	int Len = CnComV[0].NIdV.Len();
	// BUGFIX: cap the sample size at the component size; otherwise the
	// sampling loop below never terminates on components with < 5000 nodes
	// (a set cannot accumulate more distinct keys than there are nodes).
	int SampleNum = TMath::Mn(5000, Len);
	TIntSet SampleSet;
	TRnd Rnd; Rnd.Randomize();
	while (SampleSet.Len() < SampleNum) {
		int idx = int(Rnd.GetUniDev()*Len);
		SampleSet.AddKey(CnComV[0].NIdV[idx]);
	}

	AvgDist = 0; D = 0;
	for (int i = 0; i < SampleSet.Len(); i++) {
		int Id = SampleSet[i];
		// BFS from Id; DistH maps reached node -> hop distance, Queue is the
		// BFS frontier implemented as a growing vector with read index idx.
		TIntH DistH;
		DistH.AddDat(Id) = 0;
		TIntV Queue;
		Queue.Add(Id);
		int idx = -1;
		while (idx < Queue.Len() - 1) {
			idx += 1;
			int CurId = Queue[idx];
			TUNGraph::TNodeI NI = G->GetNI(CurId);
			for (int e = 0; e < NI.GetDeg(); e++) {
				int NextId = NI.GetOutNId(e);
				if (DistH.IsKey(NextId)) continue;
				DistH.AddDat(NextId) = DistH.GetDat(CurId) + 1;
				Queue.Add(NextId);
			}
		}
		// average distance from this source; D tracks the global maximum
		double Dist = 0;
		for (int j = 0; j < DistH.Len(); j++) {
			Dist += DistH[j].Val;
			if (DistH[j].Val > D)
				D = DistH[j].Val;
		}
		Dist /= DistH.Len();
		AvgDist += Dist;
	}
	AvgDist = AvgDist / SampleSet.Len();
}

void TMemeTrend::RewireCoMGraph() {
	printf("Comparing CoMention Graph with random baseline\n");
	PUNGraph G = PUNGraph::New();
	for (TNodeEDatNet<TInt, TInt>::TNodeI NI = CoMDomNet.BegNI(); NI < CoMDomNet.EndNI(); NI++)
		G->AddNode(NI.GetId());
	for (TNodeEDatNet<TInt, TInt>::TEdgeI EI = CoMDomNet.BegEI(); EI < CoMDomNet.EndEI(); EI++)
		G->AddEdge(EI.GetSrcNId(), EI.GetDstNId());

	TRnd Rnd; //Rnd.Randomize();
	PUNGraph RndG = PUNGraph::New();
	TIntH DegH; int TotCnt = 0;
	for (TUNGraph::TNodeI NI = G->BegNI(); NI < G->EndNI(); NI++) {
		RndG->AddNode(NI.GetId());
		if (NI.GetDeg() != 0)
			DegH.AddDat(NI.GetId()) = NI.GetDeg();
		TotCnt += NI.GetDeg();
	}
	DegH.SortByDat(false);

	int DegHLen = DegH.Len();
	int EdgeNum = G->GetEdges();
	int NodeN = 0;
	while (EdgeNum > 200000) { 
		while (DegH[NodeN] == 0) NodeN++;
		int NodeM = int(Rnd.GetUniDev() * (DegH.Len() - NodeN - 1)) + NodeN + 1;
		IAssert(NodeN != NodeM);
		while (DegH[NodeM] == 0) { NodeM = int(Rnd.GetUniDev() * (DegH.Len() - NodeN - 1)) + NodeN + 1;}
		if (RndG->IsEdge(DegH.GetKey(NodeN), DegH.GetKey(NodeM))) continue;
		DegH[NodeN] -= 1;
		DegH[NodeM] -= 1;
		if (DegH[NodeN] == 0) DegHLen -= 1;
		if (DegH[NodeM] == 0) DegHLen -= 1;
		RndG->AddEdge(DegH.GetKey(NodeN), DegH.GetKey(NodeM));
		EdgeNum -= 1;
	}

	TFltIntPrV NodeV;
	for (int i = 0; i < DegH.Len(); i++) {
		if (DegH[i] == 0) continue;
		for (int j = 0; j < DegH[i].Val; j++) {
			NodeV.Add(TFltIntPr(Rnd.GetUniDev(), DegH.GetKey(i)));
		}
	}
	NodeV.Sort(true);

	int i = 0;
	while (i < NodeV.Len()) {
		if (NodeV[i].Val2 != NodeV[i+1].Val2) 
			RndG->AddEdge(NodeV[i].Val2, NodeV[i+1].Val2);
		i += 2;
	}

	printf("Dumping info for original Graph\n");
	TSnap::PrintInfo(G);
	printf("Dumping info for random graph\n");
	TSnap::PrintInfo(RndG);

	// Connectivity
	TCnComV CnComV, RndCnComV;
	TSnap::GetWccs(G, CnComV);
	TSnap::GetWccs(RndG, RndCnComV);
	TIntH CcH, RndCcH;
	for (int i = 0; i < CnComV.Len(); i++)
		CcH.AddDat(CnComV[i].NIdV.Len()) += 1;
	for (int i = 0; i < RndCnComV.Len(); i++) 
		RndCcH.AddDat(RndCnComV[i].NIdV.Len()) += 1;
	TGnuPlot::PlotValCntH(CcH, "COM Graph", RndCcH, "Rewired Graph", "TEST_COM_cc", "Connectivity comparison between co-mention graph and rewired graph", "Size of clusters", "Count", gpsLog);

	// ClustCf
	double ClustCf = TSnap::GetClustCf(G);
	double RndClustCf = TSnap::GetClustCf(RndG);
	printf("Clustering Coefficient for CoMention Graph is %f\n", ClustCf);
	printf("Clustering Coefficient for Rewired Graph is %f\n", RndClustCf);

	double AvgDist, RndAvgDist;
	double D, RndD;
	GetAvgDist(G, AvgDist, D);
	GetAvgDist(RndG, RndAvgDist, RndD);
	printf("Average Distance for CoMention Graph is %f\n", AvgDist);
	printf("Average Distance for Rewired Graph is %f\n", RndAvgDist);
	printf("Approximate Diameter for CoMention Graph is %f\n", D);
	printf("Approximate Diameter for Rewired Graph is %f\n", RndD);
}


// Utility function
// Dump the most frequent connectors
void TMemeTrend::DumpFreqConnector() {
	// Write connector domains in decreasing-frequency order, each with its
	// degrees in the undirected and directed hyperlink graphs.
	FILE* Out = fopen("STATS_FqConnector.txt", "w");
	TMPConnCntH.SortByDat(false);
	for (int KeyN = 0; KeyN < TMPConnCntH.Len(); KeyN++) {
		const int DomId = TMPConnCntH.GetKey(KeyN);
		fprintf(Out, "Fq = %d\tID = %d\tDom = %s\n", TMPConnCntH[KeyN].Val, DomId, DomStrH.GetDat(DomId).CStr());
		fprintf(Out, "\tDeg = %d\tOutDeg = %d\tInDeg = %d\n", DomUNGraph->GetNI(DomId).GetDeg(), LinkDomNet.GetNI(DomId).GetOutDeg(), LinkDomNet.GetNI(DomId).GetInDeg());
	}
	fclose(Out);
}

// Dump domains by degrees in hyperlink graph
void TMemeTrend::DumpDomainByLinkDeg() {
	// Dump the link graph domains by degree
	TIntPrV FqQtPrV;
	TUNGraph::TNodeI NI;
	for (NI = DomUNGraph->BegNI(); NI < DomUNGraph->EndNI(); NI++) 
		if (NI.GetDeg() > 10) FqQtPrV.Add(TIntPr(NI.GetDeg(), NI.GetId()));
	FqQtPrV.Sort(false);
	FILE* FTMP = fopen("TEST_FqDomain.txt", "w");
	for (int i = 0; i < FqQtPrV.Len(); i++)
		fprintf(FTMP, "Fq = %d, Dom = %s\n", FqQtPrV[i].Val1(), DomStrH.GetDat(FqQtPrV[i].Val2).CStr());
	fclose(FTMP);
}

// Print information about the graph
// Print basic statistics of a float-weighted domain network to stdout:
// node/edge counts and counts of zero-degree / zero-in / zero-out nodes.
// BUGFIX: removed the unused locals UniqDirE / UniqUnDirE (never written or read).
void TMemeTrend::PrintDomNetInfo(const TNodeEDatNet<TFlt, TFlt>& DomNet) {
  int ZeroNodes=0, ZeroInNodes=0, ZeroOutNodes=0, NonZIODegNodes=0;
  FILE *F = stdout;
  fprintf(F, "Graph:");
  // count degree categories in a single pass over the nodes
  for (TNodeEDatNet<TFlt,TFlt>::TNodeI NI = DomNet.BegNI(); NI < DomNet.EndNI(); NI++) {
    if (NI.GetDeg()==0) ZeroNodes++;
    if (NI.GetInDeg()==0) ZeroInNodes++;
    if (NI.GetOutDeg()==0) ZeroOutNodes++;
    if (NI.GetInDeg()!=0 && NI.GetOutDeg()!=0) NonZIODegNodes++;
  }
  // print info
  fprintf(F, "\n");
  fprintf(F, "  Nodes:                    %d\n", DomNet.GetNodes());
  fprintf(F, "  Edges:                    %d\n", DomNet.GetEdges());
  fprintf(F, "  Zero Deg Nodes:           %d\n", ZeroNodes);
  fprintf(F, "  Zero InDeg Nodes:         %d\n", ZeroInNodes);
  fprintf(F, "  Zero OutDeg Nodes:        %d\n", ZeroOutNodes);
  fprintf(F, "  NonZero In-Out Deg Nodes: %d\n", NonZIODegNodes);
}
// Print basic statistics of an int-weighted domain network to stdout
// (mirror of the TFlt overload above): node/edge counts and counts of
// zero-degree / zero-in / zero-out nodes.
// BUGFIX: removed the unused locals UniqDirE / UniqUnDirE (never written or read).
void TMemeTrend::PrintDomNetInfo(const TNodeEDatNet<TInt, TInt>& DomNet) {
  int ZeroNodes=0, ZeroInNodes=0, ZeroOutNodes=0, NonZIODegNodes=0;
  FILE *F = stdout;
  fprintf(F, "Graph:");
  // count degree categories in a single pass over the nodes
  for (TNodeEDatNet<TInt, TInt>::TNodeI NI = DomNet.BegNI(); NI < DomNet.EndNI(); NI++) {
    if (NI.GetDeg()==0) ZeroNodes++;
    if (NI.GetInDeg()==0) ZeroInNodes++;
    if (NI.GetOutDeg()==0) ZeroOutNodes++;
    if (NI.GetInDeg()!=0 && NI.GetOutDeg()!=0) NonZIODegNodes++;
  }
  // print info
  fprintf(F, "\n");
  fprintf(F, "  Nodes:                    %d\n", DomNet.GetNodes());
  fprintf(F, "  Edges:                    %d\n", DomNet.GetEdges());
  fprintf(F, "  Zero Deg Nodes:           %d\n", ZeroNodes);
  fprintf(F, "  Zero InDeg Nodes:         %d\n", ZeroInNodes);
  fprintf(F, "  Zero OutDeg Nodes:        %d\n", ZeroOutNodes);
  fprintf(F, "  NonZero In-Out Deg Nodes: %d\n", NonZIODegNodes);
}

// TEST function
void TMemeTrend::GetDuplicateDoms() {
	THash<TMd5Sig, TMd5Sig> DupDomH;
	printf("Dumping duplicate domains\n");
	PUNGraph G = PUNGraph::New();;
	TNodeEDatNet<TInt, TInt>::TEdgeI EI;
	for (EI = RawCoMDomNet.BegEI(); EI < RawCoMDomNet.EndEI(); EI++) {
		if (EI.GetSrcNDat().Val < 500) continue;
		if (EI.GetDstNDat().Val < 500) continue;
		if (EI.GetDat().Val < 0.9 * TMath::Mn(EI.GetSrcNDat(), EI.GetDstNDat())) continue;
		if (EI.GetDat().Val < 0.5 * TMath::Mx(EI.GetSrcNDat(), EI.GetDstNDat())) continue;
		int SrcId = EI.GetSrcNId();
		int DstId = EI.GetDstNId();
		//if (DomNet.IsEdge(SrcId, DstId) || DomNet.IsEdge(DstId, SrcId)) continue;
		if (!G->IsNode(SrcId))
			G->AddNode(SrcId);
		if (!G->IsNode(DstId))
			G->AddNode(DstId);
		G->AddEdge(SrcId, DstId);
	}
	TCnComV CnComV;
	TSnap::GetWccs(G, CnComV);
	FILE* F = fopen("TEST_DupDoms.txt", "w");
	for (int i = 0; i < CnComV.Len(); i++) {
		if (CnComV[i].NIdV.Len() == 1) continue;
		TMd5Sig RepDomSig(DomIdH.GetKey(CnComV[i].NIdV[0]));
		for (int j = 0; j < CnComV[i].NIdV.Len(); j++) {
			DupDomH.AddDat(DomIdH.GetKey(CnComV[i].NIdV[j])) = RepDomSig;
			fprintf(F, "%s\t", DomStrH.GetDat(CnComV[i].NIdV[j]).CStr());
		}
		fprintf(F, "\n");
	}
	fclose(F);
	TFOut FOut("DupDoms.bin"); DupDomH.Save(FOut);
}

// For each domain mentioning quotes in QtVarPrV, measure how often it appears
// only in the first quote of a pair (PCnt) or only in the second (NCnt), and
// dump a per-domain summary (sorted by out-degree) to TEST_DomPredict.txt.
// PERF: the scoring loop used to call GetQtDomIdV for every (domain, pair)
// combination — O(|DomSet| * |QtVarPrV|) recomputation; the per-pair domain
// sets are now built once up front.
void TMemeTrend::DomainPrediction(TIntPrV& QtVarPrV, int VBegDomNum) {
	printf("Individual domain prediction\n");
	TVec<TQuad<TFlt, TInt, TFlt, TFlt> > RecQV;
	TIntSet DomSet;  // domains appearing in the head of both pair members
	TVec<TIntSet> DomSet1V, DomSet2V;  // per-pair domain sets, hoisted out of the scoring loop
	for (int i = 0; i < QtVarPrV.Len(); i++) {
		int QtN1 = QtVarPrV[i].Val1, QtN2 = QtVarPrV[i].Val2;
		TIntV DomIdV1, DomIdV2;
		GetQtDomIdV(DomIdV1, QtN1, VBegDomNum);
		GetQtDomIdV(DomIdV2, QtN2, VBegDomNum);
		for (int j = 0; j < TMath::Mn(DomIdV1.Len(), DomIdV2.Len()); j++) {
			DomSet.AddKey(DomIdV1[j]);
			DomSet.AddKey(DomIdV2[j]);
		}
		DomSet1V.Add(TIntSet(DomIdV1));
		DomSet2V.Add(TIntSet(DomIdV2));
	}

	FILE* FTMP3 = fopen("TEST_DomPredict.txt", "w");
	for (int i = 0; i < DomSet.Len(); i++) {
		int TESTDomId = DomSet[i];
		// keep only reasonably connected domains in the hyperlink graph
		if (LinkDomNet.GetNI(TESTDomId).GetInDeg() <= 20 && LinkDomNet.GetNI(TESTDomId).GetOutDeg() <= 20) continue;
		if (LinkDomNet.GetNI(TESTDomId).GetInDeg() <= 10) continue;
		double PCnt = 0, NCnt = 0, TCnt = 0;
		for (int j = 0; j < QtVarPrV.Len(); j++) {
			const TIntSet& DomIdSet1 = DomSet1V[j];
			const TIntSet& DomIdSet2 = DomSet2V[j];
			if (DomIdSet1.IsKey(TESTDomId) && !DomIdSet2.IsKey(TESTDomId))
				PCnt += 1;
			else if (!DomIdSet1.IsKey(TESTDomId) && DomIdSet2.IsKey(TESTDomId))
				NCnt += 1;
			else
				TCnt += 1;
		}
		PCnt /= QtVarPrV.Len(); TCnt /= QtVarPrV.Len(); NCnt /= QtVarPrV.Len();
		if (PCnt+NCnt < 0.01) continue;  // domain almost never discriminates
		if (NCnt < 0.01) continue;
		RecQV.Add(TQuad<TFlt,TInt,TFlt,TFlt>(LinkDomNet.GetNI(TESTDomId).GetOutDeg(), TESTDomId, PCnt, NCnt));
	}
	RecQV.Sort(false);  // by out-degree, descending

	for (int i = 0; i < RecQV.Len(); i++) {
		int DomId = RecQV[i].Val2;
		double PCnt = RecQV[i].Val3;
		double NCnt = RecQV[i].Val4;
		TStr DomStr = DomStrH.GetDat(DomId);
		fprintf(FTMP3, "%s\t%d\t%d\t%.2f%%\t%.2f%%\n", DomStr.CStr(), LinkDomNet.GetNI(DomId).GetInDeg(), LinkDomNet.GetNI(DomId).GetOutDeg(), PCnt*100, NCnt*100);
	}
	fclose(FTMP3);
}

// Dump diagnostics (mentions and feature vector) for every quote listed in
// "Bad Quote.txt" (format: a count N, then N lines of "quote-id error-rate")
// into TEST_badstats.txt.
// BUGFIX: fopen/fscanf results were unchecked — a missing or malformed input
// file dereferenced NULL and read uninitialized values.
void TMemeTrend::InvestigateBadQuote(int VBegDomNum) {
	printf("Dumping Bad Quote\n");
	FILE* fin = fopen("Bad Quote.txt", "r");
	if (fin == NULL) { printf("Cannot open Bad Quote.txt\n"); return; }
	FILE* F = fopen("TEST_badstats.txt", "w");
	if (F == NULL) { printf("Cannot open TEST_badstats.txt\n"); fclose(fin); return; }
	int N; double ErrRate;
	if (fscanf(fin, "%d", &N) != 1) { N = 0; }
	printf("Total %d Bad Quote\n", N);
	for (int idx = 0; idx < N; idx++) {
		int QtN;
		if (fscanf(fin, "%d %lf", &QtN, &ErrRate) != 2) { break; }  // truncated input
		TIntV DomIdV;
		GetQtDomIdV(DomIdV, QtN, VBegDomNum);
		TFltV FeatureV;
		ComputeFeatureVector(FeatureV, QtN, DomIdV);
		TQuote& Qt = QtBs->QuoteH[QtN];
		fprintf(F, "\n Error Rate = %f\n", ErrRate);
		fprintf(F, "%d\t%s\t%d\n", Qt.GetUrls(), Qt.GetStr().CStr(), QtN);
		// every mention: time, count, Media/Blog flag, url string
		for (int u = 0; u < Qt.GetUrls(); u++) {
			fprintf(F, "\t\t%s\t%d\t%s\t%s\n", Qt.TmUrlCntV[u].Tm().GetYmdTmStr().CStr(), Qt.TmUrlCntV[u].Cnt(),
				QtBs->GetUrlTy(Qt.TmUrlCntV[u].UrlId())==utMedia?"M":"B", QtBs->GetStr(Qt.TmUrlCntV[u].UrlId()));
		}
		// the quote's feature vector, one named value per line
		for (int j = 0; j < FeatureV.Len(); j++) {
			fprintf(F, "%d %s = %.02lf\n", j+1, FeatureStrV[j].CStr(), FeatureV[j].Val);
		}
		fprintf(F, "\n");
	}
	fclose(fin);
	fclose(F);
}

// Dump dataset characteristics contrasting high-volume (>= HiThresh mentions)
// and low-volume (<= LoThresh mentions) quotes: per-hour average volume of
// each group, average L1 distance between randomly paired high/low hourly
// signals, and overall time-span / volume averages.
void TMemeTrend::TESTDataSetStats() {
	printf("Dump dataset characteristics\n");

	// AvgL1H:   hour -> avg L1 distance between sampled high/low signal pairs
	// HAvgVolH: hour -> avg volume of high-volume quotes over first h hours
	// LAvgVolH: hour -> avg volume of low-volume quotes over first h hours
	TFltFltH AvgL1H, HAvgVolH, LAvgVolH;	
	for (int h = 1; h < 20; h++) {
		printf("Processing first %d hours of stats of dataset\n", h);
		TIntSet PosSet, NegSet;  // quote ids of high- / low-volume memes
		THash<TInt, TQuote::TTmFltPrV> SigH;  // quote id -> per-hour volume signal
		for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
			TQuote& Qt = QtBs->QuoteH[i];
			if (Qt.GetTy() == qtCentr) continue;  // skip merged cluster centroids
			// keep only quotes clearly above HiThresh or below LoThresh
			if (Qt.GetUrls() > LoThresh && Qt.GetUrls() < HiThresh) continue;
			if (Qt.GetUrls() < 30) continue;
			if (Qt.GetTmDev() > 7) continue;			
			TQuote::TTmFltPrV SigV;
			ComputeQtSig(i, SigV, tmu1Hour, h);
			double Vol = 0;
			for (int k = 0; k < h; k++)
				Vol += SigV[k].Val2;
			if (Vol < 3 * h) continue;  // require an average of >= 3 mentions/hour
			SigH.AddDat(i, SigV);
			if (Qt.GetUrls() >= HiThresh) {
				HAvgVolH.AddDat(h) += Vol;
				PosSet.AddKey(i);
			}
			if (Qt.GetUrls() <= LoThresh) {
				LAvgVolH.AddDat(h) += Vol;
				NegSet.AddKey(i);
			}
		}
		// NOTE(review): if no quote qualified for a bucket at this h, GetDat
		// hits a missing key and PosSet/NegSet.Len() is 0 — assumed non-empty.
		HAvgVolH.GetDat(h) = HAvgVolH.GetDat(h).Val / PosSet.Len();
		LAvgVolH.GetDat(h) = LAvgVolH.GetDat(h).Val / NegSet.Len();

		// Average L1 distance between the hourly signals of a random
		// high-volume quote and a random low-volume quote.
		int IterNum = 10000; 
		TRnd Rnd; Rnd.Randomize();
		for (int i = 0; i < IterNum; i++) {
			int QtN1 = PosSet[int(PosSet.Len() * Rnd.GetUniDev())];
			int QtN2 = NegSet[int(NegSet.Len() * Rnd.GetUniDev())];
			TQuote::TTmFltPrV& SigV1 = SigH.GetDat(QtN1);
			TQuote::TTmFltPrV& SigV2 = SigH.GetDat(QtN2);
			double dist = 0;
			for (int k = 0; k < h; k++) {
				// Calculate distance (L1 over the first h hourly values)
				dist += fabs(SigV1[k].Val2.Val - SigV2[k].Val2.Val);
			}
			AvgL1H.AddDat(h) += dist;			
		}
		AvgL1H.GetDat(h) = AvgL1H.GetDat(h).Val / IterNum;
	}

	// Whole-dataset averages, using the same quote filtering as above.
	double HAvgTimeSpan = 0, LAvgTimeSpan = 0;
	double HAvgVol = 0, LAvgVol = 0;
	double HTot = 0, LTot = 0;
	for (int i = 0; i < QtBs->QuoteH.Len(); i++) {
		TQuote& Qt = QtBs->QuoteH[i];
		if (Qt.GetTy() == qtCentr) continue;
		if (Qt.GetUrls() > LoThresh && Qt.GetUrls() < HiThresh) continue;
		if (Qt.GetUrls() < 30) continue;
		if (Qt.GetTmDev() > 7) continue;
		if (Qt.GetUrls() >= HiThresh) {
			HAvgTimeSpan += ComputeTimeSpan(i);
			HAvgVol += Qt.GetUrls();
			HTot += 1;
		}
		if (Qt.GetUrls() <= LoThresh) {
			LAvgTimeSpan += ComputeTimeSpan(i);
			LAvgVol += Qt.GetUrls();
			LTot += 1;
		}
	}
	HAvgTimeSpan /= HTot;
	LAvgTimeSpan /= LTot;
	HAvgVol /= HTot;
	LAvgVol /= LTot;

	printf("Average time span for high volume meme is %.3f hours\n", HAvgTimeSpan);
	printf("Average time span for low volume meme is %.3f hours\n", LAvgTimeSpan);
	printf("Average volume for high volume meme is %.1f mentions\n", HAvgVol);
	printf("Average volume for low volume meme is %.1f mentions\n", LAvgVol);
	
	TGnuPlot::PlotValCntH(AvgL1H, "STATS_dataset_avgl1h", "Average L1 distance between high and low volume memes", "Hours", "Average L1 Distance");
	TGnuPlot::PlotValCntH(HAvgVolH, "High volume meme", LAvgVolH, "Low volume meme", "STATS_dataset_avgvolh", "Average volume for high and low volume memes", "Hours", "Average volume");
}




/////////////////////////////////////////////////
// Quote Clustering Net
PClustNet TClustNet::GetSubGraph(const TIntV& NIdV) const {
  // Build a new network with exactly the nodes in NIdV (node data copied)
  // and every edge of this network whose endpoints both survive.
  PClustNet SubNetPt = TClustNet::New();
  TClustNet& SubNet = *SubNetPt;
  SubNet.Reserve(NIdV.Len(), -1);
  // copy nodes first so that edge insertion can test membership
  for (int n = 0; n < NIdV.Len(); n++) {
    SubNet.AddNode(NIdV[n], GetNDat(NIdV[n]));
  }
  for (int n = 0; n < NIdV.Len(); n++) {
    const TClustNet::TNodeI NI = GetNI(NIdV[n]);
    for (int e = 0; e < NI.GetOutDeg(); e++) {
      const int DstNId = NI.GetOutNId(e);
      if (SubNet.IsNode(DstNId)) {
        SubNet.AddEdge(NI.GetId(), DstNId);
      }
    }
  }
  SubNet.Defrag();
  return SubNetPt;
}

void TClustNet::AddLink(const TQuote& SrcQt, const TQuote& DstQt) {
  // Insert both quotes as nodes (carrying the quote as node data) and add a
  // directed edge SrcQt -> DstQt unless the reverse edge already exists.
  const int SrcId = SrcQt.GetCId();
  const int DstId = DstQt.GetCId();
  if (!IsNode(SrcId)) { AddNode(SrcId, SrcQt); }
  if (!IsNode(DstId)) { AddNode(DstId, DstQt); }
  if (!IsEdge(DstId, SrcId)) { AddEdge(SrcId, DstId); }
}

PClustNet TClustNet::GetSubGraph(const int& MinQtWords, const int& MaxQtWords, const int& MinFq) const {
  // Select nodes whose quote frequency and word count fall inside the given
  // bounds, then delegate to the node-list overload.
  TIntV KeepNIdV;
  for (TNodeI NI = BegNI(); NI < EndNI(); NI++) {
    const int WrdCnt = TStrUtil::CountWords(NI().GetStr().CStr());
    const bool LenOk = WrdCnt >= MinQtWords && WrdCnt <= MaxQtWords;
    if (LenOk && NI().GetFq() >= MinFq) {
      KeepNIdV.Add(NI.GetId());
    }
  }
  return GetSubGraph(KeepNIdV);
}

void TClustNet::RecalcEdges(const double& MinOverlapFrac) {
  printf("Recalculating edges...\n");
  TIntPrV DelEdgeV;
  TStrHash<TInt> StrH(Mega(1), true);
  int WIdV1Start, WIdV2Start, SkipTy;
  TIntV WIdV1, WIdV2;
  for (TNodeI NI = BegNI(); NI < EndNI(); NI++) {
    const TStr Q1 = NI().GetStr();
    TStrUtil::GetAddWIdV(StrH, Q1.CStr(), WIdV1);
    for (int e = 0; e < NI.GetOutDeg(); e++) {
      const TStr Q2 = NI.GetOutNDat(e).GetStr();
      TStrUtil::GetAddWIdV(StrH, Q2.CStr(), WIdV2);
      const int Overlap = TQuoteBs::LongestCmnSubSq(WIdV1, WIdV2, WIdV1Start, WIdV2Start, SkipTy);
      const int ShortLen = TMath::Mn(WIdV1.Len(), WIdV2.Len());
      const int LongLen = TMath::Mx(WIdV1.Len(), WIdV2.Len());
      IAssert(Overlap<=ShortLen);
      if (2*ShortLen>LongLen && Overlap/double(ShortLen) > MinOverlapFrac) { continue; }
      DelEdgeV.Add(TIntPr(NI.GetId(), NI.GetOutNId(e)));
    }
  }
  printf("Deleting %d/%d (%.4f) edges\n", DelEdgeV.Len(), GetEdges(), DelEdgeV.Len()/double(GetEdges()));
  for (int i = 0; i < DelEdgeV.Len(); i++) {
    DelEdge(DelEdgeV[i].Val1, DelEdgeV[i].Val2);
  }
}

// Create clusters based on the edges to keep
// Build clusters as the connected components of the graph induced by
// KeepEdgeV, then delete every edge of this network whose endpoints lie in
// two different clusters (edges touching nodes in no cluster are kept).
void TClustNet::MakeClusters(const TIntPrV& KeepEdgeV) {
  // make clusters: an undirected helper graph over the kept edges
  PUNGraph G = TUNGraph::New();
  for (int e = 0; e < KeepEdgeV.Len(); e++) {
    if (! G->IsNode(KeepEdgeV[e].Val1)) {
      G->AddNode(KeepEdgeV[e].Val1); }
    if (! G->IsNode(KeepEdgeV[e].Val2)) {
      G->AddNode(KeepEdgeV[e].Val2); }
    G->AddEdge(KeepEdgeV[e].Val1, KeepEdgeV[e].Val2);
  }
  TCnComV CnComV;
  TSnap::GetWccs(G, CnComV);
  // map node id -> component (cluster) id
  TIntH NIdCcIdH(GetNodes());
  for (int c = 0; c < CnComV.Len(); c++) {
    const TIntV& NIdV = CnComV[c].NIdV;
    for (int n = 0; n < NIdV.Len(); n++) {
      NIdCcIdH.AddDat(NIdV[n], c);
    }
  }
  // collect edges whose endpoints belong to two different known clusters
  TIntPrV DelEdgeV;
  for (TEdgeI EI = BegEI(); EI < EndEI(); EI++) {
    const int ccid1 = NIdCcIdH.IsKey(EI.GetSrcNId()) ? NIdCcIdH.GetDat(EI.GetSrcNId()).Val : -1;
    const int ccid2 = NIdCcIdH.IsKey(EI.GetDstNId()) ? NIdCcIdH.GetDat(EI.GetDstNId()).Val : -1;
	//if (ccid1 != ccid2) {
    // -1 marks an endpoint in no cluster; such edges are deliberately kept
    if (ccid1 != ccid2 && ccid1!=-1 && ccid2!=-1) {
      DelEdgeV.Add(TIntPr(EI.GetSrcNId(), EI.GetDstNId())); }
  }
  const int Edges = GetEdges();
  printf("Deleting %d out of %d  (%f) edges\n", DelEdgeV.Len(), Edges, DelEdgeV.Len()/double(Edges));
  for (int d = 0; d < DelEdgeV.Len(); d++) {
    DelEdge(DelEdgeV[d].Val1, DelEdgeV[d].Val2);
  }
}

// Delete every edge that is not (as an unordered node pair) in KeepEdgeV.
void TClustNet::KeepOnlyTree(const TIntPrV& KeepEdgeV) {
  // normalize kept edges to (min,max) so orientation does not matter
  TIntPrSet EdgeSet(KeepEdgeV.Len());
  for (int i = 0; i < KeepEdgeV.Len(); i++) {
    EdgeSet.AddKey(TIntPr(TMath::Mn(KeepEdgeV[i].Val1, KeepEdgeV[i].Val2),
      TMath::Mx(KeepEdgeV[i].Val1, KeepEdgeV[i].Val2)));
  }
  TIntPrSet DelEdgeV;  // normalized edges to remove (set de-duplicates)
  for (TEdgeI EI = BegEI(); EI < EndEI(); EI++) {
    const int N1 = TMath::Mn(EI.GetSrcNId(), EI.GetDstNId());
    const int N2 = TMath::Mx(EI.GetSrcNId(), EI.GetDstNId());
    if (! EdgeSet.IsKey(TIntPr(N1, N2))) {
      DelEdgeV.AddKey(TIntPr(N1, N2)); }
  }
  printf("deleting %d edges\n", DelEdgeV.Len());
  // NOTE(review): deletion uses the normalized (min,max) orientation; if the
  // stored edge runs max->min this relies on DelEdge accepting either
  // direction — confirm against the network type's DelEdge semantics.
  for (int d = 0; d < DelEdgeV.Len(); d++) {
    DelEdge(DelEdgeV[d].Val1, DelEdgeV[d].Val2);
  }
}

// Get connected components (clusters)
void TClustNet::GetClusters(TVec<TIntV>& QtNIdV) const {
  // Return the weakly connected components, largest first, asserting that
  // no node appears in more than one component.
  TCnComV CnComV;
  TSnap::GetWccs(GetThis(), CnComV);
  CnComV.Sort(false);
  QtNIdV.Clr(false);
  TIntSet SeenSet;
  for (int c = 0; c < CnComV.Len(); c++) {
    const TIntV& CompNIdV = CnComV[c].NIdV;
    for (int n = 0; n < CompNIdV.Len(); n++) {
      IAssert(! SeenSet.IsKey(CompNIdV[n]));
      SeenSet.AddKey(CompNIdV[n]);
    }
    QtNIdV.Add(CompNIdV);
  }
}

// Merge the quotes with ids QtIdV into a single quote NewQt: mention counts
// are summed per (time, url) key, and the string of the most frequent member
// becomes the merged quote's string.
void TClustNet::GetMergedClustQt(const TIntV& QtIdV, TQuote& NewQt) const {
  int CentrQtId=-1, MxFq=0;
  // (time, url-id) -> total mention count across all member quotes
  THash<TPair<TSecTm, TInt>, TInt> TmUrlCntH;
  for (int c = 0; c < QtIdV.Len(); c++) {
    const TQuote& Q = GetNDat(QtIdV[c]);
    IAssert(Q.GetId() == QtIdV[c]);
    for (int t = 0; t < Q.GetTimes(); t++) {
      TmUrlCntH.AddDat(TPair<TSecTm, TInt>(Q.GetTm(t), Q.GetUrlId(t))) += Q.GetCnt(t);
    }
    // track the member with the highest frequency; it represents the cluster
    if (MxFq < Q.GetFq()) {
      MxFq = Q.GetFq();
      CentrQtId = Q.GetId();
    }
    // NOTE(review): if every member has frequency 0, CentrQtId stays -1 and
    // GetNDat(CentrQtId) below would fail — assumed not to happen in practice.
  }
  NewQt.QtCIdTy = TQuote::TQtIdTy(-1, qtCentr);  // merged quotes are 'centroid' typed
  NewQt.QtStr = GetNDat(CentrQtId).GetStr(); // get more frequent string
  //NewQt.QtStr = GetQt(CId).GetStr();     // get longest string
  NewQt.TmUrlCntV.Gen(TmUrlCntH.Len(), 0);  // reserve capacity, length stays 0
  for (int u = 0; u < TmUrlCntH.Len(); u++) {
    NewQt.TmUrlCntV.Add(TQuote::TTmUrlCnt(TmUrlCntH.GetKey(u).Val1, TmUrlCntH.GetKey(u).Val2(), TmUrlCntH[u]()));
  }
  NewQt.TmUrlCntV.Sort();
}

// Create subgraphs and see the number of induced edges
// Score a clustering given by KeepEdgeV: build the cluster graph, take its
// connected components, and count how many edges of the full network fall
// inside clusters. Returns TotEdges - TotNodes, i.e. the number of
// intra-cluster edges beyond a spanning tree. Optionally dumps a summary.
int TClustNet::EvalPhraseClusters(const TIntPrV& KeepEdgeV, const bool& dump) const {
  PNGraph G = TNGraph::New();
  for (int e = 0; e < KeepEdgeV.Len(); e++) {
    if (! G->IsNode(KeepEdgeV[e].Val1)) {
      G->AddNode(KeepEdgeV[e].Val1); }
    if (! G->IsNode(KeepEdgeV[e].Val2)) {
      G->AddNode(KeepEdgeV[e].Val2); }
    G->AddEdge(KeepEdgeV[e].Val1, KeepEdgeV[e].Val2);
  }
  TCnComV CnComV;
  TSnap::GetWccs(G, CnComV);
  // switch G to the full network so per-component subgraphs count all edges
  G = TSnap::ConvertGraph<PNGraph>(TPt<TClustNet>((TClustNet*) this));
  int TotEdges=0, EdgesInBigClust=0;
  int TotNodes=0, NodesInBigClust=0;
  int BigClust=0;
  for (int cc = 0; cc < CnComV.Len(); cc++) {
    if (CnComV[cc].NIdV.Len() < 3) { continue; }  // ignore tiny clusters
    PNGraph CC = TSnap::GetSubGraph(G, CnComV[cc].NIdV);
    TotEdges += CC->GetEdges();
    TotNodes += CC->GetNodes()-1;  // nodes-1 == edges of a spanning tree
    if (CC->GetNodes() > 10) {
      EdgesInBigClust += CC->GetEdges();
      NodesInBigClust += CC->GetNodes()-1;
      BigClust++;
    }
  }
  if (dump) {
    printf("                                all\tbig(>10)\n");
    printf("  Number of clusters:           %d\t%d\n", CnComV.Len(), BigClust);
    printf("  Total edges inside clusters:  %d\t%d\n", TotEdges, TotEdges-TotNodes);
    printf("  Total edges deleted:          %d\t%d\n", GetEdges()-TotEdges, GetEdges()-TotEdges-TotNodes);
    printf("  Total edges in big clusters:  %d\t%d\n", EdgesInBigClust, EdgesInBigClust-NodesInBigClust);
  }
  return TotEdges-TotNodes;
}

// 1: keep link to most freqeunt variant
// 2: keep link to longest variant
// 3: keep link to shortest variant
// 4: keep random link
void TClustNet::ClustKeepSingleEdge(const int& MethodId) const {
  TIntPrV KeepEdgeV;
  ClustKeepSingleEdge(MethodId, KeepEdgeV);
}

// For every node keep at most one outgoing edge, selected by MethodId:
//   1: edge to the most frequent variant    2: edge to the longest variant
//   3: edge to the shortest variant         4: a random edge
// Kept edges are returned in KeepEdgeV and scored with EvalPhraseClusters.
// BUGFIX: removed the unused local PNGraph conversion, and fixed the
// operator-precedence bug in the MethodId==3 branch (see below).
void TClustNet::ClustKeepSingleEdge(const int& MethodId, TIntPrV& KeepEdgeV) const {
  KeepEdgeV.Clr(false);
  // keep only a single edge out of each node
  for (TNodeI NI = BegNI(); NI < EndNI(); NI++) {
    IAssert(NI.GetId() == NI().GetId());
    int EdgeToKeep = -1, BestVal=0;
    for (int e = 0; e < NI.GetOutDeg(); e++) {
      // keep link to most frequent variant
      if (MethodId==1 && NI.GetOutNDat(e).GetFq() > BestVal) { EdgeToKeep=e; BestVal=NI.GetOutNDat(e).GetFq(); }
      // keep link to longest variant
      if (MethodId==2 && NI.GetOutNDat(e).GetStr().Len() > BestVal) { EdgeToKeep=e; BestVal=NI.GetOutNDat(e).GetStr().Len(); }
      // keep link to shortest variant
      // BUGFIX: '&&' binds tighter than '||', so the original condition
      // (MethodId==3 && Len<BestVal || BestVal==0) fired for ANY method
      // while BestVal was still 0; parenthesize so the BestVal==0 seed
      // applies only to method 3.
      if (MethodId==3 && (NI.GetOutNDat(e).GetStr().Len() < BestVal || BestVal==0)) { EdgeToKeep=e; BestVal=NI.GetOutNDat(e).GetStr().Len(); }
    }
    // random link
    if (MethodId==4 && NI.GetOutDeg()>0) {
      EdgeToKeep = TInt::Rnd.GetUniDevInt(NI.GetOutDeg()); }
    if (EdgeToKeep!=-1) {
      KeepEdgeV.Add(TIntPr(NI.GetId(), NI.GetOutNId(EdgeToKeep))); }
  }
  if (MethodId==1) { printf("Keep edge to most frequent quote:\n"); }
  if (MethodId==2) { printf("Keep edge to longest quote:\n"); }
  if (MethodId==3) { printf("Keep edge to shortest quote:\n"); }
  if (MethodId==4) { printf("Keep random edge:\n"); }
  EvalPhraseClusters(KeepEdgeV);
}

void TClustNet::ClustGreedyTopDown() const {
  TIntPrV KeepEdgeV;
  ClustGreedyTopDown(KeepEdgeV);
}

// Greedy cluster assignment: repeatedly process nodes whose out-neighbors
// have all been assigned a cluster, and attach each to the cluster it has
// the most out-edges into. Kept edges are returned in KeepEdgeV and scored.
void TClustNet::ClustGreedyTopDown(TIntPrV& KeepEdgeV) const {
  TIntH NIdOutDegH;  // node -> number of out-neighbors not yet processed
  TIntH NIdClustH;   // node -> cluster id (id of the root node it reaches)
  // find root nodes (no outgoing edges): each seeds its own cluster
  for (TNodeI NI = BegNI(); NI < EndNI(); NI++) {
    NIdOutDegH.AddDat(NI.GetId(), NI.GetOutDeg());
    if (NI.GetOutDeg() == 0) {
      NIdClustH.AddDat(NI.GetId(), NI.GetId()); }
  }
  printf("%d root nodes\n", NIdClustH.Len());
  printf("%d nodes\n", NIdOutDegH.Len());
  NIdOutDegH.SortByDat(true);
  THash<TInt, TIntPr> ClustCntH;  // cluster id -> (edges into it, last neighbor seen)
  KeepEdgeV.Clr(false);
  // process nodes in waves: each wave handles all nodes with 0 pending out-neighbors
  while (NIdOutDegH.Len() > 0 && NIdOutDegH[0] == 0) {
    for (int i = 0; i < NIdOutDegH.Len() && NIdOutDegH[i] == 0; i++) {
      IAssert(IsNode(NIdOutDegH.GetKey(i)));
      const TNodeI NI = GetNI(NIdOutDegH.GetKey(i));
      // tell children that node knows its cluster id
      for (int e = 0; e < NI.GetInDeg(); e++) {
        NIdOutDegH.GetDat(NI.GetInNId(e)) -= 1; }
      // set node cluster id: tally edges into each neighboring cluster
      ClustCntH.Clr(false);
      for (int e = 0; e < NI.GetOutDeg(); e++) {
        IAssert(NIdClustH.IsKey(NI.GetOutNId(e)));
        ClustCntH.AddDat(NIdClustH.GetDat(NI.GetOutNId(e))).Val1 += 1; // NI.GetOutNDat(e).GetFq();
        ClustCntH.AddDat(NIdClustH.GetDat(NI.GetOutNId(e))).Val2 = NI.GetOutNId(e);
      }
      ClustCntH.SortByDat(false);
      if (ClustCntH.Len() > 0) {
        // point to cluster where we have most edges into
        const int NId = NI.GetId();
        const int NId2 = ClustCntH[0].Val2;
        const int CId = NIdClustH.GetDat(NId2);
        KeepEdgeV.Add(TIntPr(NId, NId2)); // edge
        NIdClustH.AddDat(NId, CId); // cluster id
      }
      NIdOutDegH[i] = TInt::Mx;  // mark processed; sorts to the end
    }
    NIdOutDegH.SortByDat(true);
  }
  printf("Greedy top down approach:\n");
  EvalPhraseClusters(KeepEdgeV);
}

// Randomized hill climbing over edge selections: seed with a random edge per
// node, overwrite with the greedy top-down solution, then repeatedly swap a
// random node's kept edge and keep the swap only if the cluster score improves.
// NOTE(review): the improvement loop is `while (true)` with no exit — this
// experiment runs until externally interrupted; confirm that is intended.
void TClustNet::ClustGreedyRandom() const {
  printf("Greedy random:\n");
  THash<TInt, TIntV> OutNIdV;  // node -> all out-neighbors (only for deg > 1)
  THash<TInt, TInt> EdgeH;     // node -> currently kept out-neighbor
  TIntPrV KeepEdgeV;
  // created node out edge vector; seed each node with a random out-edge
  for (TNodeI NI = BegNI(); NI < EndNI(); NI++) {
    if (NI.GetOutDeg() > 0) {
      EdgeH.AddDat(NI.GetId()) = NI.GetOutNId(TInt::Rnd.GetUniDevInt(NI.GetOutDeg()));
      if (NI.GetOutDeg() > 1) {
        for (int e = 0; e < NI.GetOutDeg(); e++) {
          OutNIdV.AddDat(NI.GetId()).Add(NI.GetOutNId(e)); }
      }
    }
  }
  // geedy top down to get the initial solution (same pass as ClustGreedyTopDown)
  { TIntH NIdOutDegH;
  TIntH NIdClustH;
  for (TNodeI NI = BegNI(); NI < EndNI(); NI++) {
    NIdOutDegH.AddDat(NI.GetId()) = NI.GetOutDeg();
    if (NI.GetOutDeg() == 0) {
      NIdClustH.AddDat(NI.GetId(), NI.GetId()); }
  }
  NIdOutDegH.SortByDat(true);
  THash<TInt, TIntPr> ClustCntH;
  while (NIdOutDegH.Len() > 0 && NIdOutDegH[0] == 0) {
    for (int i = 0; i < NIdOutDegH.Len() && NIdOutDegH[i] == 0; i++) {
      const TNodeI NI = GetNI(NIdOutDegH.GetKey(i));
      // tell childern that node knows its cluster id
      for (int e = 0; e < NI.GetInDeg(); e++) {
        NIdOutDegH.GetDat(NI.GetInNId(e)) -= 1; }
      // set node cluster id
      ClustCntH.Clr(false);
      for (int e = 0; e < NI.GetOutDeg(); e++) {
        ClustCntH.AddDat(NIdClustH.GetDat(NI.GetOutNId(e))).Val1 += 1;
        ClustCntH.AddDat(NIdClustH.GetDat(NI.GetOutNId(e))).Val2 = NI.GetOutNId(e);
      }
      ClustCntH.SortByDat(false);
      if (ClustCntH.Len() > 0) {
        // point to cluster where we have most edges into
        KeepEdgeV.Add(TIntPr(NI.GetId(), ClustCntH[0].Val2));
      }
      NIdOutDegH[i] = TInt::Mx;
    }
    NIdOutDegH.SortByDat(true);
  } }
  printf("%d\n", EdgeH.Len());
  // overwrite the random seed with the greedy edges where available
  for (int e = 0; e < KeepEdgeV.Len(); e++) {
    EdgeH.AddDat(KeepEdgeV[e].Val1) = KeepEdgeV[e].Val2;
  }
  printf("%d\n", EdgeH.Len());
  EdgeH.GetKeyDatPrV(KeepEdgeV);
  int CurScore = EvalPhraseClusters(KeepEdgeV);
  // hill climbing: mutate one kept edge at a time, keep only improvements
  while (true) {
    //for (int i = 0; i < 10; i++) {
      const int RndNId = OutNIdV.GetKey(TInt::Rnd.GetUniDevInt(OutNIdV.Len()));
      const int RndEdge = OutNIdV.GetDat(RndNId)[TInt::Rnd.GetUniDevInt(OutNIdV.GetDat(RndNId).Len())];
      const int id = EdgeH.GetKeyId(RndNId);  IAssert(KeepEdgeV[id].Val1 == RndNId);
      if (KeepEdgeV[id].Val2 == RndEdge) { continue; } // same edge
      const int OldE = KeepEdgeV[id].Val2;
      KeepEdgeV[id].Val2 = RndEdge;
    //}
    const int NewScore = EvalPhraseClusters(KeepEdgeV, false);
    if (NewScore > CurScore /*|| TInt::Rnd.GetUniDev() < 0.1*/) {
      printf("%6d --> %6d\n", CurScore, NewScore);
      CurScore = NewScore;
    }
    else {  KeepEdgeV[id].Val2 = OldE; } // don't make the change
  }
}

TChA TClustNet::InsertLineBreaks(const TChA& ChA, const int& BreakAtPost) {
  // Re-emit ChA word by word, inserting a literal "\n" escape (for dot/gnuplot
  // labels) whenever the output would grow past the next BreakAtPost boundary.
  TChA Buf = ChA, Res;
  TVec<char*> WrdV;
  TStrUtil::SplitWords(Buf, WrdV);
  int Lines = 1;
  for (int w = 0; w < WrdV.Len(); w++) {
    const int WrdLen = (int) strlen(WrdV[w]);
    if (Res.Len() + WrdLen > Lines*BreakAtPost) {
      Lines++;
      Res += "\\n";
    }
    Res += WrdV[w];
    Res += " ";
  }
  return Res;
}

void TClustNet::DrawNet(const TStr& OutFNm, const int& SaveTopN) const {
  TCnComV CnComV;
  TSnap::GetWccs(GetThis(), CnComV);
  CnComV.Sort(false);
  for (int Comp = 0; Comp<TMath::Mn(SaveTopN, CnComV.Len()); Comp++) {
    if (CnComV[Comp].Len() < 5) { continue; }
    TPt<TNet> SubNet = TSnap::GetSubGraph(TPt<TNet>((TClustNet*) this), CnComV[Comp].NIdV);
    printf("draw: %d nodes, %d edges\n", SubNet->GetNodes(), SubNet->GetEdges());
    FILE *F = fopen(TStr::Fmt("%s-c%02d.dot", OutFNm.CStr(), Comp).CStr(), "wt");
    fprintf(F, "digraph G { /*%d nodes, %d edges*/\n", SubNet->GetNodes(), SubNet->GetEdges());
    fprintf(F, "  graph [splines=true overlap=false rankdir=LR]\n");
    fprintf(F, "  node  [shape=box, fontsize=14]\n");
    for (TNet::TNodeI NI = SubNet->BegNI(); NI < SubNet->EndNI(); NI++) {
      fprintf(F, "  %d [label=\"%s (%d, %d)\"];\n", NI.GetId(), InsertLineBreaks(NI().GetStr(), 80).CStr(), NI().GetFq(), NI().GetUrls());
    }
    for (TNet::TEdgeI EI = SubNet->BegEI(); EI < SubNet->EndEI(); EI++) {
      fprintf(F, "  %d -> %d;\n", EI.GetSrcNId(), EI.GetDstNId());
    }
    fprintf(F, "}\n");
    fclose(F);
    TGraphViz::DoLayout(TStr::Fmt("%s-c%02d.dot", OutFNm.CStr(), Comp), TStr::Fmt("%s-c%02d.ps", OutFNm.CStr(), Comp), gvlDot);
    TSnap::SavePajek(SubNet, TStr::Fmt("%s-c%02d.net", OutFNm.CStr(), Comp));
  }
}

// Dump: Quote, Cluster size, Frequency
void TClustNet::DumpNodes(const TStr& OutFNm, const int& SaveTopN) const {
  TIntV NIdV;  GetNIdV(NIdV);
  TIntH NIdCompSzH;
  { TCnComV CnComV;
  TSnap::GetWccs(GetThis(), CnComV);
  for (int c = 0; c < CnComV.Len(); c++) {
    for (int n = 0; n < CnComV[c].NIdV.Len(); n++) {
      NIdCompSzH.AddDat(CnComV[c].NIdV[n], CnComV[c].Len());
    }
  } }
  TIntPrV FqNIdV;
  for (int n = 0; n < NIdV.Len(); n++) {
    FqNIdV.Add(TIntPr(GetNDat(NIdV[n]).GetFq(), NIdV[n]));
  }
  FqNIdV.Sort(false);
  FILE *F = fopen(TStr::Fmt("nodes-%s.txt", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "#Freq\tClustSz\tQuote\n");
  for (int i = 0; i < FqNIdV.Len(); i++) {
    const TQuote& Q = GetNDat(FqNIdV[i].Val2);
    fprintf(F, "%d\t%d\t%s\n", Q.GetFq(), NIdCompSzH.GetDat(FqNIdV[i].Val2).Val, Q.GetStr().CStr());
  }
}

void TClustNet::DumpClusters(const TStr& OutFNm, int SaveTopN) const {
  TCnComV CnComV;
  TSnap::GetWccs(GetThis(), CnComV); // wccs
  CnComV.Sort(false);
  FILE *F = fopen(TStr::Fmt("clust-%s.txt", OutFNm.CStr()).CStr(), "wt");
  if (SaveTopN==-1) { SaveTopN=TInt::Mx; }
  for (int Comp = 0; Comp<TMath::Mn(SaveTopN, CnComV.Len()); Comp++) {
    const TIntV& NIdV = CnComV[Comp].NIdV;
    TIntPrV FqNIdV;
    int SumFq=0;
    for (int n = 0; n < NIdV.Len(); n++) {
      FqNIdV.Add(TIntPr(GetNDat(NIdV[n]).GetFq(), NIdV[n]));
      SumFq += GetNDat(NIdV[n]).GetFq();
    }
    FqNIdV.Sort(false);
    fprintf(F, "%d quotes, total freq %d\n", FqNIdV.Len(), SumFq);
    for (int i = 0; i < FqNIdV.Len(); i++) {
      const TQuote& Q = GetNDat(FqNIdV[i].Val2);
      fprintf(F, "%d\t%s\n", Q.GetFq(), Q.GetStr().CStr());
    }
    fprintf(F, "\n");
  }
}

void TClustNet::DumpClustersByVol(const TStr& OutFNm, const int& MinClustSz, const int& MinVolume) const {
  TCnComV CnComV;
  TSnap::GetWccs(GetThis(), CnComV);
  CnComV.Sort(false);
  // sort clusters by FQ
  TIntPrV FqClustV;
  TIntH ClSzH, ClFqH;
  int FqMore1k=0;
  for (int Comp = 0; Comp < CnComV.Len(); Comp++) {
    const TIntV& NIdV = CnComV[Comp].NIdV;
    if (NIdV.Len() < MinClustSz) { continue; } // minimum number of variants
    int SumFq=0;
    for (int n = 0; n < NIdV.Len(); n++) {
      SumFq += GetNDat(NIdV[n]).GetFq(); }
    FqClustV.Add(TIntPr(SumFq, Comp));
    ClFqH.AddDat(SumFq) += 1;
    ClSzH.AddDat(NIdV.Len()) += 1;
    if (SumFq>500) { FqMore1k++; }
  }
  FqClustV.Sort(false);
  // save
  FILE *F = fopen(TStr::Fmt("clustFq-%s.txt", OutFNm.CStr()).CStr(), "wt");
  fprintf(F, "Cluster network:\n%d nodes\n%d edges\n%d clusters\n%d big clusters (>=%d)\n",
    GetNodes(), GetEdges(), CnComV.Len(), FqClustV.Len(), MinClustSz);
  for (int c = 0; c < FqClustV.Len(); c++) {
    const TIntV& NIdV = CnComV[FqClustV[c].Val2].NIdV;
    TIntPrV FqNIdV;
    int SumFq=0;
    for (int n = 0; n < NIdV.Len(); n++) {
      FqNIdV.Add(TIntPr(GetNDat(NIdV[n]).GetFq(), NIdV[n]));
      SumFq += GetNDat(NIdV[n]).GetFq();
    }
    if (SumFq < MinVolume) { continue; }
    FqNIdV.Sort(false);
    fprintf(F, "%d\t%d items\t%d totFq\n", c, FqNIdV.Len(), SumFq);
    for (int i = 0; i < FqNIdV.Len(); i++) {
      const TQuote& Q = GetNDat(FqNIdV[i].Val2);
      fprintf(F, "\t%d\t%s\n", Q.GetFq(), Q.GetStr().CStr());
    }
    fprintf(F, "\n");
  }
  TGnuPlot::PlotValCntH(ClFqH, "clVol."+OutFNm, TStr::Fmt("%s. %d nodes, %d edges, %d clusters, %d big clusters (>=%d), %d with vol>500",
    OutFNm.CStr(), GetNodes(), GetEdges(), CnComV.Len(), FqClustV.Len(), MinClustSz, FqMore1k), "Cluster volume", "Count", gpsLog);
  TGnuPlot::PlotValCntH(ClSzH, "clSz."+OutFNm, TStr::Fmt("%s. %d nodes, %d edges, %d clusters, %d big clusters (>=%d), %d with vol>500",
    OutFNm.CStr(), GetNodes(), GetEdges(), CnComV.Len(), FqClustV.Len(), MinClustSz, FqMore1k), "Cluster size", "Count", gpsLog);

}

// Build phrase inverted index: word --> phrase id
void BuildPhraseInvertIdx(const PQuoteBs& QtBs, TStrHash<TInt>& WordIdH, THash<TInt, TIntV>& WIdQtIdVH) {
  printf("build quote word inverted index\n");
  TIntV WIdV;
  TIntSet WIdSet;
  for (int q = 0; q < QtBs->Len(); q++) {
    const TQuote& Q = QtBs->GetQtN(q);
    TStrUtil::GetAddWIdV(WordIdH, Q.GetStr().CStr(), WIdV);
    const int Doms = Q.GetDoms(*QtBs);
    if (! (Doms>1 && Doms*4 > Q.GetUrls() && Q.GetFq() >= 5)) { // skip quotes from too few domains
      continue;
    }
    WIdSet.Clr(false); // count each word only once
    for (int w = 0; w < WIdV.Len(); w++) {
      WIdSet.AddKey(WIdV[w]);
    }
    for (int w = 0; w < WIdSet.Len(); w++) {
      WIdQtIdVH.AddDat(WIdSet[w]).Add(q);
    }
  }
  for (int i = 0; i < WIdQtIdVH.Len(); i++) {
    WIdQtIdVH[i].Pack();
  }
  printf("done.\n");
}

// Print every quote in QtV (a set of quote ids into QtBs) to stdout,
// preceded by a separator rule and followed by a blank line.
void DumpQtV(THashSet<TInt>& QtV, const PQuoteBs& QtBs) {
  printf("---------------------------------------------\n");
  for (int KeyId = 0; KeyId < QtV.Len(); KeyId++) {
    const TQuote& Quote = QtBs->GetQtN(QtV[KeyId]);
    printf("Q %s\n", Quote.GetStr().CStr());
  }
  printf("\n");
}

/////////////////////////////////////////////////
// Quote Loader
void TQuoteLoader::Clr() {
  PostTitleStr.Clr();
  PostUrlStr.Clr();
  PubTm = TSecTm();
  BlogUrlStr.Clr();
  BlogTitleStr.Clr();
  ContentStr.Clr();
  QuoteV.Clr(false);
  LinkV.Clr(false);
}

// Parse a single <post> element from the XML stream into this loader's
// fields (PubTm, PostUrlStr, PostTitleStr, BlogUrlStr, BlogTitleStr,
// ContentStr, QuoteV). Expects the lexer to be positioned on the <post>
// start tag. Returns true on success; on any parse failure reports the
// error and returns false (fields may be partially filled).
bool TQuoteLoader::LoadItem(TXmlLx& XmlLx) {
  static const TSecTm BegOfTm(2008,8,30, 0, 0, 0); // reject posts dated before the dataset start
  Clr();
  try {
    EAssert(XmlLx.TagNm == "post");
    // pubDate fields are extracted by fixed character offsets
    // (year 0-3, month 5-6, day 8-9, hour 11-12, min 14-15, sec 17-18)
    const TChA T = TStrUtil::GetXmlTagVal(XmlLx, "pubDate");
    PubTm = TSecTm(atoi(T.GetSubStr(0,3).CStr()), atoi(T.GetSubStr(5,6).CStr()), atoi(T.GetSubStr(8,9).CStr()),
      atoi(T.GetSubStr(11,12).CStr()), atoi(T.GetSubStr(14,15).CStr()), atoi(T.GetSubStr(17,18).CStr()));
    EAssert(PubTm > BegOfTm);
    PostUrlStr = TStrUtil::GetXmlTagVal(XmlLx, "postUrl");
    PostTitleStr = TStrUtil::GetXmlTagVal(XmlLx, "postTitle");
    BlogUrlStr = TStrUtil::GetXmlTagVal(XmlLx, "blogUrl");
    BlogTitleStr = TStrUtil::GetXmlTagVal(XmlLx, "blogTitle");
    ContentStr = TStrUtil::GetXmlTagVal(XmlLx, "content");
    // load quotes: any number of <q>text</q> elements following the content
    while (XmlLx.GetSym()==xsySTag && XmlLx.TagNm=="q") {
      EAssert(XmlLx.GetSym() == xsyStr);
      QuoteV.Add(XmlLx.TxtChA);
      EAssert(XmlLx.GetSym() == xsyETag && XmlLx.TagNm=="q");
    }
  }
  catch (PExcept Except){
    ErrNotify(Except->GetStr());
    Fail;  return false;
  }
  return true;
}

// Serialize the current post to a binary stream. Field order is the
// serialization contract and must stay in sync with TQuoteLoader::Load.
void TQuoteLoader::Save(TSOut& SOut) const {
  PubTm.Save(SOut);
  PostUrlStr.Save(SOut);
  PostTitleStr.Save(SOut);
  BlogUrlStr.Save(SOut);
  BlogTitleStr.Save(SOut);
  ContentStr.Save(SOut);
  QuoteV.Save(SOut);
  LinkV.Save(SOut);
}

// Deserialize one post from a binary stream. Field order must mirror
// TQuoteLoader::Save exactly.
void TQuoteLoader::Load(TSIn& SIn) {
  PubTm.Load(SIn);
  PostUrlStr.Load(SIn);
  PostTitleStr.Load(SIn);
  BlogUrlStr.Load(SIn);
  BlogTitleStr.Load(SIn);
  ContentStr.Load(SIn);
  QuoteV.Load(SIn);
  LinkV.Load(SIn);
}

// Advance to the next serialized post, transparently opening the next input
// file when the current stream is exhausted. Returns false when there are no
// more files to read.
bool TQuoteLoader::Next() {
  if (SIn.Empty() || SIn->Eof()) {
    printf("  new file");
    if (! FFile.Next(CurFNm)) { return false; }
    printf(" %s\n", CurFNm.GetFMid().CStr());
    // handle both zip-compressed and plain input files
    SIn = TZipIn::IsZipExt(CurFNm.GetFExt()) ? PSIn(new TZipIn(CurFNm)) : PSIn(new TFIn(CurFNm));
    StartProcFile(CurFNm);
  }
  Load(*SIn);
  // progress report every 10k posts
  if (++PostCnt % Kilo(10) == 0) { printf("\r  %dk [%s]  ", PostCnt/Kilo(1), ExeTm.GetStr()); }
  return true;
}

// Drive the processing of all posts in all input files. If IsXml is true,
// parses <post> elements with the XML lexer; otherwise loads binary-serialized
// posts. Calls ProcessPost() per post and the Start/End hooks per file/run.
// LoadN < 0 means process everything; otherwise stop after LoadN posts total.
void TQuoteLoader::ProcessPosts(const bool& IsXml, int LoadN) {
  TExeTm ExeTm, TotalTm;
  StartProcess();
  if (LoadN < 0) { LoadN = TInt::Mx; } // negative means no limit
  int FilePostCnt=0;
  for (int f = 1; FFile.Next(CurFNm); f++) {
    printf("*** FILE:  %s\n", CurFNm.GetFMid().CStr());
    // handle both zip-compressed and plain input files
    SIn = TZipIn::IsZipExt(CurFNm.GetFExt()) ? PSIn(new TZipIn(CurFNm)) : PSIn(new TFIn(CurFNm));
    if (IsXml) {
      TXmlLx XmlLx(SIn, xspTruncate);
      StartProcFile(CurFNm);
      for (FilePostCnt=0; XmlLx.GetSym() != xsyEof; FilePostCnt++, PostCnt++) {
        // if not already on a <post> start tag, scan forward to the next one
        if (! (XmlLx.Sym==xsySTag && XmlLx.TagNm=="post")) {
          while (XmlLx.GetSym()!=xsyEof && ! (XmlLx.Sym==xsySTag && XmlLx.TagNm=="post")) { }
          if (XmlLx.Sym == xsyEof) { break; }
        }
        // IsGoodPost is false when LoadItem failed to parse the post
        const bool IsGoodPost = LoadItem(XmlLx);
        ProcessPost(IsGoodPost);
        if (PostCnt % Kilo(1) == 0) {
          printf("\r%dk [%s] ", PostCnt/Kilo(1), ExeTm.GetStr()); }
        if (PostCnt >= LoadN) { break; }
      }
    } else {
      // binary mode: posts were serialized with TQuoteLoader::Save
      FilePostCnt = 0;
      while (! SIn->Eof()) {
        Load(*SIn);  FilePostCnt++;  PostCnt++;
        ProcessPost(true);
        if (PostCnt % Kilo(10) == 0) {
          printf("\r%dk [%s] ", PostCnt/Kilo(1), ExeTm.GetStr()); }
        if (PostCnt >= LoadN) { break; }
      }
    }
    // per-file summary
    printf("\n================================================================\n");
    printf("  file:  %s\n", CurFNm.GetFMid().CStr());
    printf("  time:  %s   ", ExeTm.GetStr());
    printf("total: %s [%s]\n", TotalTm.GetStr(), TExeTm::GetCurTm());
    printf("  posts: %d   total: %d\n", FilePostCnt, PostCnt);
    EndProcFile(CurFNm);
    fflush(stdout);  ExeTm.Tick();
    if (PostCnt >= LoadN) { break; }
  }
  EndProcess(PostCnt);
}

/////////////////////////////////////////////////
// Memes Dataset Loader
// Open the next input file and reset the line counter. Two input modes:
// iterate over files found on disk (FFile), or read filenames one per line
// from a list file (InFNmF). Returns false when no more files are available.
bool TMemesDataLoader::GetNextFile() {
  TStr FNm;
  if (! FFile.Empty()) {
    if (! FFile->Next(FNm)) { return false; }
    printf("NEXT-FL:  %s :\t%s\n", FNm.GetFBase().CStr(), TExeTm::GetCurTm());
  } else {
    IAssert(! InFNmF.Empty());
    if (InFNmF->Eof()) { return false; }
  // skip blank lines in the file-name list
  while (! InFNmF->Eof() && InFNmF->GetNextLn(FNm) && FNm.Empty()) { }
    printf("NEXT-LN:  %s :\t%s\n", FNm.GetFBase().CStr(), TExeTm::GetCurTm());
  }
  if (FNm.Empty()) { return false; }
  // handle both zip-compressed and plain input files
  if (TZipIn::IsZipExt(FNm.GetFExt())) {
    SInPt = TZipIn::New(FNm); }
  else {
    SInPt = TFIn::New(FNm); }  LineCnt = 0;
  return true;
}

void TMemesDataLoader::Clr() {
  PostUrlStr.Clr();
  ContentStr.Clr();
  PubTm = TSecTm();
  MemeV.Clr(false);
  MemePosV.Clr(false);
  LinkV.Clr(false);
  LinkPosV.Clr(false);
}

// FORMAT:
//U \t Post URL
//D \t Post time
//T \t Post title (optional!)
//C \t Post content
//L \t Index \t URL      (URL starts at Content[Index])
//Q \t Index \t Length \t Quote (Quote starts at Content[Index])
// Parse the next post record (see the format comment above) from the current
// stream into PostUrlStr, PubTm, ContentStr, LinkV/LinkPosV, MemeV/MemePosV.
// Skips garbage lines between records, recurses past empty gaps, and returns
// false on EOF or an unrecoverable format error.
bool TMemesDataLoader::LoadNext() {
  Clr();
  if (SInPt.Empty() || SInPt->Eof()) {
    return false;
  }
  TSIn& SIn = *SInPt;
  CurLn.Clr();

  // keep reading until a line starts with "U\t" (the post-URL record)
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='U' || CurLn[1]!='\t'))) { 
    printf("SKIP: L: %s\n", CurLn.CStr()); LineCnt++; }
  LineCnt++;
  // hit an empty line / gap between records: recurse to try the next record
  if (CurLn.Empty()) { return LoadNext(); }
  if (!((! CurLn.Empty()) && CurLn[0]=='U' && CurLn[1]=='\t'))  
	{printf("Error reading this file, return\n"); return false;}
  IAssertR((! CurLn.Empty()) && CurLn[0]=='U' && CurLn[1]=='\t', 
    TStr::Fmt("ERROR1: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());
  PostUrlStr = CurLn.CStr()+2; // text after "U\t"
  // scan forward to the "D\t" (post time) record
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='D' || CurLn[1]!='\t'))) { LineCnt++; }

  // CurLn[2] < 'A' requires the date field to start with a non-letter (digit)
  if (!((! CurLn.Empty()) && CurLn[0]=='D' && CurLn[1]=='\t' && CurLn[2] <'A')) 
	  {printf("Error reading this file, return\n"); return false;}
  IAssertR((! CurLn.Empty()) && CurLn[0]=='D', 
    TStr::Fmt("ERROR2: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());  LineCnt++;
  try {
    PubTm = TSecTm::GetDtTmFromStr(CurLn);
  } catch (PExcept Except){ PubTm = 1; ErrNotify(Except->GetStr()); // fall back to time 1 on a bad date
    printf("ERROR3: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()); 
  }

  // next record must be content ("C") or an optional title ("T")
  IAssertR(SIn.GetNextLn(CurLn) && (! CurLn.Empty()) && (CurLn[0]=='C' || CurLn[0]=='T'), 
    TStr::Fmt("ERROR4: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());  LineCnt++;
  if (!((! CurLn.Empty()) && (CurLn[0]=='C' || CurLn[0]=='T'))) 
	  {printf("Error reading this file, return\n"); return false;}
  if (CurLn[0] == 'T') { // skip title
    IAssertR(SIn.GetNextLn(CurLn) && (! CurLn.Empty()) && CurLn[0]=='C', 
      TStr::Fmt("ERROR5: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());  LineCnt++; }
  ContentStr = CurLn.CStr()+2;
  // Links: "L \t Index \t URL" -- split the line in place at the second tab
  while (SIn.GetNextLn(CurLn)) {  LineCnt++;
    if (CurLn.Empty() || CurLn[0]!='L') { break; }
    int linkb=2;
    while (CurLn[linkb]!='\t') { linkb++; }
    CurLn[linkb]=0; // terminate the index field so atoi below stops at the tab
    LinkV.Add(CurLn.CStr()+linkb+1);
    LinkPosV.Add(atoi(CurLn.CStr()+2));
  }
  // Quotes: "Q \t Index \t Length \t Quote" -- split in place at both tabs
  do {
    if (CurLn.Empty() || CurLn[0]!='Q') { break; }
    int qb1=2;      while (CurLn[qb1]!='\t') { qb1++; }
    int qb2=qb1+1;  while (CurLn[qb2]!='\t') { qb2++; }
    CurLn[qb1]=0;  CurLn[qb2]=0;
    MemeV.Add(CurLn.CStr()+qb2+1);
    MemePosV.Add(TIntPr(atoi(CurLn.CStr()+2), atoi(CurLn.CStr()+qb1+1)));
    LineCnt++;
  } while (SIn.GetNextLn(CurLn));
  return true;
}

// For parallel execution
// FORMAT:
//U \t Post URL
//D \t Post time
//T \t Post title (optional!)
//C \t Post content
//L \t Index \t URL      (URL starts at Content[Index])
//Q \t Index \t Length \t Quote (Quote starts at Content[Index])
// Same record parsing as LoadNext(), but additionally fills ElemSet with the
// MD5 hashes of every adjacent word pair (2-shingle) of the post content,
// for use in parallel/near-duplicate processing. Returns false on EOF or an
// unrecoverable format error.
bool TMemesDataLoader::LoadNext4(THashSet<TMd5Sig>& ElemSet) {
  Clr();
  if (SInPt.Empty() || SInPt->Eof()) {
    return false;
  }
  TSIn& SIn = *SInPt;
  CurLn.Clr();
  ElemSet.Clr();

  // keep reading until a line starts with "U\t" (the post-URL record)
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='U' || CurLn[1]!='\t'))) { 
    printf("SKIP: L: %s\n", CurLn.CStr()); LineCnt++; }
  LineCnt++;
  // hit an empty line / gap between records: recurse to try the next record
  if (CurLn.Empty()) { return LoadNext4(ElemSet); }
  if (!((! CurLn.Empty()) && CurLn[0]=='U' && CurLn[1]=='\t'))  
	{printf("Error reading this file, return\n"); return false;}
  IAssertR((! CurLn.Empty()) && CurLn[0]=='U' && CurLn[1]=='\t', 
    TStr::Fmt("ERROR1: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());
  PostUrlStr = CurLn.CStr()+2; // text after "U\t"
  // scan forward to the "D\t" (post time) record
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='D' || CurLn[1]!='\t'))) { LineCnt++; }

  // CurLn[2] < 'A' requires the date field to start with a non-letter (digit)
  if (!((! CurLn.Empty()) && CurLn[0]=='D' && CurLn[1]=='\t' && CurLn[2] <'A')) 
	  {printf("Error reading this file, return\n"); return false;}
  IAssertR((! CurLn.Empty()) && CurLn[0]=='D', 
    TStr::Fmt("ERROR2: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());  LineCnt++;
  try {
    PubTm = TSecTm::GetDtTmFromStr(CurLn);
  } catch (PExcept Except){ PubTm = 1; ErrNotify(Except->GetStr()); // fall back to time 1 on a bad date
    printf("ERROR3: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()); 
  }

  // next record must be content ("C") or an optional title ("T")
  IAssertR(SIn.GetNextLn(CurLn) && (! CurLn.Empty()) && (CurLn[0]=='C' || CurLn[0]=='T'), 
    TStr::Fmt("ERROR4: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());  LineCnt++;
  if (!((! CurLn.Empty()) && (CurLn[0]=='C' || CurLn[0]=='T'))) 
	  {printf("Error reading this file, return\n"); return false;}
  if (CurLn[0] == 'T') { // skip title
    IAssertR(SIn.GetNextLn(CurLn) && (! CurLn.Empty()) && CurLn[0]=='C', 
      TStr::Fmt("ERROR5: %s [line %llu]: '%s'\n", SIn.GetSNm().CStr(), LineCnt, CurLn.CStr()).CStr());  LineCnt++; }
  ContentStr = CurLn.CStr()+2;

  // build 2-shingles: split the content on delimiters and hash every
  // consecutive word pair into ElemSet
  TStrV words;	
  TStr content(ContentStr);
  content.SplitOnAllAnyCh(" ?!()@#=&,.<>/\\:\";{}|", words);
  for (int i = 0; i < words.Len()-1; i++) {
	  ElemSet.AddKey(TMd5Sig(words[i] + words[i+1]));
	  //printf("%s %s\n", words[i].CStr(), words[i+1].CStr());
  }
  //printf("%s\n", content.CStr());
  
  // links: "L \t Index \t URL" -- split the line in place at the second tab
  while (SIn.GetNextLn(CurLn)) {  LineCnt++;
    if (CurLn.Empty() || CurLn[0]!='L') { break; }
    int linkb=2;
    while (CurLn[linkb]!='\t') { linkb++; }
    CurLn[linkb]=0; // terminate the index field so atoi below stops at the tab
    LinkV.Add(CurLn.CStr()+linkb+1);
    LinkPosV.Add(atoi(CurLn.CStr()+2));
  }
  // quotes: "Q \t Index \t Length \t Quote" -- split in place at both tabs
  do {
    if (CurLn.Empty() || CurLn[0]!='Q') { break; }
    int qb1=2;      while (CurLn[qb1]!='\t') { qb1++; }
    int qb2=qb1+1;  while (CurLn[qb2]!='\t') { qb2++; }
    CurLn[qb1]=0;  CurLn[qb2]=0;
    MemeV.Add(CurLn.CStr()+qb2+1);
    MemePosV.Add(TIntPr(atoi(CurLn.CStr()+2), atoi(CurLn.CStr()+qb1+1)));
    LineCnt++;
  } while (SIn.GetNextLn(CurLn));
  return true;
}


// Skip next post
// Skip over the next post record without parsing its fields (only LineCnt is
// advanced). Mirrors the record structure walked by LoadNext(). Returns false
// on EOF, true after a record was skipped.
bool TMemesDataLoader::LoadNextSkip() {
  Clr();
  if (SInPt.Empty() || SInPt->Eof()) {
    return false;
  }
  TSIn& SIn = *SInPt;
  CurLn.Clr();

  // keep reading until a line starts with "U\t" (the post-URL record)
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='U' || CurLn[1]!='\t'))) { 
    printf("SKIP: L: %s\n", CurLn.CStr()); LineCnt++; } LineCnt++;
  // hit an empty line / gap between records: recurse to try the next record
  if (CurLn.Empty()) { return LoadNextSkip(); }
  // skip the date ("D\t") and content ("C\t") records
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='D' || CurLn[1]!='\t'))) { LineCnt++; } LineCnt++;
  while (SIn.GetNextLn(CurLn) && (CurLn.Empty() || (CurLn[0]!='C' || CurLn[1]!='\t'))) { LineCnt++; } LineCnt++;
  // skip link records
  while (SIn.GetNextLn(CurLn)) {  LineCnt++;
    if (CurLn.Empty() || CurLn[0]!='L') { break; }
  }
  // quotes: count lines until the quote block ends
  do {
    if (CurLn.Empty() || CurLn[0]!='Q') { break; }
    LineCnt++;
  } while (SIn.GetNextLn(CurLn));
  return true;
}

// Text export of the current record. Intentionally disabled: the whole body
// is commented out, so calling this is currently a no-op. The commented code
// shows the intended output format (P/T/Q/L prefixed lines).
void TMemesDataLoader::SaveTxt(TSOut& SOut) const {
  /*SOut.PutStr("P\t");
  SOut.PutStrLn(PostUrlStr, true);
  SOut.PutStr("T\t");
  SOut.PutStrLn(PubTm.GetYmdTmStr(), true);
  for (int q = 0; q < MemeV.Len(); q++) {
    SOut.PutStr("Q\t");
    SOut.PutStrLn(MemeV[q], true);
  }
  for (int l = 0; l < LinkV.Len(); l++) {
    SOut.PutStr("L\t");
    SOut.PutStrLn(LinkV[l], true);
  }
  SOut.PutLn();
*/
}

// Debug dump of the current record to stdout. Intentionally disabled: the
// body is commented out, so calling this is currently a no-op.
void TMemesDataLoader::Dump(const bool& DumpAll) const {
  /*printf("%s\n  %s\tQ:%d\tL:%d\n", PostUrlStr.CStr(), PubTm.GetYmdTmStr().CStr(), MemeV.Len(), LinkV.Len());
  if (DumpAll) {
    for (int i = 0; i < MemeV.Len(); i++) { printf("%s\n", MemeV[i].CStr()); }
    for (int i = 0; i < LinkV.Len(); i++) { printf("%s\n", LinkV[i].CStr()); }
  }*/
}

// Days per month, indexed 1..12 (EndDate[1]=January ... EndDate[12]=December);
// index 0 is an unused placeholder. February is fixed at 28 — leap years are
// not handled by this table.
int TmYMDH::EndDate[13] = {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};

