package jpave.annotator;
/**
* Processes the results of blasting a transcript set against annoDBs
* 1. Go through all blast files. Add all hits to database in unitrans_hits table
* 2. For each newly annotated contig, extract only its hits from database and set filters and best hits
**/

import java.io.BufferedReader;

import java.io.FileReader;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.ArrayList;
import java.util.TreeMap;
import java.util.HashMap;

import jpave.annotator.DoBlast;
import jpave.annotator.LineParser;
import jpave.database.CoreDBWrapper;
import jpave.dataholders.BlastHitData;
import jpave.dataholders.ContigData;
import util.methods.FileHelpers;
import util.methods.TimeHelpers;
import util.Debug.ErrorReport;

public class DoUniProt 
{

	// Enables extra diagnostic output when true (shared across instances)
	static boolean debug =  false;
	// Number of database inserts between commits in the batch-load loops
	int RECORDS_PER_COMMIT = 1000; 
	// Maximum hit-sequence length stored in the database; longer hits are truncated or skipped
	static final int maxHitSeq = 32000;
	// File where bad/ignored hits are logged (written via BlastHitData.printWarning)
	String badHitFile = BlastHitData.badHits;
	
	// bunch of heuristic to figure out non-informative names
	// des is lowercase
	// bunch of heuristic to figure out non-informative names
	// des is lowercase
	private boolean goodDescript (String des) {
		des = des.replace("_", " ");

		// Descriptions that are known to carry no information.
		boolean uninformative =
			   des.contains("uncharacterized protein")
			|| des.contains("putative uncharacterized")
			|| des.contains("hypothetical protein")
			|| des.contains("expressed protein")
			|| des.contains("predicted protein")
			|| des.contains("whole genome shotgun")
			|| des.contains("scaffold")        // CAS 27June15
			// des.contains("predicted:")      // CAS 27June15 genbank -- these are good descripts
			|| des.equals("unknown")           // CAS 27June15 genbank
			|| des.equals("orf");
		if (uninformative) return false;

		String[] words = des.split(" ");
		if (words.length > 3) return true; // CAS 27June15 2->3

		// many are just a name or name protein or name (fragment)
		String first = words[0];
		int nDigits = 0;
		for (char ch : first.toCharArray()) {
			if (Character.isDigit(ch)) nDigits++;
		}

		boolean looksLikeName;
		if (nDigits > 3) looksLikeName = true;
		else {
			int len = first.length();
			double ratio = (nDigits > 0) ? ((double) len / (double) nDigits) : 0;
			looksLikeName = (len <= 5 && ratio > 1);
		}

		if (looksLikeName) {
			if (words.length == 1) return false;
			if (des.endsWith("protein")) return false;
			if (des.endsWith("(fragment)")) return false;
			if (des.endsWith("}")) return false; // CAS 27Jun15
		}
		return true;
	}
	
	// Package-private no-op constructor; collaborators are supplied via setMainObj
	DoUniProt () {}

	// Injects the collaborating objects used throughout annotation
	// (presumably called once before processAllDBblastFiles -- TODO confirm caller order)
	public void setMainObj(CoreAnnotator a, DoBlast b, CoreDBWrapper s) {
		annoObj = a;
		blastObj = b;
		sqlObj = s;
	}
	
	 /**
     * called from CoreAnnotator.run to annotate all contigs with protein or DNA
     * 1. updates pja_db_unitrans_hits with hits that pass PAVE.cfg filters
     * 2. updates pja_db_unique_hits with new hits and/or new annotation
     * 		that is, even if no hits are found in the blast file, 
     * 		the fasta file will still be read for annotation
     * 		this is for the case where it was not done to start with
     * 3. update pja_database by adding the given DB
     **/
	public HashMap <Integer, String> processAllDBblastFiles(TreeMap<String, Integer> ctg, 
			CoreDBWrapper outDB) 
	{
	 	long totalTime = TimeHelpers.getTime();
	 	sqlObj = outDB;
	 	ctgMap  = ctg; // sequence name -> CTGid for every sequence in the database
	 	
	 	// process blast files. Annotated contigs are added to annoCtg set.
	 	Step1_processAllDBblastFiles();
		sqlObj.commitChanges();

		// go through annoCtg set and calculate the filter
	 	Step2_processAllHitsPerCtg();
	 	// Summarize problems; details were appended to badHitFile as they occurred
	 	if (nTotalBadHits > 0) 
	 		CoreMain.PrtWarn(nTotalBadHits + " hits ignored -- see " + badHitFile);
	 	if (notFoundHit.size()>0)
	 		CoreMain.PrtWarn(notFoundHit.size() + " not found in database  -- see file " + badHitFile);
	 	if (nWarningHits>0)
	 		CoreMain.PrtWarn(nWarningHits + " name length warnings -- see file " + badHitFile);
	 	if (LineParser.badSpeciesLen>0)
	 		CoreMain.PrtWarn(LineParser.badSpeciesLen + " species length warnings -- see file " + badHitFile);
	 	if (LineParser.badDescriptLen>0)
	 		CoreMain.PrtWarn(LineParser.badDescriptLen + " description length warnings -- see file " + badHitFile);
	 	if (annoMap.size() > 0) outDB.updateAnnoVer(); // record that annotation changed
	
	 	// annoMap: CTGid -> best-frame info string set in Step2 ("" when no frame found)
	 	CoreMain.PrtSpMsgTime(1, annoMap.size() + 
			" annotated  " + (ctgMap.size()-annoMap.size()) + " unannotated      Total time: ", totalTime);
	 	return annoMap;
	}
	
	/**********************************************************
	 * Read all files, add hits, annoDB and unique hit information to db
	 */
    private int Step1_processAllDBblastFiles() 
    {	   
	    	int total = 0; // running count of annotated sequences over all files
	    	int nFiles = blastObj.numDB();
    	
		try {
			CoreMain.PrtSpMsg(1, "Annotate sequences with blast hits from " + nFiles + " DB file(s)");
			// hit IDs already present in pja_db_unique_hits (re-run detection)
			hitInSTC = loadUniqueHitSet();
	    	
		    	for (int i=0; i< nFiles; i++) {  
		    		// Run blast if necessary or get existing blast file
		    		String blastFile = blastObj.getRunDbBlastFile(i); 
		    		if (blastFile==null || blastFile.equals("-")) continue;
		    		  		  		
		    		// per-file state used by the Step1a/Step1c helpers
		    		dbNum = blastObj.getDbNum(i);
		    		isProtein = blastObj.isProtein(i);
		    		if (isProtein) dbLabelType = "protein"; 
		    		else dbLabelType = "DNA";
		    		dbTaxo = blastObj.getDBtaxo(i); 
			    		
		/** 1. Add to pja_unitran_hits and contig SQL tables from blast file **/
				Step1a_processHitBlastFile(i, blastFile);   				
		    	
				// nothing annotated from this file -- skip the bookkeeping below
				if (nAnnoCtg==0 && nTotalHits==0) continue;
		
				dbType = blastObj.getDBtype(i);
				String db = blastObj.getDBfastaFile(i);
		    	
		/** 2. Add to pja_database SQL table **/
				// 	must come before adding unique hits so the DBID is entered into mysql
				if (db.equals("-")) db = blastFile;
				String date = blastObj.getDBdate(i);
				DBID = saveAnnoDB(db, isProtein, 
						dbType, dbTaxo, nAnnoCtg, hitAddToSTW.size(), nTotalHits, 
						blastObj.doSubset(i), date, blastObj.getDBparams(i));
				sqlObj.commitChanges();
				
		/** 3.  Add to pja_unique_hits the sequence and header info from DB fasta file **/
				if (db.equals("-")) Step1c_addUniqueNoFastaFile(i);
				else Step1c_addUniqueFromFastaFile(i);
			    	hitAddToSTW.clear(); // per-file working set of newly seen hit IDs
			    	
			    	CoreMain.Print("");
			    	total += nAnnoCtg;
			    	nAnnoCtg = 0;
		    	}
		    	return total;
		}
		catch (Exception e) {
			ErrorReport.die(e, "Processing annoDB blast files");
			return 0; // unreachable if ErrorReport.die terminates -- kept for the compiler
		}
    }
    
    /*************************************************
  	* Read Blast File gathering HSPs for a unitrans
  	* 	call processHitDataForContig to update pja_unitrans_hits  
	*************************************************************/
    /*************************************************
  	* Read Blast File gathering HSPs for a unitrans
  	* 	call addHitDataForCtgToDB to update pja_unitrans_hits
  	* Returns false when an exception aborts the read.
	*************************************************************/
    private boolean Step1a_processHitBlastFile(int ix, String blastFile ) 
    {     	
       	BlastHitData hitData = null;
       	int nHitNum=0, cntUniqueExists=0; // HSP lines read; hits already in sTCW
       	String curCtgName="";
       	nAnnoCtg = nTotalHits = 0;
	    long time = TimeHelpers.getTime();

	    String file = blastFile.substring(blastFile.lastIndexOf("/")+1);
	    CoreMain.PrtSpMsg(2,"DB#" + dbNum + " hits: " + file);

	    BufferedReader reader = null;
	    String line="";
	    try {
	    	sqlObj.reset();
	    	reader = CoreMain.openGZIP(blastFile);
	    	LineParser lp = new LineParser();
	    	// hit IDs already seen for the current contig (diamond may repeat them)
	    	HashSet <String> hitList = new HashSet <String> ();

	    	while ((line = reader.readLine()) != null) {
	    		if ( line.length() == 0 || line.charAt(0) == '#' ) continue;
	    		nHitNum++;
	    		if (nHitNum % 1000 == 0 ) {
	    			System.err.print("      Annotated " + nAnnoCtg + " sequences   " +
	    				nTotalHits + " total hits   " + nHitNum + " HSPs... \r");
	    		}
	    		// blast tab format: at least 11 tab-separated columns expected
	    		String[] tokens = line.split("\t");
	    		if (tokens == null || tokens.length < 11) continue;

	    		String newCtgName = tokens[0];
	    		if (!lp.parseLine(line)) continue;
	    		String newHitName = lp.getHitID();

	    		if (hitList.contains(newHitName)) {
	    			// diamond v0.6.13 does not list hits in order
	    			continue;
	    		}

	    		if (!curCtgName.equals(newCtgName)) { // new contig
	    			// process previous contig's accumulated hits
	    			if (! curCtgName.equals("")) {
	    				if (curHitDataForCtg.size() > 0) {// may have had bad hit
	    					addHitDataForCtgToDB();
	    					curHitDataForCtg.clear();
	    				}
	    				hitList.clear();
	    			}

	    			if (!ctgMap.containsKey(newCtgName)) {
	    				// warn only once per unknown sequence name
	    				if (!notFoundHit.contains(newCtgName)) {
	    					BlastHitData.printWarning("Sequence " + newCtgName + " in blast file but not in database");
	    					notFoundHit.add(newCtgName);
	    				}
	    				continue;
	    			}
	    			curCtgName = newCtgName;
	    			curCtgData.clearAnno();
	    			curCtgData = sqlObj.loadContigData( curCtgName);
	    			curCtgData.setContigID(curCtgName);
	    			curCtgData.setCTGID(ctgMap.get(curCtgName));
	    		}

	    		// create list of hits for current contig
	    		hitData = new BlastHitData (isProtein, line);

	    		// if HIT id already in database, then this is being run again
	    		if (hitInSTC.contains(hitData.getHitID())) {
	    			cntUniqueExists++;
	    		}
	    		// hit is past the 32k limit so no use saving
	    		else if (hitData.badHitData(maxHitSeq)) {
	    			nTotalBadHits++;
	    		}
	    		else {
	    			hitData.setCTGID(curCtgData.getCTGID());
	    			curHitDataForCtg.add(hitData);
	    			hitList.add(newHitName);
	    		}
	    	}  // end loop through blast tab file
	    	//-----------------------------------------------------------//
	    	if (! curCtgName.equals("")) { // process last contig
	    		if (curHitDataForCtg.size() > 0) {// may have had bad hit
	    			addHitDataForCtgToDB();
	    			curHitDataForCtg.clear();
	    		}
	    	}
	    	sqlObj.commitChanges ( );
	    	System.err.print("                                                                         \r");
	    	if (cntUniqueExists > 1)
	    		CoreMain.PrtWarn(cntUniqueExists + " DB ids already existed in sTCW -- ignored ");

	    	if (nHitNum==0)
	    		CoreMain.PrtSpMsgTime(3, "NO BLAST RESULTS", time);
	    	else {
	    		CoreMain.PrtSpMsgTime(3,nAnnoCtg + " annotated sequences                 ", time);

	    		if (nTotalHits > 0)
	    			CoreMain.PrtSpMsg(3, nTotalHits + " " + dbLabelType +  "-sequence additional pairs ");
	    	}
	    }
        catch ( Throwable err ) {
	    	ErrorReport.reportError(err, "Annotator - reading DB Blast file\nLine: " + line);
	    	return false;
        }
        finally {
        	// BUG FIX: reader was leaked when an exception was thrown mid-read;
        	// close is now unconditional
        	try { if ( reader != null ) reader.close(); }
        	catch (Exception ignored) { /* nothing useful to do on close failure */ }
        }
        return true;
    }
 
    // gathered all hits in hitDataForCtg. Save each hit. 
    private void addHitDataForCtgToDB()
    {
	    try {
       		for (int i=0; i<curHitDataForCtg.size(); i++) {
       			BlastHitData hitData = curHitDataForCtg.get(i);
       			hitData.setBlastRank ( i+1 ); // rank = order of appearance in the blast file
       			hitData.setDBtaxo(dbTaxo);
    			
       			saveDBhitForCtg(hitData); // INSERT into pja_db_unitrans_hits
				
       			// remember hit IDs still needing their unique-hit record (Step1c)
       			String p = hitData.getHitID();
       			if (!hitAddToSTW.contains(p)) hitAddToSTW.add(p);
       			
				nTotalHits++;
       		}
       		nAnnoCtg++;
       		
       		// Record this contig as annotated.
       		// BUG FIX: annoMap is keyed by the Integer CTGid, but the original code
       		// called annoMap.containsKey(name) with the String name, which is always
       		// false; look up the id first and test with it.
       		String name = curCtgData.getContigID();
       		int id = ctgMap.get(name);
       		if (!annoMap.containsKey(id)) {
       			annoMap.put(id, "");
       		}
	    }
        catch ( Exception err ) {
			ErrorReport.die(err, "Annotator - processing hit data");
        }
    }
    
    /*********************************************************************
     * Populate pja_unique_hits
     * No fasta file was supplied, so each hit ID is stored with empty
     * description/species/sequence fields.
     ************************************************************/
    private void Step1c_addUniqueNoFastaFile(int ix) 
    {
        int cnt_add=0;
       	long time = TimeHelpers.getTime();
       
		CoreMain.PrtSpMsg(2,"DB#" + dbNum + " descriptions: no Database Fasta file provided");
	    try {
	    	sqlObj.reset();
	    	// FIX: iterate once with an explicit iterator instead of creating a fresh
	    	// iterator per element plus a per-element remove (the original
	    	// while(!isEmpty) pattern was O(n^2) on a hash set).
	    	Iterator <String> it = hitAddToSTW.iterator();
	    	while (it.hasNext()) {
	    		String hitID = it.next();
	    		
	    		saveDBhitUnique(DBID, isProtein, dbType, dbTaxo, hitID, "", "", "", "", 0);
	    		hitInSTC.add(hitID);
	    		it.remove(); // safe removal while iterating
	    		
	    		// same commit cadence as before: after the 1st record, then every 1000
	    		if ((cnt_add++) % RECORDS_PER_COMMIT == 0 ) sqlObj.commitChanges ( );
	    	}
	    	if (cnt_add > 0) sqlObj.commitChanges();
			System.err.print("                                                                      \r");
			CoreMain.PrtSpMsgTime(3,cnt_add + 	" unique hits added",time);
	    }
        catch ( Throwable err ) {
        	String s = "Annotator - creating unique hit records for blast file w/o description file";
			ErrorReport.reportError(err, s);
			return;
        }
    }
        
    // Populate pja_unique_hits with description/species/sequence parsed from the
    // annoDB fasta file, for the hit IDs collected in hitAddToSTW.
    private void Step1c_addUniqueFromFastaFile(int ix)  
    {
        int cnt_add=0, read=0, failParse=0, cntPrt=0;
        	String hitID="", strSequence = "", line=null;
        	LineParser lp = new LineParser();
       	BufferedReader reader = null;
       	long time = TimeHelpers.getTime();
       	int total = hitAddToSTW.size(); // hit IDs still needing description/sequence
		CoreMain.PrtSpMsg(2,"DB#" + dbNum + " descriptions: " + blastObj.getDBfastaNoPath(ix) );
        	try {
        		sqlObj.reset();
        		// true while the record currently being accumulated belongs to a wanted hit ID
        		boolean addHit = false;
        		// CAS 28Apr15 change to reading zip since diamond can process zip
        		reader = CoreMain.openGZIP(blastObj.getDBfastaFile(ix));
        		// NOTE(review): reader is not closed if an exception is thrown below -- TODO confirm/fix
			while((line = reader.readLine()) != null) {	
				line = line.trim();
				if (line.length() == 0 || line.charAt(0) == '#') 
					continue;
				// build up sequence 
				if (line.charAt(0) != '>') {
					if (addHit) {
						// cap the stored sequence at maxHitSeq; overflow lines are dropped with a warning
						if (strSequence.length()+line.length() > maxHitSeq) {
							if (strSequence.length() == 0) strSequence = line.substring(0,maxHitSeq-1);  
							BlastHitData.printWarning(lp.getHitID() + " > " + maxHitSeq + "; truncated to " 
											+ strSequence.length() + " to put in database");
						}
						else strSequence += line;
					}
					continue;
				}
				// > description line 
					
				// Add previous record before starting the new one
				if (addHit) {
					saveDBhitUnique(DBID, isProtein, dbType, dbTaxo, hitID, 
						lp.getOtherID(), lp.getDescription(), lp.getSpecies(), strSequence, 0);
					cnt_add++;
				}
					
				// start the next
				strSequence = "";	
				if (!lp.parseFasta(line)) {
		        		CoreMain.PrtWarn("Cannot parse line: " + line);
		        		failParse++;
		        		if (failParse>20) CoreMain.die("Too many parse errors");
					addHit = false;
					continue;
				}
				hitID = lp.getHitID();
				if (hitAddToSTW.contains(hitID)) {
					addHit = true;
					hitInSTC.add(hitID); // CAS is this necessary? unless multiple identical files
					hitAddToSTW.remove(hitID); // or this? possibly in file multiple times
				}
				else addHit = false;
				read++; cntPrt++;
				if (cntPrt == 100000 ) {	                
			         System.err.print("         Unique " + dbLabelType + " added " + cnt_add + 
			          		" of " +  total + "  Read " + read + " records ... \r");
			         cntPrt=0;
				}
				//if (hitAddToSTW.size()==0) break; CAS 4/5/15 causes last sequence not to be added
			}
			// add last record (no '>' line follows it to trigger the flush above)
			if (addHit) { 
				saveDBhitUnique(DBID, isProtein, dbType, dbTaxo, lp.getHitID(), lp.getOtherID(), 
						lp.getDescription(), lp.getSpecies(), strSequence, 0);
				cnt_add++;
			}
			if ( reader != null ) reader.close();

			System.err.print("                                                                      \r");
			CoreMain.PrtSpMsgTime(3,cnt_add + 	" unique hits descriptions added from " + read + "    ",time);
			// every blast hit should have appeared in the fasta file; report mismatches
			if (cnt_add==0) CoreMain.die("The annoDB fasta file does not correspond to the blast tab file");
			else if (!hitAddToSTW.isEmpty()) {
				CoreMain.PrtWarn(hitAddToSTW.size() + " HitIds in blast file not found in DB fasta file");
				Iterator <String> itr = hitAddToSTW.iterator();
				int i=0;
			    while (itr.hasNext() && i < 5) { // print at most 5 examples
			      CoreMain.PrtSpMsg(1, (String) itr.next());
			      i++;
			    }
			}
	    }
        catch ( Exception err ) {
			ErrorReport.reportError(err, "Annotator - reading DB Fasta file " + ix);
			return;
        }
    }
    
    /*******************************************************
     * XXX After all blast files are processed and all DBfasta info entered
     * For each contig: 
     * 	set pja_db_unitrans_hits.filtered for each hit
     * 	Set filter counts, PIDov (best annotation), PID (best evalue)
     */
    private void Step2_processAllHitsPerCtg() {  
    		int NO_RF=-100; // sentinel: no reading frame determined yet
    		int count=0;
    		long startTime = TimeHelpers.getTime();
        	CoreMain.PrtSpMsg(2,"Set DB filters for " + annoMap.size() + " contigs ");
        	LineParser lp = new LineParser();
        	CtgData ctgData = new CtgData ();
        	ArrayList <HitData> hitList;
        	// per-contig working sets, cleared at the end of each contig iteration
        	ArrayList <Olap> olapList = new ArrayList <Olap> ();
        ArrayList <String> geneSet = new ArrayList <String> ();
        ArrayList <String> specSet = new ArrayList <String> ();
        ArrayList <String> taxoSet = new ArrayList <String> ();
    	
		try {
			sqlObj.reset();

		/** go through all contigs that have annoDB hits */
        		for (int CTGid : annoMap.keySet()) { // contigs with DB hits   
        			ctgData.CTGid=CTGid;	
        			count++;
        			if(count % 1000 == 0) System.err.print("         Filtered " + count + " sequences...\r"); 
    			
        			int saveDBID=-1;
				int cntTop=0, cntSwiss=0, cntTrembl=0, cntNT=0, cntGI=0;
			
				// bestAnno = best hit with a good description; bestEval = best e-value hit
				HitData bestAnno = null, bestEval = null;
				boolean saveEflag = false;
				double saveEeval = Double.MAX_VALUE, saveAeval = Double.MAX_VALUE;
  			    String saveEtype="", saveAtype="";
  			    
  			    // per-frame tallies, indexed by frame+3 (frames -3..3)
  			    int bestFrame = NO_RF;
  			    int [] frameCnt = new int [7];
  			    int [] frameStart = new int[7];
  			    int [] frameEnd = new int[7];
  			    double [] frameEval = new double[7];
  			    for (int i=0; i<7; i++) frameCnt[i]=0;
  			    
  			    hitList = loadHitDataForCtg(CTGid);
  			    if (hitList==null || hitList.size()==0) continue; 
  			    Collections.sort(hitList);
				  				
		/** go through all hits for the current contig */
        			for (int h=0; h < hitList.size(); h++) {
        				HitData hitData = hitList.get(h);
        			    olapList.add(new Olap(hitData));
        			    
        			/* get best description and best hit */
        		        String des = hitData.desc;
        		        double eval = hitData.eVal;		
        		        String type = hitData.dbtype;
        		        boolean goodDescFlag;
        		        // SwissProt descriptions are trusted; others must pass the heuristic
        		        if (type.equals("sp")) goodDescFlag = true;
        		        else goodDescFlag = goodDescript(des);
        		        	
        		        	if (goodDescFlag) {
        		        		if (bestAnno==null ||
        		        			(!saveAtype.equals("sp") && type.equals("sp") && eval < saveAeval))
        		        		{
        		        			if (eval == 0.0) eval = 1e-180; // lowest eval before 0.0	
        		        			// relaxed threshold: a later sp hit wins only if clearly close in e-value
        		        			double exp = Math.log(eval) * 0.80;
        		        			saveAeval = Math.exp(exp);
        		        			saveAtype = type;
        		        			bestAnno = hitData;
        		        		}
        		        	}   
        		        	if (bestEval == null || 
        		        		(!saveEflag && goodDescFlag && eval < saveEeval) || 
        		        		(!saveEtype.equals("sp") && type.equals("sp") && eval < saveEeval))
	        			{
        		        		if (eval != 0.0) {
        		        			double exp = Math.log(eval) * 0.98; // CAS 27June15 was 0.95
        		        			saveEeval = Math.exp(exp);
        		        		}
        		        		saveEflag = goodDescFlag;
        		        		saveEtype = type;
        		        		bestEval = hitData; 
	        			}
        				
        		     /* compute flags */
        				int dbid = hitData.DBid;
        				if (dbid != saveDBID) { // reset per-annoDB top-hit counter on DBID change
        					saveDBID = dbid;
        					cntTop=0;
        				}
        				// filter 2 is top three hits for annoDB - not used
        				if (cntTop < 3) { 
        					cntTop++;
        					hitData.filter |= 2;
        				}
        				// filter 4 is top hit for species
        				String spec = lp.getSpecies(hitData.species);
        				if (spec != null && !specSet.contains(spec)) {
        					specSet.add(spec);
        					hitData.filter |= 4;
        				}
        				// filter 8 is top hit for gene annotation
        				String gene = lp.getGeneRep(hitData.desc);
        				if (gene != null && !geneSet.contains(gene)) {
        					geneSet.add(gene);
        					hitData.filter |= 8;
        				}
            			   			
        				// per-annoDB-type counts for the contig record
        				if (type.equals("sp")) cntSwiss++;
        				else if (type.equals("tr")) cntTrembl++;
        				else if (type.equals("gi")) cntGI++;
        				else cntNT++;
        				String taxo = hitData.dbtaxo;
        				if (!taxoSet.contains(taxo)) taxoSet.add(taxo);
        				
        		/* Get best frame to use for ORF finding in CoreAnnotator */
        				if (hitData.isProtein!=1) continue; // could be nucleotide annoDB
        				
        				int ctgStart = hitData.ctgStart;
        				int ctgEnd = hitData.ctgEnd;
        		
           			int orient = 1;
        				if (ctgEnd <= ctgStart) { // sequence will be reverse complemented before ORF finding
        					int seqLen = loadSeqLen(CTGid);
        					ctgStart = seqLen - ctgStart +1;
        					ctgEnd = seqLen - ctgEnd + 1;
        					orient = -1;
        				}
        				// frame in 1..3 from the hit start position; negated for reverse strand
        				int frame = ctgStart % 3;
        				if (frame==0) frame = 3;
        				if (orient<0) frame = -frame;
        				
        				int f = frame+3; // array index 0..6 for frames -3..3
        				if (frameCnt[f]==0) {
        					frameStart[f] = ctgStart;
        					frameEnd[f] =  ctgEnd;
        					frameEval[f] = hitData.eVal;
        					if (bestFrame == NO_RF) bestFrame = f; // first hit is best eval
        				}
        				frameCnt[f]++;
        			} 
        	/* end loop through hits for this contig */
        			
        			if (bestAnno==null) bestAnno = bestEval;
        			
        			// finish setting reading frame for ORF finding
        			if (bestFrame != NO_RF) {
        				int bF=bestFrame; // highest Eval
        				if (frameEval[bF] != 0.0) {
	        				// prefer a frame with more hits if its e-value is close enough
	        				for (int i=0; i<7; i++) {
	        					if (bF == i) continue;
	        					
	        					if (frameCnt[i] > frameCnt[bF]) {
	        		        			double exp = Math.log(frameEval[bF]) * 0.95;
	        		        			if (frameEval[i] < Math.exp(exp)) bF=i;
		        				}
		        			}
        				}
	        			// value format consumed downstream: frame:start:end:eval
	        			annoMap.put(CTGid, (bF-3) + ":" + 
	        					frameStart[bF] + ":" + frameEnd[bF] + ":" + frameEval[bF]);
	        			ctgData.pFrame=bF-3;
        			}
    		       	      			
        			// rank is based on the order of input from the blast file
        			// the bestAnno and bestEval are set to rank 1 so show in the view contigs
        			bestAnno.filter = bestAnno.filter | 32;
        			bestAnno.rank = 1; 
        			bestEval.filter = bestEval.filter | 16;
        			bestEval.rank = 1; 
    		       	ctgData.annoPID = bestAnno.Pid;
    		       	ctgData.evalPID = bestEval.Pid;
    		       	ctgData.bestmatchid = bestEval.hitName;
        			
        			ctgData.cnt_gene = geneSet.size();
        			ctgData.cnt_species = specSet.size();
        			ctgData.cnt_overlap = setFilterOlap(olapList);
        			ctgData.cnt_taxo = taxoSet.size();
        			ctgData.cnt_swiss = cntSwiss;
        			ctgData.cnt_trembl = cntTrembl;
        			ctgData.cnt_nt = cntNT;
        			ctgData.cnt_gi = cntGI;       	
        			
        			// save annoDB filters as to whether it is a best hit, etc
        			saveDBhitFilterForCtgHits(hitList);
        			
	    			// saves contig filters, PID, PIDov, and compute protein ORF    	       	
    	    		    saveDBHitCntsForCtg(ctgData);
        			
        			geneSet.clear(); specSet.clear(); olapList.clear(); 
        			taxoSet.clear(); hitList.clear();
        		}// end loop through contig list
        		sqlObj.commitChanges ( );
        		System.out.print("                                                           \r");
        		CoreMain.PrtSpMsgTime(2, "Finish filter", startTime);
		}
        catch ( Throwable err ) {
			ErrorReport.reportError(err, "Annotator - computing filters for DB hits");
			return;
        }
    }
    /** Clears the isOlap flag of any hit contained (same orientation) inside a
     *  better-e-value hit, sets filter bit 1 on every surviving hit, and returns
     *  how many hits survived. olapList is ordered best e-value first. */
    private int setFilterOlap(ArrayList <Olap> olapList) {

    	// Pairwise containment pass: 'best' precedes 'other', so when coordinates
    	// tie, 'other' (the worse e-value) is the one dropped.
    	for (int a = 0; a < olapList.size(); a++) {
    		Olap best = olapList.get(a);
    		if (!best.isOlap) continue;

    		int comp = (best.start > best.end) ? 1 : 0; // 1 = reverse orientation

    		for (int b = a + 1; b < olapList.size(); b++) {
    			Olap other = olapList.get(b);
    			if (!other.isOlap) continue;

    			// only compare hits on the same orientation
    			if (comp == 1 && other.start < other.end) continue;
    			if (comp == 0 && other.start > other.end) continue;

    			if      (isContained(comp, other, best)) other.isOlap = false;
    			else if (isContained(comp, best, other)) { best.isOlap = false; break; }
    		}
    	}

    	// Mark survivors with filter bit 1 and count them
    	int kept = 0;
    	for (Olap o : olapList) {
    		if (o.isOlap) {
    			o.hitData.filter |= 1;
    			kept++;
    		}
    	}
    	return kept;
    }
    /********************************************************************
     * Load data
     */
    /** Inserts a row into pja_databases describing one annoDB and returns the
     *  generated DBID. Returns -1 when no valid file date can be determined.
     *  Logs and rethrows SQLException so the caller can abort. */
    public int saveAnnoDB(String fileName, boolean isProtein, String dbtype, String dbtaxo, 
    		int bestHits, int uniqueHits, int totalHits, boolean subset, String dbdate,
    		String parameters) throws Exception
    {
    	// CAS 2/9/15 add parameters which is blast program and parameters
    	// FIX: parameterized statement (the original concatenated values into the SQL,
    	// which breaks on quotes in paths/parameters) + try-with-resources so the
    	// statement/result set are closed even on error.
    	String strQ = "INSERT INTO pja_databases " +
    			"(AID, path, isProtein, dbtype, taxonomy, " +
    			"nBestHits, nUniqueHits, nTotalHits, dbDate, addDate, subset, parameters) " +
    			"VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), ?, ?)";
    	try {
    		// use the file's modification date when no explicit date was supplied
    		String fdate = (dbdate == null) ? FileHelpers.getFileDate(fileName) : dbdate;
    		if (fdate == null) {
    			CoreMain.PrtError("Not valid file " + fileName);
    			return -1;
    		}
    		Connection dbc = sqlObj.getConn();
    		try (java.sql.PreparedStatement ps =
    				dbc.prepareStatement(strQ, Statement.RETURN_GENERATED_KEYS)) {
    			ps.setString(1, fileName);
    			ps.setInt(2, isProtein ? 1 : 0);
    			ps.setString(3, dbtype);
    			ps.setString(4, dbtaxo);
    			ps.setInt(5, bestHits);
    			ps.setInt(6, uniqueHits);
    			ps.setInt(7, totalHits);
    			ps.setString(8, fdate);
    			ps.setInt(9, subset ? 1 : 0);
    			ps.setString(10, parameters);
    			ps.executeUpdate();

    			// generated key replaces the separate "select last_insert_id()" round trip
    			try (ResultSet rset = ps.getGeneratedKeys()) {
    				return rset.next() ? rset.getInt(1) : 0;
    			}
    		}
    	}
    	catch (SQLException e) {
    		ErrorReport.reportError(e, "Error on query " + strQ);
    		throw e;
    	}   	
    }
    
    /** Returns consensus_bases for the given contig; 0 if the query fails
     *  (error already reported); dies if the contig row is missing. */
    private int loadSeqLen (int CTGid) throws Exception
   	{
        int seqLen=0;
        String strQ = "select consensus_bases from contig where CTGid=" + CTGid;
        try
        {
        	Connection dbc = sqlObj.getConn();
        	// FIX: try-with-resources -- the original never closed the ResultSet and
        	// leaked the Statement when an exception was thrown
        	try (Statement stmt = dbc.createStatement(ResultSet.TYPE_FORWARD_ONLY,
        			ResultSet.CONCUR_READ_ONLY);
        		 ResultSet rset = stmt.executeQuery(strQ))
        	{
        		if (rset.next()) seqLen = rset.getInt(1);
        		else ErrorReport.die("Could not get sequence length for " + CTGid);
        	}
        }
        catch (SQLException e) {ErrorReport.reportError(e, "Loading sequence length");}
        return seqLen;
   	}
    /** Loads all DB hits for one contig (unique-hit info joined to per-contig hit
     *  rows), ordered by PID, i.e. insertion order from the blast files.
     *  Returns null when the contig has no hits (fasta/tab mismatch). */
    private ArrayList <HitData>loadHitDataForCtg (int CTGid) throws Exception
	{
        ArrayList <HitData> hitList= new ArrayList <HitData> ();
        String strQ = 	"SELECT " +
        		"pja_db_unique_hits.DBID, pja_db_unique_hits.hitID," +
        		"pja_db_unique_hits.description, pja_db_unique_hits.species, " +
        		"pja_db_unique_hits.dbtype, pja_db_unique_hits.taxonomy, " +
        		"pja_db_unitrans_hits.PID, " +
        		"pja_db_unitrans_hits.e_value, pja_db_unitrans_hits.bit_score," +
        		"pja_db_unitrans_hits.ctg_start, pja_db_unitrans_hits.ctg_end, " +
        		"pja_db_unitrans_hits.blast_rank, pja_db_unique_hits.isProtein " +
        		"FROM pja_db_unique_hits " +
        		"JOIN pja_db_unitrans_hits " +
        		"ON pja_db_unitrans_hits.DUHID = pja_db_unique_hits.DUHID " +
        		"WHERE   pja_db_unitrans_hits.CTGID = " + CTGid + " " +
        		"order by pja_db_unitrans_hits.PID";
        try
        {
        	Connection dbc = sqlObj.getConn();
        	// FIX: try-with-resources -- the original leaked stmt/rset when an
        	// exception was thrown mid-read
        	try (Statement stmt = dbc.createStatement(ResultSet.TYPE_FORWARD_ONLY,
        			ResultSet.CONCUR_READ_ONLY))
        	{
        		stmt.setFetchSize(Integer.MIN_VALUE); // supposed to prevent memory hogging on large query
        		try (ResultSet rset = stmt.executeQuery( strQ )) {
        			while( rset.next() )
        			{
        				HitData hit = new HitData ();
        				hit.DBid = rset.getInt(1);
        				hit.hitName = rset.getString(2);
        				// normalized for the description/species heuristics in Step2
        				hit.desc = rset.getString(3).trim().toLowerCase();
        				hit.species = rset.getString(4).trim().toLowerCase();
        				hit.dbtype =  rset.getString(5);
        				hit.dbtaxo = rset.getString(6);

        				hit.Pid = rset.getInt(7);
        				hit.eVal = rset.getDouble(8);
        				hit.bitScore = rset.getDouble(9);
        				hit.ctgStart = rset.getInt(10);
        				hit.ctgEnd = rset.getInt(11);
        				hit.rank = rset.getInt(12);
        				hit.isProtein = rset.getInt(13);
        				hitList.add(hit);
        			}
        		}
        	}
            if (hitList.size()==0) return null; // happens with mismatch of .fasta and .tab blast file
	    	return hitList;
        }
        catch(Exception e) {
	        ErrorReport.die(e, "Error: reading database loadDBhitDataForCtg");
	        throw e;
        }	
	}
    /** Loads every hitID already stored in pja_db_unique_hits (used to skip
     *  re-adding hits when an annotation run is repeated). Dies on SQL error. */
    private HashSet<String> loadUniqueHitSet()
    {
    	HashSet <String> hit = new HashSet<String>();
        try {
        	Connection dbc = sqlObj.getConn();
        	// FIX: try-with-resources -- the original leaked stmt/rset on exception
        	try (Statement stmt = dbc.createStatement(ResultSet.TYPE_FORWARD_ONLY,
            		ResultSet.CONCUR_READ_ONLY);
        		 ResultSet rset = stmt.executeQuery( "SELECT hitID FROM pja_db_unique_hits " ))
        	{
        		while(rset.next()) {
        			hit.add(rset.getString("hitID"));
        		}
        	}
            return hit;
        }
        catch (SQLException e) {ErrorReport.die(e, "Error on loadUniqueHitSet"); return null;}   		
    }

    /********************************************************************
     * Save data
     */
    /** Writes the per-contig filter counts, best-hit PIDs, best match id and
     *  protein frame back to the contig row. Errors are reported, not thrown. */
    private void saveDBHitCntsForCtg(CtgData ctg) 
    {
    	// FIX: parameterized statement (the original concatenated bestmatchid into the
    	// SQL, which breaks on an embedded quote) + try-with-resources so the statement
    	// is closed even when executeUpdate throws.
    	String strQ = "UPDATE contig SET " +
    			"  PIDov = ?, PID = ?, bestmatchid = ?" +
    			", cnt_overlap = ?, cnt_gene = ?, cnt_species = ?, cnt_taxo = ?" +
    			", cnt_swiss = ?, cnt_trembl = ?, cnt_nt = ?, cnt_gi = ?, p_frame = ?" +
    			" WHERE CTGid = ?";
    	try
    	{      
    		Connection dbc = sqlObj.getConn();
    		try (java.sql.PreparedStatement ps = dbc.prepareStatement(strQ)) {
    			ps.setInt(1, ctg.annoPID);
    			ps.setInt(2, ctg.evalPID);
    			ps.setString(3, ctg.bestmatchid);
    			ps.setInt(4, ctg.cnt_overlap);
    			ps.setInt(5, ctg.cnt_gene);
    			ps.setInt(6, ctg.cnt_species);
    			ps.setInt(7, ctg.cnt_taxo);
    			ps.setInt(8, ctg.cnt_swiss);
    			ps.setInt(9, ctg.cnt_trembl);
    			ps.setInt(10, ctg.cnt_nt);
    			ps.setInt(11, ctg.cnt_gi);
    			ps.setInt(12, ctg.pFrame);
    			ps.setInt(13, ctg.CTGid);
    			ps.executeUpdate();
    		}
   		}
    	catch (SQLException e) {ErrorReport.reportError(e, "Error on query " + strQ);}
    }
    /**
     * Inserts one blast hit row into pja_db_unitrans_hits and stores the
     * generated primary key back into hitData via setPID (the caller uses it
     * to update the contig record).
     *
     * Rewritten from string-concatenated SQL to a PreparedStatement: hit ids
     * and contig ids are now quote-safe, and RETURN_GENERATED_KEYS replaces
     * the extra "select last_insert_id()" round trip. try-with-resources
     * closes the statement/result set on all paths (the old code leaked them
     * when executeUpdate threw).
     *
     * @param hitData one parsed blast hit
     * @return true on success, false if the insert failed (error is reported)
     */
    private boolean saveDBhitForCtg (BlastHitData hitData ) 
    {
        String strQ = "INSERT INTO pja_db_unitrans_hits " + 
               "(CTGID, AID, DUHID, contigid, uniprot_id, percent_id, alignment_len," +
               "mismatches, gap_open, ctg_start, ctg_end, prot_start, prot_end, " +
               "bit_score , e_value, dbtype, taxonomy, blast_rank, isProtein, " +
               "cnt_hsps) " +
               "VALUES (?, 1, 0, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
        try
        {
            Connection dbc = sqlObj.getConn();
            try (java.sql.PreparedStatement ps =
                    dbc.prepareStatement(strQ, Statement.RETURN_GENERATED_KEYS)) {
                int i = 1;
                ps.setInt(i++, hitData.getCTGID());
                ps.setString(i++, hitData.getContigID());
                ps.setString(i++, hitData.getHitID());
                // setObject: numeric getter types are declared elsewhere in the
                // project; the driver binds whatever boxed type they return,
                // matching the old behavior of concatenating their toString().
                ps.setObject(i++, hitData.getPercentID());
                ps.setObject(i++, hitData.getAlignLen());
                ps.setObject(i++, hitData.getMisMatches());
                ps.setObject(i++, hitData.getGapOpen());
                ps.setObject(i++, hitData.getCtgStart());
                ps.setObject(i++, hitData.getCtgEnd());
                ps.setObject(i++, hitData.getHitStart());
                ps.setObject(i++, hitData.getHitEnd());
                ps.setObject(i++, hitData.getBitScore());
                ps.setObject(i++, hitData.getEVal());
                ps.setString(i++, hitData.getDBtype());
                ps.setString(i++, hitData.getDBtaxo());
                ps.setObject(i++, hitData.getBlastRank());
                ps.setInt(i++, hitData.hitIsProtein() ? 1 : 0);
                ps.setObject(i++, hitData.getCntHSPs());
                ps.executeUpdate();

                // the pid gets updated in the contig record in the calling routine
                try (ResultSet rset = ps.getGeneratedKeys()) {
                    if (rset.next()) {
                        hitData.setPID(rset.getInt(1));
                    }
                }
            }
            return true;
        }
        catch (SQLException e) {
            ErrorReport.reportError(e, "Error on query " + strQ);
            return false;
        }
    }
    /**
     * Writes the computed filter flags for every hit of a contig back to
     * pja_db_unitrans_hits. The packed bitmask hitData.filter is stored
     * whole in 'filtered' and also decoded into individual 0/1 columns:
     *   bit 16 -> filter_best, bit 32 -> filter_ovbest, bit 1 -> filter_olap,
     *   bit 2  -> filter_top3, bit 4  -> filter_species, bit 8 -> filter_gene.
     *
     * Uses one PreparedStatement executed as a batch (the old code rebuilt a
     * concatenated UPDATE string per hit and leaked the statement on error).
     *
     * @param hitList all hits of the current contig, with filter bits set
     */
    private void saveDBhitFilterForCtgHits(	ArrayList <HitData> hitList) 
    {
        String strQ = "UPDATE pja_db_unitrans_hits SET" +
                " filtered = ?, filter_best = ?, filter_ovbest = ?," +
                " filter_olap = ?, filter_top3 = ?, filter_species = ?," +
                " filter_gene = ? WHERE PID = ?";
        try
        {
            Connection dbc = sqlObj.getConn();
            try (java.sql.PreparedStatement ps = dbc.prepareStatement(strQ)) {
                for (HitData hitData : hitList) {
                    int filtered = hitData.filter;
                    ps.setInt(1, filtered);
                    ps.setInt(2, (filtered & 16) != 0 ? 1 : 0); // best
                    ps.setInt(3, (filtered & 32) != 0 ? 1 : 0); // ovbest
                    ps.setInt(4, (filtered & 1)  != 0 ? 1 : 0); // olap
                    ps.setInt(5, (filtered & 2)  != 0 ? 1 : 0); // top3
                    ps.setInt(6, (filtered & 4)  != 0 ? 1 : 0); // species
                    ps.setInt(7, (filtered & 8)  != 0 ? 1 : 0); // gene
                    ps.setInt(8, hitData.Pid);
                    ps.addBatch();
                }
                ps.executeBatch();
            }
        }
        catch (SQLException e) {
            String err = "Error on query " + strQ;
            ErrorReport.die(e, err);
        }
    }
 
    /**
     * Ensures a pja_db_unique_hits row exists for hitID (inserting one if this
     * is the first time the hit is seen), then points every matching
     * pja_db_unitrans_hits row at it via DUHID and caches the mapping in
     * duidMap.
     *
     * NOTE on duplicate lookups (kept from original): -v 25 does not
     * necessarily give 25 hits, so a hit may be missed in the species DB but
     * hit in ALL; it then reuses the species DUHID while carrying the ALL taxo.
     *
     * Rewritten with PreparedStatements: the old code built the INSERT by
     * string concatenation and had to strip '"' from descriptions to avoid SQL
     * errors — parameter binding fixes the root cause for description, species
     * and sequence alike. try-with-resources also fixes a leak: the old early
     * return on DUHID == 0 skipped stmt.close().
     *
     * @param cntUni currently unused here (kept for interface compatibility)
     * @throws Exception declared for the caller; SQL failures terminate via
     *         ErrorReport.die
     */
    private void saveDBhitUnique(int DBID, boolean isProtein, String dbtype, String dbtaxo, 
    		String hitID, String otherID, String description, String species, String sequence, int cntUni) 
    			throws Exception
    {    
        try {
            int DUHID = 0;
            Connection dbc = sqlObj.getConn();

            if (duidMap.containsKey(hitID)) DUHID = duidMap.get(hitID);
            else {
                String ins = "INSERT INTO pja_db_unique_hits" +
                    " (AID, DBID, hitID, repID, dbtype, taxonomy, isProtein," +
                    " nBestHit, nUnitranHit, description, species, length, sequence)" +
                    " VALUES (1, ?, ?, ?, ?, ?, ?, 0, 0, ?, ?, ?, ?)";
                try (java.sql.PreparedStatement ps =
                        dbc.prepareStatement(ins, Statement.RETURN_GENERATED_KEYS)) {
                    ps.setInt(1, DBID);
                    ps.setString(2, hitID);
                    ps.setString(3, otherID);
                    ps.setString(4, dbtype);
                    ps.setString(5, dbtaxo);
                    ps.setString(6, isProtein ? "1" : "0"); // stored as "1"/"0" as before
                    ps.setString(7, description);
                    ps.setString(8, species);
                    ps.setInt(9, sequence.length());
                    ps.setString(10, sequence);
                    ps.executeUpdate();
                    try (ResultSet rset = ps.getGeneratedKeys()) {
                        if (rset.next()) DUHID = rset.getInt(1);
                    }
                }
            }
            if (DUHID == 0) {
                CoreMain.PrtError("Internal: could not get DUHID for " + hitID);
                return;
            }
            // update all unitran records with DUHID
            try (java.sql.PreparedStatement ps = dbc.prepareStatement(
                    "UPDATE pja_db_unitrans_hits SET DUHID = ? WHERE uniprot_id = ?")) {
                ps.setInt(1, DUHID);
                ps.setString(2, hitID);
                ps.executeUpdate();
            }
            duidMap.put(hitID, DUHID);
        }
        catch (SQLException e) {
            ErrorReport.die(e, "Error saving hit description to database");
        }   
    }
    /******************* Instance variables ****************************/
    // called from CoreMain
	/** Sets the flanking slack (in bases) used by the overlap containment test. */
	public void setFlankingRegion(int f) {
		flank = f;
	}
	 // is o1 contained in o2 
	private boolean isContained(int comp, Olap o1, Olap o2) {
		if (comp==0 && o1.start >= o2.start-flank && o1.end <= o2.end+flank) 
			return true;
		else if (comp==1 && o1.end >= o2.end-flank && o1.start <= o2.start+flank) 
			return true;
		return false;
	}

	private class Olap {
		Olap (HitData h) {
			start = h.ctgStart;
			end = h.ctgEnd;
			hitData = h;
		}
		int start;
		int end;
		HitData hitData = null;
		boolean isOlap = true;
	}
	private class CtgData {
  		int CTGid, PIDov,  annoPID, evalPID;

  		String bestmatchid;
  		int cnt_overlap, cnt_gene, cnt_species, cnt_taxo;
  		int cnt_swiss, cnt_trembl, cnt_nt, cnt_gi;
  		int pFrame=0;
    }
	private class HitData implements Comparable <HitData> {
 		 String desc, species, dbtype, dbtaxo;
 		 double eVal, bitScore;
	     int DBid, Pid, CTGid;
	     String hitName;
	     int ctgStart, ctgEnd, isProtein;
	     
	     int filter=0, rank=0;
	     
	     public int compareTo(HitData b) {
	    		if (this.eVal < b.eVal) return -1;
	    		if (this.eVal == b.eVal && this.bitScore > b.bitScore) return -1;
	    		if (this.eVal == b.eVal && this.bitScore == b.bitScore) return 0;
	    		return 1;
 		}
	 }
	// current file: identity of the annoDB blast file being processed
	private int DBID = 0;                // pja_databases row id of current annoDB
	private int dbNum = 0;               // ordinal of current annoDB
	private String dbLabelType = "";     // display label for the annoDB
	private String dbTaxo = "";          // taxonomy tag of the annoDB
	private String dbType = "";          // annoDB type (e.g. swiss/trembl/nt/gi)
	private boolean isProtein = true;    // whether current annoDB is protein
    
	private HashSet<String> hitAddToSTW = new HashSet<String> (); // unique set in dbFasta
	private HashSet<String> hitInSTC = new HashSet<String> (); // already in sTCW
	
	// working state for the contig currently being annotated
	private ContigData curCtgData = new ContigData ();
   	private ArrayList <BlastHitData> curHitDataForCtg = new ArrayList <BlastHitData> ();
   	
	// contig name -> CTGid, and CTGid -> annotation status maps
    private TreeMap<String, Integer> ctgMap = new TreeMap<String, Integer> (); 
    private HashMap<Integer, String> annoMap = new HashMap<Integer, String> ();
    
	private int flank = 0;   // slack (bases) for isContained; set via setFlankingRegion
	private HashMap <String, Integer> duidMap = new HashMap <String, Integer> (); // hitID -> pja_db_unique_hits DUHID
	
	// counts
	private int nAnnoCtg = 0; 
    private int nTotalHits = 0;
    private int nTotalBadHits = 0, nWarningHits=0;
	HashSet <String> notFoundHit = new HashSet <String> ();
  		
    private CoreDBWrapper sqlObj = null; 		// created in coreAnnotator
    private DoBlast blastObj = null;				// created in CoreMain
    private CoreAnnotator annoObj = null;	
}
