package com.pc.cloud.core;

import com.pc.cloud.services.FacetQueryArg;
import com.pc.cloud.services.FacetSearchRow;
import com.pc.cloud.services.MetricArg;
import com.pc.cloud.services.SearchArgs;
import com.pc.cloud.utils.BoundedTreeSet;
import com.pc.cloud.utils.DocIterator;
import com.pc.cloud.utils.DocSet;
import com.pc.cloud.utils.NamePair;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.TermQuery;

import java.util.*;

public class FacetSearch {
	/**
	 * Computes facet counts for {@code fieldName} over the documents in
	 * {@code docs} using the Lucene field cache, returning at most 100
	 * term/count pairs whose count is at least 1, ordered by descending count
	 * (ties broken by ascending term).
	 *
	 * A field name starting with {@link SearchArgs#MULTI_VALUE_FLAG} marks a
	 * multi-valued field; the flag prefix is stripped before the lookup.
	 *
	 * @param searcher  searcher whose reader backs the field cache
	 * @param docs      the document set to count within
	 * @param fieldName facet field, optionally prefixed with the multi-value flag
	 * @return name/count pairs, count rendered as a string
	 * @throws Exception propagated from the searcher / field cache
	 */
	public List<NamePair> getFieldCacheCounts(LuceneSearcher searcher, DocSet docs, String fieldName) throws Exception {
		final int limit = 100;        // maximum number of pairs returned
		final int mincount = 1;       // terms with fewer matches are dropped
		final int startTermIndex = 1; // lookup[0] is the null term (docs without a value)
		boolean isMultiValues = false; // primitive: no reason to box
		String field = fieldName;
		if (fieldName.startsWith(SearchArgs.MULTI_VALUE_FLAG)) {
			// FIX: derive the prefix length from the flag itself instead of the
			// magic number 6, which silently breaks if the flag ever changes.
			field = fieldName.substring(SearchArgs.MULTI_VALUE_FLAG.length());
			isMultiValues = true;
		}

		List<NamePair> pairs = new ArrayList<NamePair>();
		FieldCache.StringIndex si = FieldCache.DEFAULT.getStringIndex(searcher.getReader(), field);
		final String[] terms = si.lookup;

		int nTerms = terms.length;
		if (nTerms > 0 && docs.size() > 0) {
			// counts[j] holds the number of matching docs for terms[j + startTermIndex]
			final int[] counts = new int[nTerms];

			if (isMultiValues) {
				// Multi-valued fields cannot use the per-doc ord array; run one
				// doc-set-restricted TermQuery per term instead.
				for (int i = startTermIndex; i < nTerms; i++) {
					counts[i - startTermIndex] = searcher.numDocs(new TermQuery(new Term(field, terms[i])), docs);
				}
			} else {
				final int[] termNum = si.order;
				DocIterator iter = docs.iterator();
				while (iter.hasNext()) {
					int term = termNum[iter.nextDoc()];
					int arrIdx = term - startTermIndex;
					if (arrIdx >= 0 && arrIdx < nTerms) counts[arrIdx]++;
				}
			}

			// Bound the sort queue to `limit`: entries beyond it can never be
			// emitted below (the original kept all nTerms entries).
			final BoundedTreeSet<CountPair<String, Integer>> queue =
					new BoundedTreeSet<CountPair<String, Integer>>(limit);
			// FIX: iterate only to nTerms - startTermIndex. The original ran to
			// nTerms and read terms[i + startTermIndex] == terms[nTerms] on the
			// last pass — an ArrayIndexOutOfBoundsException only masked because
			// that counts slot is always below mincount.
			for (int i = 0; i < nTerms - startTermIndex; i++) {
				int c = counts[i];
				if (c < mincount) {
					continue;
				}
				queue.add(new CountPair<String, Integer>(terms[i + startTermIndex], c));
			}
			// FIX: the original started its counter at 1 and broke on
			// `offset >= limit`, returning at most limit-1 pairs; emit up to
			// exactly `limit`.
			int emitted = 0;
			for (CountPair<String, Integer> p : queue) {
				if (emitted >= limit) {
					break;
				}
				pairs.add(new NamePair(p.key, String.valueOf(p.val)));
				emitted++;
			}
		}

		return pairs;
	}
	
	/**
	 * Computes one aggregate value per metric over all documents in {@code docs}.
	 * Supported metric types: {@code COUNT} (matching-doc count),
	 * {@code DISTINCT_COUNT} (number of distinct field values) and {@code SUM}
	 * (sum of the cached long field values). Unknown types yield 0.
	 *
	 * @param luceneSearcher searcher whose reader backs the field cache
	 * @param docs           the document set to aggregate over
	 * @param metricFields   the metrics to compute, in output order
	 * @return one NamePair per metric: name = field name, value = aggregate as string
	 * @throws Exception propagated from the searcher / field cache
	 */
	public Collection<NamePair> getSum(LuceneSearcher luceneSearcher, DocSet docs,
			List<MetricArg> metricFields) throws Exception {
		List<NamePair> pairs = new ArrayList<NamePair>();
		long[] result = new long[metricFields.size()];
		if (docs.size() > 0) {
			// Per-metric column of cached field values; COUNT needs no values.
			long[][] values = new long[metricFields.size()][];
			for (int i = 0; i < metricFields.size(); i++) {
				MetricArg sa = metricFields.get(i);
				if (sa.getType() != MetricArg.COUNT) {
					values[i] = FieldCache.DEFAULT.getLongs(luceneSearcher.getReader(),
							sa.getFieldName(), FieldCache.DEFAULT_LONG_PARSER);
				}
			}
			// Already-seen values for DISTINCT_COUNT metrics. A Set replaces the
			// original Map<Long,Boolean>: Set.add() tests and records in a single
			// lookup (the Map version queried the map twice per document).
			@SuppressWarnings("unchecked")
			Set<Long>[] seen = new HashSet[metricFields.size()];
			for (int i = 0; i < metricFields.size(); i++) {
				if (metricFields.get(i).getType() == MetricArg.DISTINCT_COUNT) {
					seen[i] = new HashSet<Long>();
				}
			}

			DocIterator iter = docs.iterator();
			while (iter.hasNext()) {
				int docid = iter.nextDoc();
				for (int i = 0; i < metricFields.size(); i++) {
					switch (metricFields.get(i).getType()) {
					case MetricArg.COUNT: {
						result[i]++;
						break;
					}
					case MetricArg.DISTINCT_COUNT: {
						// add() returns true only the first time a value is seen
						if (seen[i].add(values[i][docid])) {
							result[i]++;
						}
						break;
					}
					case MetricArg.SUM: {
						result[i] += values[i][docid];
						break;
					}
					}
				}
			}
		}

		for (int i = 0; i < metricFields.size(); i++) {
			pairs.add(new NamePair(metricFields.get(i).getFieldName(), String.valueOf(result[i])));
		}
		return pairs;
	}
	
	/**
	 * Computes per-facet-term metric aggregates (COUNT / DISTINCT_COUNT / SUM)
	 * over {@code docs} for the facet field of {@code fs}, keeps only terms
	 * where at least one metric is non-zero and every metric lies inside its
	 * configured [min, max] range, sorts by the metric named by
	 * {@code fs.getSort()} (descending; defaults to the first metric), and
	 * applies offset/limit paging.
	 *
	 * Each returned row carries the facet term name, one NamePair per metric,
	 * and the total number of rows that passed the filters (before paging).
	 *
	 * @throws Exception propagated from the searcher / field cache
	 */
	public Collection<FacetSearchRow> getFacetSum(LuceneSearcher luceneSearcher,
			DocSet docs, FacetQueryArg fs) throws Exception {

		List<FacetSearchRow> rows = new ArrayList<FacetSearchRow>();
		FieldCache.StringIndex si = FieldCache.DEFAULT.getStringIndex(
				luceneSearcher.getReader(), fs.getFacetField());
		final String[] terms = si.lookup;  // terms[0] is the null term
		final int[] termNum = si.order;    // docid -> term ordinal

		int startTermIndex = 1; // skip the null term at index 0

		int nTerms = terms.length;
		if (nTerms > 0 && docs.size() > 0) {
			List<MetricArg> metricFields = fs.getMetricFields();
			// Per-metric column of cached field values; COUNT needs no values.
			long[][] values = new long[metricFields.size()][];
			for (int i = 0; i < metricFields.size(); i++) {
				MetricArg sa = metricFields.get(i);
				if (sa.getType() != MetricArg.COUNT) {
					values[i] = FieldCache.DEFAULT.getLongs(luceneSearcher
							.getReader(), sa.getFieldName(),
							FieldCache.DEFAULT_LONG_PARSER);
				}
			}
			// counts[t][m] = aggregate of metric m for facet term t + startTermIndex
			final long[][] counts = new long[nTerms][metricFields.size()];
			// Lazily-created per-term/per-metric sets of already-seen values,
			// used only by DISTINCT_COUNT (Set.add() tests and records in one
			// lookup, unlike the original Map<Long,Boolean> double get()).
			@SuppressWarnings("unchecked")
			Set<Long>[][] seen = new HashSet[nTerms][metricFields.size()];

			DocIterator iter = docs.iterator();
			while (iter.hasNext()) {
				int docid = iter.nextDoc();
				int arrIdx = termNum[docid] - startTermIndex;
				if (arrIdx >= 0 && arrIdx < nTerms) {
					for (int i = 0; i < metricFields.size(); i++) {
						switch (metricFields.get(i).getType()) {
						case MetricArg.COUNT: {
							counts[arrIdx][i]++;
							break;
						}
						case MetricArg.DISTINCT_COUNT: {
							Set<Long> item = seen[arrIdx][i];
							if (item == null) {
								item = new HashSet<Long>();
								seen[arrIdx][i] = item;
							}
							// add() is true only the first time a value appears
							if (item.add(values[i][docid])) {
								counts[arrIdx][i]++;
							}
							break;
						}
						case MetricArg.SUM: {
							counts[arrIdx][i] += values[i][docid];
							break;
						}
						}
					}
				}
			}

			int limit = fs.getLimit();
			int offset = fs.getOffset();
			// Only offset+limit rows can ever be emitted; bound the sort queue.
			int maxsize = limit > 0 ? offset + limit : nTerms;

			int total = 0; // rows passing the filters, before paging

			final BoundedTreeSet<CountPair<FacetSearchRow, Long>> queue =
					new BoundedTreeSet<CountPair<FacetSearchRow, Long>>(maxsize);
			// FIX: iterate to terms.length - startTermIndex, not terms.length.
			// The original read terms[startTermIndex + i] == terms[terms.length]
			// on the last iteration — a latent ArrayIndexOutOfBoundsException,
			// masked only because that counts row is always all zero.
			for (int i = 0; i < terms.length - startTermIndex; i++) {
				long[] s = counts[i];
				// keep only rows where at least one metric is non-zero...
				boolean nonZero = false;
				for (int j = 0; j < s.length; j++) {
					if (s[j] > 0) {
						nonZero = true;
						break;
					}
				}
				// ...and every metric lies inside its configured [min, max] range
				boolean inRange = true;
				for (int j = 0; j < s.length; j++) {
					if (metricFields.get(j).getMin() > s[j] || metricFields.get(j).getMax() < s[j]) {
						inRange = false;
						break;
					}
				}

				if (nonZero && inRange) {
					FacetSearchRow fsr = new FacetSearchRow();
					fsr.setName(terms[startTermIndex + i]);
					List<NamePair> pairs = new ArrayList<NamePair>();
					// Sort value: the metric matching fs.getSort(), or the first
					// metric when no sort field matches.
					long sortValue = 0;
					for (int j = 0; j < metricFields.size(); j++) {
						if (fs.getSort() != null && metricFields.get(j).getFieldName().equals(fs.getSort())) {
							sortValue = s[j];
						} else if (j == 0) {
							sortValue = s[j];
						}
						pairs.add(new NamePair(metricFields.get(j).getFieldName(), String.valueOf(s[j])));
					}
					fsr.setPairs(pairs);
					queue.add(new CountPair<FacetSearchRow, Long>(fsr, sortValue));
					total++;
				}
			}

			// Page through the descending queue: skip `offset`, emit up to `limit`.
			int off = offset;
			int lim = limit >= 0 ? limit : Integer.MAX_VALUE;
			for (CountPair<FacetSearchRow, Long> np : queue) {
				if (--off >= 0) continue;
				if (--lim < 0) break;
				np.key.setTotal(total);
				rows.add(np.key);
			}
		}
		return rows;
	}

	/**
	 * A (key, value) pair ordered by descending value, ties broken by ascending
	 * key. Used with {@code BoundedTreeSet} to rank facet entries so the
	 * highest counts come first.
	 *
	 * equals() is deliberately defined via {@code compareTo() == 0} so that it
	 * is consistent with the ordering used by sorted sets; equal pairs also
	 * hash equally.
	 */
	public static class CountPair<K extends Comparable<? super K>, V extends Comparable<? super V>>
			implements Comparable<CountPair<K, V>> {

		public CountPair(K k, V v) {
			key = k;
			val = v;
		}

		public K key;
		public V val;

		@Override
		public int hashCode() {
			// combine both components so equal pairs hash equally
			return key.hashCode() ^ val.hashCode();
		}

		@Override
		@SuppressWarnings("unchecked")
		public boolean equals(Object o) {
			// NOTE(review): the unchecked cast means comparing against a
			// CountPair with incompatible type parameters can throw
			// ClassCastException inside compareTo — acceptable for the
			// homogeneous sets this class is used in here.
			return (o instanceof CountPair)
					&& (0 == this.compareTo((CountPair<K, V>) o));
		}

		/** Descending by value, then ascending by key. */
		@Override
		public int compareTo(CountPair<K, V> o) {
			int vc = o.val.compareTo(val); // reversed: larger values sort first
			return (0 != vc ? vc : key.compareTo(o.key));
		}
	}
}