package business.application;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;

import business.application.TFCalculatorNoDB.TF_DOC;
import business.search_eng.Document;
import business.search_eng.PostingList;
import business.search_eng.PostingListItem;
import business.search_eng.cleaner.EnglishStemmer;

import org.kth.dks.JDHT;
import org.kth.dks.JDHTReputated;
import org.kth.dks.dks_exceptions.DKSIdentifierAlreadyTaken;
import org.kth.dks.dks_exceptions.DKSRefNoResponse;
import org.kth.dks.dks_exceptions.DKSTooManyRestartJoins;

import com.mysql.jdbc.ResultSet;
import com.mysql.jdbc.Statement;

/**
 * Bootstraps a local DKS DHT overlay, loads a news-article collection from a
 * MySQL database into the overlay (key "id//&lt;docId&gt;" -> raw document), and then
 * publishes the inverted index (key term -> posting list) via randomly chosen nodes.
 *
 * Usage: DBServer [numOfNodes [numOfFiles]]
 * A non-positive numOfFiles falls back to 20100 documents; a strictly negative
 * value additionally switches to the topic-filtered SQL query (life/news/spam).
 */
public class DBServer {
	
	// Number of DHT nodes created on localhost (overridable via args[0]).
	public static int numOfNodes = 32;
	// All overlay nodes; index numOfNodes-1 is the node used for document puts.
	private static ArrayList<JDHT> nodeList;
	// Maximum number of documents loaded from the database (overridable via args[1]).
	public static int numOfFiles = 1024;
	// Stemmed copies of the loaded documents, consumed by the TF pass.
	private static ArrayList<Document> documentList;
	
	public static void main(String[] arg) {
		try {
			boolean useThisQuery = false;
			
			if (arg.length >= 1)
				numOfNodes = Integer.parseInt(arg[0]);
			if (arg.length >= 2) {
				numOfFiles = Integer.parseInt(arg[1]);
				// Non-positive count selects the 20100-document default; a strictly
				// negative value also requests the topic-restricted query.
				if (numOfFiles <= 0) {
					if (numOfFiles < 0) {
						useThisQuery = true;
					}
					numOfFiles = 20100;
				}
			}
			
			long ini = System.currentTimeMillis();
			// Could declare myDHT as a java.util.Map; nodes start at port 44000.
			JDHT DHTNode = createOverlayNetwork();
			
			// FIRST: fill the overlay with the docs (k->v: docId -> docContent).
			fillOverlayNetworkWithCollection(DHTNode, useThisQuery);
			// SECOND: fill the overlay with the posting lists (k->v: word -> postinglist).
			fillOverlayNetworkWithVocabulary(DHTNode);
			long end = System.currentTimeMillis();
			System.out.println((end - ini) + " ms elapsed");
			
			// Print a reference to the last node so clients can join the ring.
			System.out.println(nodeList.get(numOfNodes - 1).getReference());
			
			// Wait for input before exiting.
			BufferedReader cin = new BufferedReader(new InputStreamReader(System.in));
			cin.read();
			
			// Shut down the bootstrap node.
			DHTNode.close();
			
		} catch (Exception ex) { ex.printStackTrace(); }
	}

	/**
	 * Creates numOfNodes JDHTReputated nodes on localhost (ports 44000, 44001, ...),
	 * each joining the ring through the previously created node. Every node gets a
	 * random negative reputation. All nodes are stored in nodeList; the bootstrap
	 * node is returned.
	 *
	 * Note: the original created an extra, unused "new JDHT()" here — a leaked
	 * node that was never joined or closed; it has been removed.
	 */
	private static JDHT createOverlayNetwork() throws IOException,
			DKSTooManyRestartJoins, DKSIdentifierAlreadyTaken, DKSRefNoResponse {
		int port = 44000;
		nodeList = new ArrayList<JDHT>();
		
		JDHT DHTNode = new JDHTReputated(port, -1 * Math.random());
		nodeList.add(DHTNode);
		System.out.println("Creating nodes on localhost");
		for (int i = 1; i < numOfNodes; i++) {
			if (i % 10 == 0) { System.out.println(i + " of " + numOfNodes + " done."); }
			// Each new node joins via the reference of the previously created one.
			nodeList.add(new JDHTReputated(port + i,
					nodeList.get(i - 1).getReference(), -1 * Math.random()));
		}
		return DHTNode;
	}
	
	/**
	 * Reads up to numOfFiles news articles from MySQL and publishes each raw
	 * document into the DHT under key "id//&lt;id&gt;". A stemmed copy of every
	 * document is kept in documentList for the later TF analysis.
	 *
	 * @param DHTNod       unused; puts go through the last node in nodeList
	 * @param useThisQuery when true, restricts the query to topics life/news/spam
	 */
	private static void fillOverlayNetworkWithCollection(JDHT DHTNod, boolean useThisQuery) {
		
		documentList = new ArrayList<Document>();
		EnglishStemmer stemmer = new EnglishStemmer();
		
		// Program to the java.sql interfaces (fully qualified because the vendor
		// classes com.mysql.jdbc.Statement/ResultSet are imported at file level).
		Connection conn = null;
		java.sql.Statement stmt = null;
		java.sql.ResultSet srs = null;
		
		try {
			// Registering the driver only needs class loading; newInstance() was redundant.
			Class.forName("com.mysql.jdbc.Driver");
			// NOTE(review): credentials embedded in the URL — move to external configuration.
			conn = DriverManager.getConnection("jdbc:mysql://localhost/tfcmargon?" +
                    "user=zasca&password=zasca2009");
			
			stmt = conn.createStatement();
			if (!useThisQuery) {
				srs = stmt.executeQuery("SELECT id, title, subtitle, author, content, url, date, topic FROM tfcmargon.news_rawtext");
			} else {
				System.out.println("news and life");
				srs = stmt.executeQuery("SELECT id, title, subtitle, author, content, url, date, topic FROM tfcmargon.news_rawtext WHERE topic like 'life' or topic like 'news' or topic like 'spam'");
			}
			
			// Cross the whole result set (one row per document), capped at numOfFiles.
			int i = 0;
			System.out.println("Adding files to the DHT");
			while (srs.next() && i < numOfFiles) {
				i++;
				if (i % 100 == 0) { System.out.println(i + " of " + numOfFiles + " done."); }
				// Retrieve all fields of the current row.
				int id = srs.getInt("id");
				String title = srs.getString("title");
				String subtitle = srs.getString("subtitle");
				String content = srs.getString("content");
				String author = srs.getString("author");
				String url = srs.getString("url");
				String date = srs.getString("date");
				String topic = srs.getString("topic");
				
				// Raw copy goes into the DHT; stemmed copy stays local for TF analysis.
				Document document = new Document(id, title, subtitle, author, content, url, date, topic);
				Document parsed = new Document(id, stemmer.Stemm(title), stemmer.Stemm(subtitle), author, stemmer.Stemm(content), url, date, topic);
				
				documentList.add(parsed);
				
				nodeList.get(numOfNodes - 1).put("id//" + String.valueOf(id), document);
			}
			
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			// Release JDBC resources in reverse order of creation. The connection is
			// now closed on ALL paths (the original leaked it whenever the query or
			// row processing threw).
			if (srs != null) {
				try { srs.close(); } catch (SQLException ignored) { } // best effort
			}
			if (stmt != null) {
				try { stmt.close(); } catch (SQLException ignored) { } // best effort
			}
			if (conn != null) {
				try { conn.close(); } catch (SQLException ignored) { } // best effort
			}
		}
	}
	
	/**
	 * Two passes over the loaded collection: first computes per-document term
	 * frequencies via TFCalculatorNoDB, then publishes one posting list per
	 * non-punctuation term. Each list is stored through a randomly chosen node
	 * whose reputation is attached to every posting-list item.
	 *
	 * @param DHTNode unused; puts go through the randomly selected node
	 */
	private static void fillOverlayNetworkWithVocabulary(JDHT DHTNode) {
		
		TFCalculatorNoDB tfCalculator = TFCalculatorNoDB.INSTANCE;
		
		// Pass 1: TF analysis of every stemmed document.
		int i = 0;
		System.out.println("Analysing TF of the docs");
		for (Iterator<Document> itDoc = documentList.iterator(); itDoc.hasNext(); ) {
			if (i % 100 == 0) { System.out.println(i + " of " + numOfFiles + " done."); }
			Document tempDoc = itDoc.next();
			tfCalculator.fillDocumentDictionary(tempDoc.id, tempDoc.getFieldsNames(), tempDoc.getFieldsContents());
			i++;
		}
		
		// Pass 2: publish the inverted index.
		i = 0;
		System.out.println("Adding references to the DHT");
		Iterator<String> bagOfWord = tfCalculator.invertedIndex.keySet().iterator();
		while (bagOfWord.hasNext()) {
			
			HashMap<String, Double> fieldsValues = new HashMap<String, Double>();
			
			// Pick a random storage node; its reputation tags the posting-list items.
			JDHTReputated dhtnode = (JDHTReputated) nodeList.get((int) Math.round(Math.random() * (numOfNodes - 1)));
			
			fieldsValues.put("reputation", dhtnode.reputation);
			
			// (i == 0 already satisfies i % 100 == 0; the redundant extra check was dropped.)
			if (i % 100 == 0) {
				System.out.println(i + " of " + tfCalculator.invertedIndex.keySet().size() + " done.");
			}
			
			String mainTerm = bagOfWord.next();
			if (!TFCalculatorNoDB.isPunctuation(mainTerm)) {
				PostingList postingList = new PostingList(mainTerm);
				Iterator<TF_DOC> tf_doc_s = tfCalculator.invertedIndex.get(mainTerm).iterator();
				while (tf_doc_s.hasNext()) {
					TF_DOC temp_tf_doc = tf_doc_s.next();
					PostingListItem item = new PostingListItem("id//" + temp_tf_doc.id, temp_tf_doc.termFreqs, fieldsValues);
					postingList.addDocument(item);
				}
				
				dhtnode.put(postingList.word, postingList);
			}
			i++;
		}
	}
	
}
