package cassandra;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.OrderedRows;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.exceptions.HectorException;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.Mutator;
import me.prettyprint.hector.api.query.ColumnQuery;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.RangeSlicesQuery;

import com.gash.scrape.FileParser;
import com.gash.scrape.ParseObject;

/**
 * Benchmark/demo client for a Cassandra "Wikistore" keyspace using the Hector client.
 *
 * <p>Flow: {@link #main} parses Wikipedia page data from a test file, bulk-inserts it
 * into the column families {@code Pages}, {@code Keywords}, {@code Citations} and
 * {@code Links}, timing the insert, then runs a timed keyword lookup for "linux".
 *
 * <p>Connection settings (host, port, cluster name) are read from the classpath
 * resource {@code cassandra-test.properties}.
 */
public class CassandraTest {
	private Properties config = new Properties();
	private static StringSerializer stringSerializer = StringSerializer.get();
	private Cluster cluster;
	private Keyspace keyspace;

	/**
	 * Entry point: runs the insert benchmark followed by the keyword-search benchmark.
	 *
	 * @param args ignored
	 * @throws Exception if parsing the test data or talking to Cassandra fails
	 */
	public static void main(String... args) throws Exception {
		CassandraTest ct = new CassandraTest();
		// Perform insert benchmark
		List<ParseObject> pages = FileParser.parse("testdata/hugepages.txt");
		long startTimeInsert = System.currentTimeMillis();
		ct.insertPages(pages);
		long endTimeInsert = System.currentTimeMillis();
		System.out.println("Time spent to insert " + pages.size() + ": "+ (endTimeInsert - startTimeInsert) + " ms");
		ct.searchForKeyword();
	}

	/**
	 * Loads connection properties and connects to the configured cluster,
	 * binding this instance to the "Wikistore" keyspace.
	 */
	public CassandraTest() {
		loadProperties();
		// parseInt validates that "port" is numeric before it is joined back into host:port.
		String host = config.getProperty("host") + ":" + Integer.parseInt(config.getProperty("port"));
		String clusterName = config.getProperty("cluster");
		this.cluster = HFactory.getOrCreateCluster(clusterName, host);
		this.keyspace = HFactory.createKeyspace("Wikistore", this.cluster);
	}

	/**
	 * Timed lookup: fetches all page ids stored under the keyword "linux" in CF
	 * {@code Keywords}, resolves each id to its {@code title} in CF {@code Pages},
	 * prints the timing breakdown and the resulting hits, then shuts down the
	 * cluster's connection pool.
	 */
	private void searchForKeyword() {
		// Get all Pages that has keyword "linux"
		long startTimeFetchKeyHits = System.currentTimeMillis();
		long startTimeFetchTotal = System.currentTimeMillis();
		RangeSlicesQuery<String, String, String> rangeSlicesQuery =
            HFactory.createRangeSlicesQuery(keyspace, stringSerializer, stringSerializer, stringSerializer);
        rangeSlicesQuery.setColumnFamily("Keywords");
        // A single-row "range": start key == end key == "linux".
        rangeSlicesQuery.setKeys("linux", "linux");
        // Empty start/finish column names = all columns, capped at 20000.
        rangeSlicesQuery.setRange("", "", false, 20000);
        rangeSlicesQuery.setRowCount(1);
        QueryResult<OrderedRows<String, String, String>> result = rangeSlicesQuery.execute();
		long endTimeFetchKeyHits = System.currentTimeMillis();
        OrderedRows<String, String, String> orderedRows = result.get();
        List<String> urls = new ArrayList<String>();
        for (Row<String, String, String> r : orderedRows) {
            // Each column in the keyword row holds a page URL as its value.
            List<HColumn<String, String>> colSlice = r.getColumnSlice().getColumns();
            for (HColumn<String, String> col : colSlice) {
                urls.add(col.getValue());
            }
        }

        long startTimeFetchTitle = System.currentTimeMillis();
        List<ParseObject> hitList = new ArrayList<ParseObject>();
        for (String key : urls) {
			ColumnQuery<String, String, String> columnQuery = HFactory.createStringColumnQuery(keyspace);
	        columnQuery.setColumnFamily("Pages").setKey(key).setName("title");
	        QueryResult<HColumn<String, String>> r2 = columnQuery.execute();
	        HColumn<String, String> titleColumn = r2.get();
	        // Guard against rows missing a "title" column; r2.get() returns null then,
	        // and calling getValue() on it would abort the whole benchmark with an NPE.
	        if (titleColumn == null) {
	        	continue;
	        }
	        ParseObject po = new ParseObject();
	        po.title = titleColumn.getValue();
	        po.url = key;
	        hitList.add(po);
        }
        long endTimeFetchTitle = System.currentTimeMillis();
        long endTimeFetchTotal = System.currentTimeMillis();
        System.out.println("Time spent to look up keys that has keyword \"linux\" in CF Keywords: " + (endTimeFetchKeyHits - startTimeFetchKeyHits) + " ms");
        System.out.println("Time spent to look up Pages.title that has id as key in CF Pages: " + (endTimeFetchTitle- startTimeFetchTitle) + " ms");
        System.out.println("Time spent total: " + (endTimeFetchTotal - startTimeFetchTotal) + " ms");
        System.out.println("Hits: " + hitList.size());
        for (ParseObject hit : hitList) {
        	System.out.println("Title: " + hit.title);
        	System.out.println("Url: "+ hit.url);
        }
        cluster.getConnectionManager().shutdown();
	}

	/**
	 * Inserts the given pages into Cassandra. For each page, its title/url go into
	 * CF {@code Pages} (keyed by the page URL, which is unique), and each keyword,
	 * citation and link becomes a column in the corresponding CF keyed by that term,
	 * with the column name = page id and value = page url.
	 *
	 * @param pages parsed Wikipedia pages to persist
	 */
	private void insertPages(List<ParseObject> pages) {
		try {
			Mutator<String> mutator = HFactory.createMutator(keyspace,
					stringSerializer);
			// Insert all Pages, Keywords, Links, Citations
			for (ParseObject page : pages) {
				// Since the Wikipedia URL is unique we use it as the key
				String id = page.url;
				mutator.addInsertion(id, "Pages", HFactory.createStringColumn("title", page.title));
				mutator.addInsertion(id, "Pages", HFactory.createStringColumn("url", page.url));
				// Insert keywords in Standard Column Family "Keywords", we use
				// the id of the Pages row
				for (String keyWord : page.keywords) {
					if (keyWord != null && !"".equals(keyWord))
						mutator.addInsertion(keyWord, "Keywords", HFactory.createStringColumn(id, page.url));
				}
				for (String citation : page.citations) {
					if (citation != null && !"".equals(citation))
						mutator.addInsertion(citation, "Citations", HFactory.createStringColumn(id, page.url));
				}
				for (String link : page.links) {
					if (link != null && !"".equals(link))
						mutator.addInsertion(link, "Links", HFactory.createStringColumn(id, page.url));
				}
				// Execute once per page so the pending batch stays bounded in size.
				mutator.execute();
			}
		} catch (HectorException e) {
			// NOTE(review): best-effort benchmark code — failures are logged, not rethrown.
			e.printStackTrace();
		}
	}

	/**
	 * Loads {@code cassandra-test.properties} from the classpath into {@link #config}.
	 * Fails fast with a descriptive error if the resource is missing, and always
	 * closes the stream (the original leaked it).
	 */
	private void loadProperties() {
		InputStream in = CassandraTest.class.getClassLoader().getResourceAsStream("cassandra-test.properties");
		if (in == null) {
			// Without this check Properties.load(null) throws a bare NPE with no context.
			throw new IllegalStateException("cassandra-test.properties not found on classpath");
		}
		try {
			config.load(in);
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			try {
				in.close();
			} catch (IOException ignored) {
				// Nothing useful to do if closing the classpath stream fails.
			}
		}
	}
}