package classifier;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Vector;

import model.BlogPost;

public class WekaAdapter {
	// Weka's attribute index starts from 1 while Java containers start from 0
	public static final int INDEX_PADDING = 1;

	/**
	 * Reads a file list as input, and saves the content of the listed posts
	 * into an arff file as output. The file list must be provided in the
	 * comma separated form, the first field being the blog_id and the second
	 * field being the sentiment polarity. Only the summary (the topical part
	 * of the post) is kept in the arff file.
	 *
	 * @param input     path of the "blog_id,polarity" list file
	 * @param output    path of the arff file to create
	 * @param annotator annotator whose version of each post is fetched
	 */
	public void toArffSummary(String input, String output, String annotator) {
		HashMap<String, Integer> posts = readPostList(input);
		// Retrieve the posts and save them into an arff file
		BufferedWriter writer = null;
		try {
			writer = new BufferedWriter(new OutputStreamWriter(
					new FileOutputStream(output)));
			writeArffHeader(writer);
			// Load the instances from the database
			DatabaseAdapter ca = new DatabaseAdapter();
			ca.initialize("jdbc:mysql://goanna.cs.rmit.edu.au:56555/research");

			double driftingness = 0;
			for (String blogID : posts.keySet()) {
				BlogPost post = ca.getPost(blogID, annotator);
				driftingness += post.getDriftingnessByLength();
				writer.write("'" + escapeArff(post.getTopical()) + "',"
						+ posts.get(blogID) + "\n");
			}
			// NOTE(review): prints NaN for an empty post list, as before
			System.out.println("Average driftingness: " + driftingness
					/ posts.size());
			writer.flush();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(writer);
		}
	}

	/**
	 * Reads a file list as input, and saves the content of the listed posts
	 * into an arff file as output. The file list must be provided in the
	 * comma separated form, the first field being the blog_id and the second
	 * field being the sentiment polarity. The full post is kept in the arff
	 * file.
	 *
	 * @param input  path of the "blog_id,polarity" list file
	 * @param output path of the arff file to create
	 */
	public void toArffFulltext(String input, String output) {
		HashMap<String, Integer> posts = readPostList(input);
		// Retrieve the posts and save them into an arff file
		BufferedWriter writer = null;
		try {
			writer = new BufferedWriter(new OutputStreamWriter(
					new FileOutputStream(output)));
			writeArffHeader(writer);
			// Load the instances from the database
			DatabaseAdapter ca = new DatabaseAdapter();
			ca.initialize("jdbc:mysql://goanna.cs.rmit.edu.au:56555/research");

			for (String blogID : posts.keySet()) {
				writer.write("'" + escapeArff(ca.getPostContent(blogID))
						+ "'," + posts.get(blogID) + "\n");
			}
			writer.flush();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(writer);
		}
	}

	/**
	 * Parses a "blog_id,polarity" list file into a map from blog id to
	 * polarity. The polarity is taken to be the single character after the
	 * trailing comma. Returns an empty (or partial) map if reading fails;
	 * errors are logged rather than propagated.
	 */
	private HashMap<String, Integer> readPostList(String input) {
		HashMap<String, Integer> posts = new HashMap<String, Integer>();
		BufferedReader in = null;
		try {
			in = new BufferedReader(new InputStreamReader(
					new FileInputStream(input)));
			String line;
			while ((line = in.readLine()) != null) {
				// Everything before the trailing ",<digit>" is the blog id
				posts.put(line.substring(0, line.length() - 2),
						Integer.parseInt(line.substring(line.length() - 1)));
			}
		} catch (IOException e) {
			e.printStackTrace();
		} catch (NumberFormatException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(in);
		}
		return posts;
	}

	/** Writes the fixed arff header shared by both export methods. */
	private static void writeArffHeader(BufferedWriter writer)
			throws IOException {
		writer.write("@relation blogposts\n");
		writer.write("@attribute Text string\n");
		writer.write("@attribute class {2,4}\n");
		writer.write("@data\n");
	}

	/**
	 * Flattens newlines to spaces and backslash-escapes quotes so the text
	 * can be embedded in a single-quoted arff string attribute.
	 */
	private static String escapeArff(String text) {
		text = text.replace('\n', ' ');
		text = text.replace('\r', ' ');
		text = text.replace("\'", "\\\'");
		text = text.replace("\"", "\\\"");
		return text;
	}

	/** Closes the resource if non-null, logging (not propagating) failures. */
	private static void closeQuietly(java.io.Closeable c) {
		if (c != null) {
			try {
				c.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Reads a sparse arff file and records, for every attribute, the maximum
	 * value it takes over all data rows. All attribute/max-value pairs are
	 * printed, followed by the top 100 attributes sorted by that maximum in
	 * descending order.
	 *
	 * @param filename path of the sparse arff file to read
	 */
	public void readARFF(String filename) {
		// attribute name -> maximum value observed for it
		HashMap<String, Float> featureValueMap = new HashMap<String, Float>();
		// attribute names in declaration order, indexed by attribute number
		List<String> words = new ArrayList<String>();
		BufferedReader in = null;
		try {
			in = new BufferedReader(new InputStreamReader(
					new FileInputStream(filename)));
			String line;
			int attributeCount = 0;
			while ((line = in.readLine()) != null) {
				if (line.trim().length() == 0) {
					continue;
				}
				if (line.startsWith("@")) {
					// Header section; startsWith avoids the
					// StringIndexOutOfBoundsException the old substring
					// checks threw on short @-lines
					if (line.startsWith("@data")) {
						System.out.println("Data begins");
					} else if (line.startsWith("@relation")) {
						System.out.println(line.substring(10));
					} else if (line.startsWith("@attribute")) {
						// Attribute name sits between keyword and type
						words.add(line.substring(11, line.lastIndexOf(' ')));
						attributeCount++;
					}
				} else {
					// Sparse data row: {index value,index value,...}
					System.out.println("Data " + line.length());
					line = line.substring(1, line.length() - 1);
					StringTokenizer stk = new StringTokenizer(line, ",");
					while (stk.hasMoreTokens()) {
						String featureAndValue = stk.nextToken();
						int spaceIndex = featureAndValue.indexOf(' ');
						String featureName = words.get(Integer
								.parseInt(featureAndValue.substring(0,
										spaceIndex)));
						float value = Float.parseFloat(featureAndValue
								.substring(spaceIndex + 1));
						// Keep the largest value seen for each feature
						Float previous = featureValueMap.get(featureName);
						if (previous == null || previous <= value) {
							featureValueMap.put(featureName, value);
						}
					}
				}
			}
			System.out.println("Num of attributes:" + attributeCount);

			// Dump every feature, then the 100 best by value (descending)
			for (String featureName : featureValueMap.keySet()) {
				System.out.println(featureName + " "
						+ featureValueMap.get(featureName));
			}
			int count = 0;
			for (String key : sortByValueDesc(featureValueMap)) {
				System.out.println(key + " " + featureValueMap.get(key));
				count++;
				if (count > 99) {
					break;
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		} catch (NumberFormatException e) {
			e.printStackTrace();
		} finally {
			closeQuietly(in);
		}
	}

	/**
	 * Returns the keys of {@code m} sorted by their mapped values in
	 * ascending order. Null values sort last; values that do not implement
	 * {@link Comparable} keep an arbitrary relative order.
	 */
	public static <K, V> List<K> sortByValue(final Map<K, V> m) {
		List<K> keys = new ArrayList<K>(m.keySet());
		Collections.sort(keys, new Comparator<K>() {
			@SuppressWarnings("unchecked")
			public int compare(K o1, K o2) {
				Object v1 = m.get(o1);
				Object v2 = m.get(o2);
				if (v1 == null) {
					return (v2 == null) ? 0 : 1;
				} else if (v2 == null) {
					// Fix: the original NPE'd in compareTo on a null v2
					return -1;
				} else if (v1 instanceof Comparable) {
					return ((Comparable<Object>) v1).compareTo(v2);
				} else {
					return 0;
				}
			}
		});
		return keys;
	}

	/**
	 * Returns the keys of {@code m} sorted by their mapped values in
	 * descending order. Null values sort last; values that do not implement
	 * {@link Comparable} keep an arbitrary relative order.
	 */
	public static <K, V> List<K> sortByValueDesc(final Map<K, V> m) {
		List<K> keys = new ArrayList<K>(m.keySet());
		Collections.sort(keys, new Comparator<K>() {
			@SuppressWarnings("unchecked")
			public int compare(K o1, K o2) {
				Object v1 = m.get(o1);
				Object v2 = m.get(o2);
				if (v1 == null) {
					return (v2 == null) ? 0 : 1;
				} else if (v2 == null) {
					// Fix: the original NPE'd in compareTo on a null v2
					return -1;
				} else if (v1 instanceof Comparable) {
					return ((Comparable<Object>) v2).compareTo(v1);
				} else {
					return 0;
				}
			}
		});
		return keys;
	}
}
