package exp;

import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import tools.FileReaderEnhance;
import tools.ObjectTools;
import tools.twitter.TweetTidy;
import tools.twitter.URLTools;

/**
 * Extract all URLs to download
 * 
 * @author Lanjun
 * 
 */
public class EXP3_0 {
	/**
	 * Scans every plain-text tweet file under {@code inputpath}, extracts all
	 * unique URLs (first-seen order preserved) and serializes, per input file,
	 * a {@code Map<String,String>} of URL -> content to
	 * {@code outputpath + fileName}. Content values are left {@code null} as
	 * placeholders for a later download step. Files whose output already
	 * exists are skipped, so the job is resumable.
	 *
	 * @param inputpath  directory containing UTF-8 plain-text tweet files
	 * @param outputpath directory prefix (expected to end with a separator)
	 *                   where the per-file URL maps are written
	 */
	public void run(String inputpath, String outputpath) {
		File[] inputFiles = new File(inputpath).listFiles();
		if (inputFiles == null) {
			// listFiles() returns null when inputpath is not a readable directory;
			// without this guard the for-each below throws NullPointerException.
			System.err.println("EXP3_0: cannot list directory " + inputpath);
			return;
		}

		for (File file : inputFiles) {
			File urlFile = new File(outputpath + file.getName());
			if (urlFile.exists()) {
				System.out.println("EXP3_0: " + file.getAbsolutePath() + " already processed!");
				continue;
			}

			// LinkedHashMap keeps URLs in first-seen order; values stay null
			// as placeholders for content fetched by a later stage.
			Map<String, String> urlAndContents = new LinkedHashMap<>();

			// Tolerate both Unix (\n) and Windows (\r\n) line endings so no
			// stray '\r' is left on each line.
			String[] lines = FileReaderEnhance.readToString(file, "UTF-8")
					.split("\r?\n");
			for (String line : lines) {
				line = TweetTidy.doTidyHTML(line);
				ArrayList<String> urls = URLTools.getURLs(line);
				for (String url : urls) {
					// Dedupe, keeping the first occurrence. Equivalent to the
					// containsKey/put pair since every value is null, and a
					// re-put does not alter LinkedHashMap insertion order.
					urlAndContents.putIfAbsent(url, null);
				}
			}

			ObjectTools.writeToFile(urlAndContents, outputpath + file.getName());
		}
	}

	/** Entry point: runs the extraction with the project's default data paths. */
	public static void main(String[] args) {
		String inputpath = "data/_newData/plainText/";
		String outputpath = "data/_newData/obj/urlContent/";

		EXP3_0 exp = new EXP3_0();
		exp.run(inputpath, outputpath);
	}
}
