package deprecated;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;



/**
 * Combines a hadoop key/value mapping with matching lines from a test-set
 * file and a corpus file, writing one tab-separated record per mapping entry.
 *
 * <p>Redundant class; functionality is provided by Inputwriter.java.
 *
 * @author Thomas Wangler
 */
public class Xml_combinefile {

	// i/o files
	// NOTE(review): hadoopfile and comboutputfile are empty — they must be
	// filled in before running, otherwise opening them fails. TODO confirm paths.
	static String testsetfile = "D:\\pattr\\pattr\\claims\\pattr.de-en.claims.meta";
	static String corpusfile = "D:\\pattr\\partitionedCorpus\\pattr.testset.en";

	static String hadoopfile = "";

	static String comboutputfile = "";

	// streams (kept as fields for compatibility; opened in main)
	static FileInputStream hadoopStream;
	static FileInputStream testsetStream;
	static FileInputStream corpusStream;

	static FileOutputStream combStream;

	/**
	 * Reads the hadoop mapping ("key&lt;TAB&gt;value" per line), the test-set
	 * file and the corpus file, then writes one record per mapping entry:
	 * the test-set lines whose first "|||"-delimited field equals the key,
	 * followed (tab-separated) by the corpus lines whose first field equals
	 * the value.
	 *
	 * @param args unused
	 * @throws IOException if any of the files cannot be read or written
	 */
	public static void main(String[] args) throws IOException {

		// --- hadoop mapping ---------------------------------------------------
		// BUG FIX: the reader was previously constructed from hadoopStream
		// BEFORE the stream was opened, which threw a NullPointerException.
		hadoopStream = new FileInputStream(hadoopfile);
		Map<String, String> hadoopmap = new HashMap<String, String>();
		BufferedReader hadoopreader = new BufferedReader(
				new InputStreamReader(hadoopStream, StandardCharsets.UTF_8));
		try {
			String hadoopline;
			while ((hadoopline = hadoopreader.readLine()) != null) {
				String[] splitline = hadoopline.split("\t");
				// BUG FIX: guard against lines without a tab, which previously
				// caused an ArrayIndexOutOfBoundsException on splitline[1].
				if (splitline.length >= 2) {
					hadoopmap.put(splitline[0], splitline[1]);
				}
			}
		} finally {
			hadoopreader.close();
		}

		// --- test-set lines ---------------------------------------------------
		testsetStream = new FileInputStream(testsetfile);
		List<String> testsetlist = new ArrayList<String>();
		BufferedReader testsetreader = new BufferedReader(
				new InputStreamReader(testsetStream, StandardCharsets.UTF_8));
		try {
			String testsetline;
			while ((testsetline = testsetreader.readLine()) != null) {
				testsetlist.add(testsetline);
			}
		} finally {
			testsetreader.close();
		}

		// --- corpus lines -----------------------------------------------------
		corpusStream = new FileInputStream(corpusfile);
		List<String> corpuslist = new ArrayList<String>();
		BufferedReader corpusreader = new BufferedReader(
				new InputStreamReader(corpusStream, StandardCharsets.UTF_8));
		try {
			// BUG FIX: the first line was previously read from the already
			// closed testsetreader, so the corpus file was never read at all.
			String corpusline;
			while ((corpusline = corpusreader.readLine()) != null) {
				corpuslist.add(corpusline);
			}
		} finally {
			corpusreader.close();
		}

		// --- combine and write ------------------------------------------------
		// BUG FIX: the writer was previously constructed from combStream
		// BEFORE the stream was opened, which threw a NullPointerException.
		combStream = new FileOutputStream(comboutputfile);
		Writer outputWriter = new BufferedWriter(
				new OutputStreamWriter(combStream, StandardCharsets.UTF_8));
		try {
			for (Map.Entry<String, String> e : hadoopmap.entrySet()) {
				// StringBuilder instead of repeated String concatenation
				StringBuilder outstr = new StringBuilder();
				String currKey = e.getKey();
				String currValue = e.getValue();

				// test-set lines whose first "|||" field matches the key
				for (String str : testsetlist) {
					String split = str.split("\\|\\|\\|")[0];
					if (split.equals(currKey)) {
						outstr.append(str);
					}
				}

				// corpus lines whose first "|||" field matches the value
				for (String str : corpuslist) {
					String split = str.split("\\|\\|\\|")[0];
					if (split.equals(currValue)) {
						outstr.append('\t').append(str);
					}
				}

				outputWriter.write(outstr.toString());
				// BUG FIX: terminate each record; previously every record was
				// concatenated onto a single output line.
				outputWriter.write(System.lineSeparator());
			}
		} finally {
			outputWriter.close();
		}
	}
}
