package clusteringTest;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DecodeRecord {

	/**
	 * Reads each line of a clustering result file stored on HDFS and decodes
	 * the vector portion of every record via {@link GetKeyFromVector}.
	 *
	 * <p>Each record is expected to look like {@code "<key> <vector>"}; the text
	 * after the first space is treated as the vector to decode.
	 *
	 * @author wanyi
	 * @param filelocation HDFS path of the file holding the clustering results
	 */
	public static void decodeRecord(String filelocation) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(filelocation), conf);
			Path path = new Path(filelocation);
			// FSDataInputStream.readLine() is deprecated and charset-unsafe;
			// read through a BufferedReader with an explicit charset instead.
			// try-with-resources also guarantees the stream is closed even on
			// error (the original leaked the FSDataInputStream).
			try (BufferedReader in = new BufferedReader(
					new InputStreamReader(fs.open(path), StandardCharsets.UTF_8))) {
				// Hoisted out of the loop: one decoder instance is enough.
				GetKeyFromVector getKeyFromVector = new GetKeyFromVector();
				String oneRecord;
				int i = 1;
				while ((oneRecord = in.readLine()) != null) {
					System.out.println("Read Record " + i++ + " from result:\n" + oneRecord);
					System.out.println("Now decoding...");
					// Keep only the vector part after the first space. If no
					// space exists, indexOf returns -1 and substring(0) yields
					// the whole record — same behavior as the original.
					int space = oneRecord.indexOf(" ");
					String vectorrecord = oneRecord.substring(space + 1);
					// Use the caller-supplied location instead of the hard-coded
					// HDFS path the original duplicated here (main passes the
					// identical string as filelocation).
					getKeyFromVector.getKeyFromVector(filelocation, vectorrecord);
				}
			}
		} catch (IOException e) {
			// Best-effort: report and return, matching the original behavior.
			e.printStackTrace();
		}
	}

	public static void main(String[] args) {
		// decodeRecord is static — invoke it on the class, not an instance.
		DecodeRecord.decodeRecord("hdfs://localhost:9000/user/tuxinhui/output/part-r-00000");
	}

}
