package clusteringTest;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;


public class GetKeyFromVector {

	/**
	 * Scans the key/vector file at {@code dic} for the record whose vector part
	 * equals {@code vector} and prints that record's <em>key</em> to stdout;
	 * prints a not-found message when no record matches.
	 *
	 * <p>Each line is expected to be {@code "key<space>vector"}.
	 * NOTE(review): Hadoop's default TextOutputFormat separates key and value
	 * with a TAB, not a space — confirm the actual separator in part-r-00000.
	 *
	 * @author wanyi
	 * @param dic    HDFS path of the file holding all key/vector records
	 * @param vector the vector string to look up
	 */
	public static void getKeyFromVector(String dic, String vector)
	{
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(dic), conf);
			Path path = new Path(dic);
			String key = null;
			// try-with-resources closes the HDFS stream even on early break or
			// exception (the original leaked it). BufferedReader.readLine with an
			// explicit UTF-8 decoder replaces the deprecated, charset-mangling
			// FSDataInputStream.readLine().
			try (BufferedReader in = new BufferedReader(
					new InputStreamReader(fs.open(path), StandardCharsets.UTF_8))) {
				String record;
				while ((record = in.readLine()) != null) {
					int space = record.indexOf(' ');
					if (space < 0) {
						continue; // malformed line without a separator — skip it
					}
					if (record.substring(space + 1).equals(vector)) {
						key = record.substring(0, space); // key precedes the separator
						break;
					}
				}
			}
			// The original printed the vector (or the last line's vector / "null"
			// when nothing matched); the method's purpose is to report the key.
			if (key != null) {
				System.out.println(key);
			} else {
				System.out.println("no key found for vector: " + vector);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public static void main(String[] args) {
		// Static method — no instance needed.
		getKeyFromVector("hdfs://localhost:9000/user/tuxinhui/output/part-r-00000",
				"{2:1.0,17:1.0,76:1.0,96:1.0}");
	}

}
