package com.bclz.service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import com.bclz.config.Constants;
import com.bclz.config.HadoopManager;
import com.bclz.config.LoadProperties;


/**
 * 
* @ClassName: WordCountService  
* @Description: 1. Load the configured ObjectReduce implementation class;
* 				2. Recursively list the files under the input directory on the file system;
* 				3. Open each file with FS, read it line by line, and feed every line to the map step;
* 				4. Write the resulting count map to a result file through an FS output stream.
* @author xuchang  
* @date 2018年9月12日
 */
public class WordCountService {

	/**
	 * Runs the word-count job end to end:
	 * loads the {@code ObjectReduce} implementation named by the
	 * {@code Constants.WORD_COUNT_CLASSNAME} property, feeds every line of every
	 * file under {@code /test/logs} to its {@code map} step (accumulating into a
	 * shared {@code CountMap}), then writes the counts as
	 * {@code word<TAB>count} lines to {@code /test/output/result.txt}.
	 * Errors are reported to stdout; the FileSystem is closed in all cases.
	 */
	public void wordCount() {
		
		FileSystem fs=HadoopManager.getFileSystem();
		System.out.println(fs);
		Properties p=LoadProperties.loadHadoopConfig();
		CountMap context=new CountMap();
		
		Path inputDire=new Path("/test/logs");
		Path outDire=new Path("/test/output");
		
		try {
			
			// Reflectively load the configured implementation.
			// getDeclaredConstructor().newInstance() replaces the deprecated
			// Class.newInstance(), which silently rethrows constructor exceptions.
			Class<?> mapImpl=Class.forName(p.getProperty(Constants.WORD_COUNT_CLASSNAME));
			ObjectReduce reduce=(ObjectReduce)mapImpl.getDeclaredConstructor().newInstance();
			
			// Recursively enumerate every file under the input directory.
			RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(inputDire, true);
			
			while(listFiles.hasNext()) {
				LocatedFileStatus lStatus=listFiles.next();
				// try-with-resources closes the reader (and the wrapped stream)
				// even when readLine() throws; the original leaked both streams on
				// error and closed the raw stream before the reader. Read as UTF-8
				// explicitly so input decoding matches the UTF-8 output below.
				try (FSDataInputStream in=fs.open(lStatus.getPath());
						BufferedReader reader=new BufferedReader(
								new InputStreamReader(in, StandardCharsets.UTF_8))) {
					String contextLine;
					while((contextLine=reader.readLine())!=null) {
						reduce.map(contextLine, context);
					}
				}
			}
			
			// Write the aggregated counts. try-with-resources guarantees the
			// stream is flushed and closed; the original never closed it, which
			// could leave the result file truncated or empty.
			try (FSDataOutputStream out=fs.create(new Path(outDire, "result.txt"))) {
				context.getMap().forEach((k,v)->{
					try {
						out.write( (k+"\t"+v+"\n").getBytes(StandardCharsets.UTF_8));
					} catch (IOException e) {
						// Best-effort per-entry write: report and continue with the rest.
						System.out.println("map结果输出出错:"+e.getMessage());
					}
				});
			}
			
		} catch (Exception e) {
			System.out.println("map失败:"+e.getMessage());
		} finally {
			try {
				fs.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		
	}
	
	/**
	 * Demo entry point: runs the job, then dumps the produced result file to stdout.
	 *
	 * @param args unused
	 * @throws IOException if the result file cannot be opened or the FileSystem
	 *         cannot be closed
	 */
	public static void main(String[] args) throws IllegalArgumentException, IOException {
		
		WordCountService wordService=new WordCountService();
		wordService.wordCount();
		
		FileSystem fs=HadoopManager.getFileSystem();
		System.out.println(fs);
		
		// Close the reader deterministically (the original leaked it) and decode
		// with the same UTF-8 charset the job used when writing the file.
		try (FSDataInputStream in=fs.open(new Path("/test/output/result.txt"));
				BufferedReader reader=new BufferedReader(
						new InputStreamReader(in, StandardCharsets.UTF_8))) {
			reader.lines().forEach(System.out::println);
		} finally {
			fs.close();
		}
	}
	
}
