package com.rrd.mex.dw.hdfs;

import com.rrd.mex.dw.utils.DateUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;


/**
 * Parses YARN job-history configuration XML files stored under
 * {@code /user/history/done/<yyyy/MM/dd>} on HDFS and writes one
 * Ctrl-A-delimited record per job into a Hive-loadable output file.
 *
 * Record layout: jobId ^A user ^A user2 ^A timestamp ^A sql ^A maps ^A reduces
 */
public class JobParser {

	/** Field delimiter for the generated records (Ctrl-A, Hive's default). */
	public static final String CRT_FLAG = "\001";

	// NOTE(review): declared but never read; presumably the intended default
	// filesystem URI — confirm before removing.
	private static String HDFS = "hdfs://ns1/";

	private Configuration conf;
	private DocumentBuilderFactory dbFactory = null;
	private DocumentBuilder db = null;
	private FileSystem fs = null;

	/**
	 * Builds the Hadoop configuration, a hardened XML parser, and the
	 * {@link FileSystem} handle used for all reads/writes.
	 *
	 * @throws ParserConfigurationException if the XML parser cannot be configured
	 * @throws IOException if the HDFS filesystem cannot be obtained
	 */
	public JobParser() throws ParserConfigurationException, IOException {
		this.conf = config();
		dbFactory = DocumentBuilderFactory.newInstance();
		// Harden against XXE: the input is job-history XML from HDFS, but
		// disabling DTDs and external entities is cheap and removes the risk.
		dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
		dbFactory.setFeature("http://xml.org/sax/features/external-general-entities", false);
		dbFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
		db = dbFactory.newDocumentBuilder();
		fs = FileSystem.get(conf);
	}

	/**
	 * Creates a Hadoop {@link Configuration} that runs as the "hdfs" user and
	 * uses {@code DistributedFileSystem} for {@code hdfs://} URIs.
	 */
	public static Configuration config() {
		System.setProperty("HADOOP_USER_NAME", "hdfs");
		Configuration conf = new Configuration();
		conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
		return conf;
	}

	/**
	 * Recursively lists all {@code .xml} files under {@code dirpath}.
	 *
	 * @param dirpath HDFS directory to scan
	 * @return full HDFS paths of every .xml file found, depth-first
	 * @throws IOException if a directory listing fails
	 */
	private List<String> listFile(String dirpath) throws IOException {
		List<String> fileList = new ArrayList<String>();
		Path path = new Path(dirpath);
		FileStatus[] list = fs.listStatus(path);
		System.out.println("ls: " + dirpath);
		for (FileStatus f : list) {
			if (f.isDirectory()) {
				fileList.addAll(listFile(f.getPath().toString()));
			} else if (f.getPath().getName().endsWith(".xml")) {
				fileList.add(f.getPath().toString());
			}
		}
		return fileList;
	}

	/**
	 * Parses every job-conf XML file for the given day and writes one record
	 * per job to the audit output file (overwriting any previous run).
	 *
	 * @param dt business date in {@code yyyy-MM-dd} form
	 * @throws IOException if HDFS access fails
	 * @throws SAXException if an XML file cannot be parsed
	 */
	private void parseXML(String dt) throws IOException, SAXException {
		String dirpath = "/user/history/done/" + formateDateStr(dt);
		String outDir = "/data/bdm/audit_yarn_history_hive_inc/dt=" + dt + "/yarn_00001";
		List<String> list = listFile(dirpath);

		// try-with-resources: previously the writer leaked (and buffered
		// records were lost) whenever a file failed to parse. UTF-8 is now
		// explicit because SQL text may contain non-ASCII characters and the
		// platform default charset is not reliable.
		try (OutputStream out = fs.create(new Path(outDir), true);
				BufferedWriter bf = new BufferedWriter(
						new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
			int counter = 0;
			for (String s : list) {
				counter++;
				if (counter % 100 == 0) {
					// progress log every 100 files
					System.out.println("第" + counter + "文件：" + s);
				}
				bf.write(buildRecord(s));
				bf.newLine();
			}
		} finally {
			// Keep the original behavior of closing the (cached) FileSystem
			// once the run is complete; this instance is then unusable.
			fs.close();
		}
	}

	/**
	 * Parses a single job-conf XML file and builds its Ctrl-A-delimited record.
	 *
	 * @param s full HDFS path of the job_*.xml file
	 * @return jobId ^A user ^A user2 ^A firstTimestamp ^A sql ^A maps ^A reduces
	 * @throws IOException if the file cannot be read
	 * @throws SAXException if the XML is malformed
	 */
	private String buildRecord(String s) throws IOException, SAXException {
		String sql = "";
		String timestr = "";
		String user = "";
		String user2 = "";
		String maps = "";
		String reduces = "";

		Document document;
		// Close the input stream explicitly: DocumentBuilder.parse does not
		// guarantee it, so each file previously leaked a stream.
		try (FSDataInputStream fsdis = fs.open(new Path(s))) {
			document = db.parse(fsdis);
		}

		Element root = document.getDocumentElement();
		NodeList prList = root.getElementsByTagName("property");

		StringBuilder sb = new StringBuilder();
		// Job id: file name between the last '/' and the last '_'.
		sb.append(s.substring(s.lastIndexOf("/") + 1, s.lastIndexOf("_"))).append(CRT_FLAG);

		for (int i = 0; i < prList.getLength(); i++) {
			NodeList children = prList.item(i).getChildNodes();
			Node nameNode = children.item(0);
			if (nameNode == null || nameNode.getTextContent() == null) {
				continue;
			}
			Node valueNode = children.item(1);
			// Default to "" instead of NPE-ing on an empty <value/> element.
			String value = (valueNode == null || valueNode.getTextContent() == null)
					? "" : valueNode.getTextContent();
			switch (nameNode.getTextContent()) {
			case "hive.sentry.subject.name":
				user = value;
				break;
			case "mapreduce.job.cache.files.timestamps":
				timestr = value;
				break;
			case "hive.query.string":
				sql = value;
				break;
			case "mapreduce.job.user.name":
				user2 = value;
				break;
			case "mapreduce.job.maps":
				maps = value;
				break;
			case "hive.exec.reducers.max":
				reduces = value;
				break;
			default:
				break;
			}
		}

		sb.append(user).append(CRT_FLAG).append(user2).append(CRT_FLAG);
		// Only the first of the comma-separated timestamps is kept.
		if (!timestr.isEmpty()) {
			sb.append(timestr.split(",")[0]);
		}
		sb.append(CRT_FLAG);
		// Escape CR/LF so multi-line SQL stays on one record line.
		sb.append(sql.replace("\r", "\\r").replace("\n", "\\n"));
		sb.append(CRT_FLAG).append(maps).append(CRT_FLAG).append(reduces);
		return sb.toString();
	}

	/**
	 * Converts a {@code yyyy-MM-dd} date string into the {@code yyyy/MM/dd}
	 * form used by the job-history directory layout.
	 */
	public static String formateDateStr(String dateStr) {
		Date date = DateUtil.formateToDate(dateStr, "yyyy-MM-dd");
		dateStr = DateUtil.formateDateStr(date, "yyyy/MM/dd");
		System.out.println(dateStr);
		return dateStr;
	}

	/**
	 * Entry point. Expects a single {@code yyyy-MM-dd} date argument.
	 */
	public static void main(String[] args) throws IOException, ParserConfigurationException, SAXException {
		if (args == null || args.length <= 0) {
			System.out.println("请输入日期参数：yyyy-MM-dd");
			System.exit(-1);
		}
		String dt = args[0];
		System.out.println("开始运行，日期为：" + dt);
		JobParser job = new JobParser();
		job.parseXML(dt);
	}
}
