/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.fone.flumeExt.sink.L2HSink;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * <p>
 * Title: File2HDFSJob.java
 * </p>
 * A Quartz {@link org.quartz.Job} that recursively scans a local directory
 * tree and uploads every completed file (anything not ending in
 * {@code .tmp}) to HDFS, optionally deleting the local copies afterwards.
 * <p>
 * Description: scheduled local-file-to-HDFS uploader for the L2H sink.
 * </p>
 *
 * @author Phoenics Chow
 * @version 1.0
 * @date 2014-6-3
 */
public class File2HDFSJob implements Job {

	/** The log. (Kept package-visible and non-final to preserve the original contract.) */
	static Logger LOG = LoggerFactory.getLogger(File2HDFSJob.class);

	/**
	 * Files collected by {@link #getChildFilesByRecursion(String)} for the
	 * current run. Static so an overlapping trigger can detect that the
	 * previous run has not finished (see {@link #execute}); the
	 * copy-on-write list keeps that cross-thread check safe.
	 */
	static private List<File> childrenList = new CopyOnWriteArrayList<File>(); // CopyOnWriteArrayList

	/**
	 * Scans the configured local directory and uploads the collected files
	 * to HDFS. If a previous run is still in progress (detected via the
	 * shared {@link #childrenList}), this run gives up immediately.
	 * <p>
	 * Expected job data map entries: {@code filePath} (local root directory),
	 * {@code hdfsPath} (HDFS target root) and {@code isKeep} (when
	 * {@code false}, local files are deleted after a successful upload).
	 *
	 * @param context the Quartz execution context carrying the job data map
	 * @throws JobExecutionException declared by the {@link Job} interface;
	 *             not thrown directly by this implementation
	 */
	@Override
	public void execute(JobExecutionContext context) throws JobExecutionException {
		if (!childrenList.isEmpty()) {
			// A previous trigger is still uploading; skip this run entirely.
			LOG.info("上一个job没有处理完成，本次job放弃");
			return;
		}
		try {
			String jobName = context.getJobDetail().getKey().getName();
			LOG.info("the {} job start  run.", jobName);
			JobDataMap dataMap = context.getJobDetail().getJobDataMap();
			String filePath = dataMap.getString("filePath");
			String hdfsPath = dataMap.getString("hdfsPath");
			boolean isKeep = dataMap.getBoolean("isKeep");
			getChildFilesByRecursion(filePath);
			// isKeep == false means the local copy is removed after upload.
			upFileToHdfs(filePath, hdfsPath, !isKeep);
			LOG.info("the {} job  end .", jobName);
		} finally {
			// Always release the "job in progress" marker so the next trigger can run.
			childrenList.clear();
		}
	}

	/**
	 * Uploads a single local file to HDFS using an already-resolved
	 * {@link FileSystem}. Failures are logged and swallowed so that one bad
	 * file does not abort the remaining uploads (best-effort semantics).
	 *
	 * @param fileSystem
	 *            the HDFS file system handle shared by the whole run
	 * @param localFile
	 *            absolute path of the local source file
	 * @param hdfsFile
	 *            target path on HDFS
	 * @param deleteLocalFile
	 *            whether to delete the local file after a successful copy
	 */
	private void upFileByHadoop(FileSystem fileSystem, String localFile, String hdfsFile, boolean deleteLocalFile) {
		try {
			Path localFilePath = new Path(localFile);
			Path hdfsFilePath = new Path(hdfsFile);
			// overwrite = true: an existing HDFS file with the same name is replaced.
			fileSystem.copyFromLocalFile(deleteLocalFile, true, localFilePath, hdfsFilePath);
		} catch (IOException e) {
			LOG.error("写向hdfs错误：", e);
		}
	}

	/**
	 * Uploads every file collected in {@link #childrenList} to HDFS,
	 * mirroring the local directory layout below {@code hdfsPath}.
	 * <p>
	 * The {@link FileSystem} is resolved once per run instead of once per
	 * file: building a new {@code Configuration} for every upload is
	 * comparatively expensive.
	 *
	 * @param inpath
	 *            the local root directory (its prefix is stripped from each
	 *            absolute file path to build the relative HDFS path)
	 * @param hdfsPath
	 *            the HDFS target root
	 * @param deleteLocalFile
	 *            whether to delete each local file after a successful copy
	 */
	private void upFileToHdfs(String inpath, String hdfsPath, boolean deleteLocalFile) {
		if (childrenList.isEmpty()) {
			return;
		}
		FileSystem fileSystem;
		try {
			fileSystem = FileSystem.get(new Configuration());
		} catch (IOException e) {
			LOG.error("写向hdfs错误：", e);
			return;
		}
		for (File file : childrenList) {
			String fileAbsPath = file.getAbsolutePath();
			// NOTE(review): assumes inpath has no trailing separator, so the
			// substring starts with one and "hdfsPath + /" yields a clean path.
			String hdfsRealPath = hdfsPath + "/" + fileAbsPath.substring(inpath.length());
			upFileByHadoop(fileSystem, fileAbsPath, hdfsRealPath, deleteLocalFile);
		}
	}

	/**
	 * Recursively collects all regular files under {@code path} into
	 * {@link #childrenList}, skipping in-progress {@code .tmp} files and
	 * pruning empty directories along the way.
	 *
	 * @param path
	 *            the directory to scan
	 */
	private void getChildFilesByRecursion(String path) {
		File parentFile = new File(path);
		File[] children = parentFile.listFiles();
		// listFiles() returns null when the path does not exist, is not a
		// directory, or an I/O error occurs — guard against the NPE the
		// original code would have thrown here.
		if (children == null) {
			LOG.warn("cannot list directory {}, skipping", path);
			return;
		}
		if (children.length == 0) {
			// Prune directories emptied by earlier upload-and-delete runs.
			parentFile.delete();
			return;
		}
		for (File file : children) {
			if (file.isFile()) {
				// .tmp files are still being written by the producer; skip them.
				if (!file.getName().endsWith(".tmp")) {
					childrenList.add(file);
				}
			} else if (file.isDirectory()) {
				getChildFilesByRecursion(file.getAbsolutePath());
			}
		}
	}
}
