package com.bigdata.spark.common.util;

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SparkJobUtil {
	private static final Logger LOGGER = LoggerFactory.getLogger(SparkJobUtil.class);

	/** Static utility holder; not meant to be instantiated. */
	private SparkJobUtil() {
	}

	/**
	 * Closes the given {@link FileSystem}, logging (rather than propagating) any
	 * {@link IOException}, so it is safe to call from {@code finally} blocks.
	 *
	 * @param fileSystem the file system to close; may be {@code null}, in which case
	 *                   this is a no-op
	 */
	public static void closeQuietly(FileSystem fileSystem) {
		if (fileSystem != null) {
			try {
				fileSystem.close();
			} catch (IOException e) {
				// Parameterized logging avoids string concatenation when the level is disabled.
				LOGGER.error("Fail to close FileSystem: {}", fileSystem, e);
			}
		}
	}

	/**
	 * Verifies that {@code path} exists on the default Hadoop file system.
	 *
	 * <p>Fix: previously a missing path raised {@link FileNotFoundException}, which —
	 * being a subclass of {@link IOException} — was caught by this method's own
	 * {@code catch} block and only printed to stderr, so callers were never notified.
	 * The failure is now logged via SLF4J and rethrown unchecked so callers see it.
	 *
	 * @param path the path to check for existence
	 * @throws IllegalStateException if the path does not exist or the file system
	 *                               cannot be accessed
	 */
	public static void checkFileExists(String path) {
		Configuration configuration = new Configuration();
		FileSystem fileSystem = null;
		try {
			fileSystem = FileSystem.get(configuration);
			if (!fileSystem.exists(new Path(path))) {
				throw new FileNotFoundException(path);
			}
		} catch (IOException e) {
			// Preserve the cause instead of swallowing it with printStackTrace().
			LOGGER.error("File check failed for path: {}", path, e);
			throw new IllegalStateException("File check failed for path: " + path, e);
		} finally {
			closeQuietly(fileSystem);
		}
	}
}
