package com.naver.hdfs;

import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import model.Board;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Business object that persists {@link Board} articles as plain-text files on
 * HDFS, one file per article under {@code repositoryPath}.
 *
 * <p>NOTE(review): {@link IOException}s in {@link #delete(long)},
 * {@link #write(Board, boolean)} and {@link #select(long)} are caught and
 * printed rather than propagated — this preserves the original contract
 * (callers see no checked exception; {@code select} returns {@code null} on
 * failure). Consider surfacing these via a logger/unchecked exception later.
 */
public class HDFSBoardBO {
	private HDFSBoardDAO boardDAO;
	private FileSystem fs;
	private String repositoryPath;

	public void setBoardDAO(HDFSBoardDAO boardDAO) {
		this.boardDAO = boardDAO;
	}

	public void setRepositoryPath(String repositoryPath) {
		this.repositoryPath = repositoryPath;
	}

	public void setFileSystem(HDFSConfiguration configuration) throws IOException {
		this.fs = configuration.getFileSystem();
	}

	/**
	 * Creates the given directory on the configured file system.
	 *
	 * @param path directory path to create
	 * @throws IOException if the underlying HDFS call fails
	 */
	public void makeDirectory(String path) throws IOException {
		boardDAO.makeDirectory(fs, new Path(path));
	}

	/** Location of the file storing the article with the given number. */
	private Path articlePath(long articleNo) {
		return new Path(repositoryPath + "/" + articleNo);
	}

	/**
	 * Deletes the article's backing file. Failures are printed, not propagated.
	 *
	 * @param articleNo number of the article to delete
	 */
	public void delete(long articleNo) {
		try {
			boardDAO.delete(fs, articlePath(articleNo));
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Writes the article to its file, creating the repository directory first.
	 *
	 * @param board  article to persist
	 * @param modify passed as the {@code overwrite} flag to
	 *               {@code FileSystem.create}: when {@code true} an existing
	 *               file is replaced
	 */
	public void write(Board board, boolean modify) {
		try {
			makeDirectory(repositoryPath);
			// try-with-resources closes the stream even if the DAO throws,
			// replacing the original manual try/finally.
			try (DataOutputStream out = fs.create(articlePath(board.getArticleNo()), modify)) {
				boardDAO.write(board, out);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Reads an article back from its file.
	 *
	 * <p>Each line is expected to hold three space-separated fields
	 * (articleNo, then two string fields); if the file has multiple lines the
	 * last one wins, matching the original behavior.
	 *
	 * @param articleNo number of the article to load
	 * @return the parsed {@link Board}, or {@code null} if the file is
	 *         missing, empty, or unreadable
	 */
	public Board select(long articleNo) {
		Board board = null;
		// try-with-resources fixes the original's stream/reader leak; an
		// explicit charset avoids platform-default decoding (UTF-8 assumed —
		// confirm against how the files are written).
		try (InputStream is = fs.open(articlePath(articleNo));
				BufferedReader br = new BufferedReader(
						new InputStreamReader(is, StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				String[] field = line.split(" ");
				board = new Board(Long.valueOf(field[0]), field[1], field[2]);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return board;
	}
}
