package org.myorg;
/*
 * Compare democratic model's PGA with serial GA
 */

import java.io.*;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.util.*;

/*import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.BlockLocation;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import org.apache.hadoop.mapred.FileInputFormat;*/
/*import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;*/
import org.myorg.GA;



/**
 * HDFS smoke test: lists the datanodes in the cluster, then round-trips a
 * short length-prefixed UTF-8 string through a file in HDFS ("dinput/32/seg")
 * and hex-dumps any bytes trailing the payload.
 *
 * <p>Part of the comparison of the democratic model's PGA with the serial GA.
 */
public class Test {

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();

		try {
			FileSystem fs = FileSystem.get(conf);

			// Print the host name of every datanode in the HDFS cluster.
			DistributedFileSystem hdfs = (DistributedFileSystem) fs;
			DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
			System.out.println("list of all the nodes in HDFS cluster:");
			for (DatanodeInfo node : dataNodeStats) {
				System.out.println(node.getHostName());
			}

			Path f = new Path("dinput/32/seg");

			// Remove any stale copy so the write below starts from scratch.
			boolean isExists = fs.exists(f);
			System.out.println("The file exists? [" + isExists + "]");
			if (isExists) {
				boolean isDeleted = fs.delete(f, false); // false: not recursive
				if (isDeleted) {
					System.out.println("now delete " + f.getName());
				}
			}

			// Write a length-prefixed UTF-8 payload.
			System.out.println("create and write [" + f.getName() + "] to hdfs:");
			String str = "1010";
			byte[] payload = str.getBytes(StandardCharsets.UTF_8);
			// try-with-resources: the original leaked the stream if any
			// write threw before close(). Also dropped the explicit buffer
			// size of 0 passed to create(Path, boolean, int).
			try (FSDataOutputStream os = fs.create(f, true)) {
				// BUGFIX: prefix with the BYTE length, not String.length()
				// (char count) — the two differ for non-ASCII strings, which
				// would make the reader below consume the wrong span.
				os.writeInt(payload.length);
				os.write(payload);
			}

			// Read the payload back and echo it.
			System.out.println("read [" + f.getName() + "] from hdfs:");
			try (FSDataInputStream dis = fs.open(f)) {
				int len = dis.readInt();
				byte[] bt = new byte[len];
				// readFully blocks until the buffer is filled (or throws
				// EOFException) — replaces the per-byte readByte() loop.
				dis.readFully(bt);
				String st = new String(bt, StandardCharsets.UTF_8);
				System.out.println("res is " + st);

				// Hex-dump whatever trails the payload (expected: nothing).
				// BUGFIX: the original ignored the return value of read(),
				// which may legally fill only part of the buffer.
				byte[] bs = new byte[dis.available()];
				dis.readFully(bs);
				for (byte b : bs) {
					System.out.printf("0x%x ", b);
				}
				System.out.println("length is " + bs.length);
			}

		} catch (Exception e) {
			// Smoke test: report and fall through so "end" is still printed.
			e.printStackTrace();
		}

		System.out.println("end\n");
	}

}

