

package org.apache.hadoop.hdfs.server.datanode;

import java.io.BufferedReader; 
import java.io.FileReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TimerTask;

/**
 * {@link TimerTask} that re-reads the cyclical block list file
 * ({@code PrototypeUtilities.CYCLICAL_BLOCK_LIST_FILENAME}) and hands the
 * parsed mapping of block ID to {@link BlockSender.ReaderType} over to
 * {@code BlockSender.AMEND_BLOCKS_TO_USE_CYCLICAL_SCANNING}.
 *
 * <p>Expected file format: one entry per line, {@code <blockId>;<readerType>}.
 * Malformed lines are skipped silently.
 */
public class ReloadBlockList extends TimerTask {

	@Override
	public void run() {
		Map<Long, BlockSender.ReaderType> newBlockList =
				new HashMap<Long, BlockSender.ReaderType>();

		// try-with-resources guarantees the reader is closed on every path;
		// the original leaked the FileReader (this task runs repeatedly from
		// a Timer, so leaked descriptors would accumulate).
		try (BufferedReader in = new BufferedReader(
				new FileReader(PrototypeUtilities.CYCLICAL_BLOCK_LIST_FILENAME))) {

			String line;
			while ((line = in.readLine()) != null) {
				try {
					// Line format: "<blockId>;<readerType>"
					String[] lineComponents = line.split(";");
					long blockId = Long.parseLong(lineComponents[0]);
					BlockSender.ReaderType readerType =
							BlockSender.ReaderType.valueOf(lineComponents[1]);
					// Note: parseLong throws rather than returning null, so no
					// null check is needed on the parsed ID.
					newBlockList.put(blockId, readerType);
				} catch (RuntimeException ignored) {
					// Malformed line (missing field, non-numeric ID, or unknown
					// reader type) — skip it and keep processing the rest.
				}
			}

			BlockSender.AMEND_BLOCKS_TO_USE_CYCLICAL_SCANNING(newBlockList);
		}
		catch (Exception e) {
			System.out.println("File not found, IO exception or invalid file format when reading block list: " + e.getMessage());
			e.printStackTrace();
		}
	}

}