package multipleLinearRegression;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
           
/**
 * Driver for multiple linear regression trained by batch gradient descent on
 * MapReduce. Each loop iteration submits one job; the reducer is expected to
 * write the updated parameter vector to "theta.txt" as a single
 * comma-separated line, which the next iteration reads back and propagates to
 * the tasks through the job Configuration (key "theta").
 *
 * Usage: &lt;input&gt; &lt;output&gt; &lt;alpha&gt; &lt;num_iterations&gt; &lt;feature_size&gt; &lt;input_data_size&gt;
 */
public class MultipleLinearRegression {

	  public static void main(String[] args) throws Exception {
		  if (args.length < 6) {
			  System.err.println("Usage: MultipleLinearRegression <input> <output> "
					  + "<alpha> <num_iterations> <feature_size> <input_data_size>");
			  System.exit(2);
		  }

		  // Parse the fixed run parameters once, up front (previously re-parsed
		  // inside every loop iteration).
		  float alpha = Float.parseFloat(args[2]);            // gradient-descent learning rate
		  int num_of_iteration = Integer.parseInt(args[3]);   // number of gradient-descent steps
		  int feature_size = Integer.parseInt(args[4]);       // length of the theta vector
		  int input_data_size = Integer.parseInt(args[5]);    // number of training examples

		  String[] theta;
		  int iteration = 0;

		  do {
			  Configuration conf = new Configuration();
			  FileSystem fs = FileSystem.get(conf);

			  Job job = new Job(conf, "LinearRegressionMapReduce");
			  job.setJarByClass(MultipleLinearRegression.class);

			  // Job copies the Configuration it is given; per-iteration values
			  // ("theta", "alpha", ...) must be set on the job's own copy or
			  // they never reach the map/reduce tasks.
			  conf = job.getConfiguration();

			  job.setOutputKeyClass(LongWritable.class);
			  job.setOutputValueClass(FloatWritable.class);

			  job.setMapperClass(MultipleLinearRegressionMapper.class);
			  job.setReducerClass(MultipleLinearRegressionReducer.class);

			  job.setInputFormatClass(TextInputFormat.class);
			  job.setOutputFormatClass(TextOutputFormat.class);

			  // Exactly one reducer so every partial gradient is combined into
			  // a single theta update (and a single theta.txt).
			  job.setNumReduceTasks(1);

			  FileInputFormat.addInputPath(job, new Path(args[0]));
			  Path out = new Path(args[1]);
			  // MapReduce refuses to start if the output path already exists.
			  if (fs.exists(out)) fs.delete(out, true);
			  FileOutputFormat.setOutputPath(job, out);

			  conf.setFloat("alpha", alpha);
			  conf.setInt("feature_size", feature_size);
			  conf.setInt("input_data_size", input_data_size);
			  conf.setInt("iteration", iteration);

			  // First iteration starts from the zero vector; later iterations
			  // resume from the theta the previous job wrote out.
			  theta = (iteration == 0) ? zeroTheta(feature_size)
					  : readTheta(fs, feature_size);
			  conf.setStrings("theta", theta);

			  for (int i = 0; i < theta.length; i++)
				  System.out.println("In MapReduce main function: theta[ " + i + " ] " + theta[i]);

			  // Bug fix: check the job's success flag instead of ignoring it —
			  // iterating on after a failed job would train on stale theta.
			  if (!job.waitForCompletion(true)) {
				  System.err.println("Iteration " + iteration + " failed; aborting.");
				  System.exit(1);
			  }
			  iteration++;
		  } while (iteration < num_of_iteration);

		  System.exit(0);
	  }

	  /** Returns a theta vector of the given length with every component "0.0". */
	  private static String[] zeroTheta(int featureSize) {
		  String[] theta = new String[featureSize];
		  for (int i = 0; i < theta.length; i++)
			  theta[i] = "0.0";
		  return theta;
	  }

	  /**
	   * Reads the comma-separated theta written by the previous iteration from
	   * "theta.txt" on the job FileSystem. On any failure (missing file, empty
	   * file, wrong arity) it logs the problem and falls back to the zero
	   * vector — previously the exception was silently swallowed, leaving the
	   * caller with an array full of nulls. The stream is now closed via
	   * try-with-resources (it used to leak once per iteration).
	   */
	  private static String[] readTheta(FileSystem fs, int featureSize) {
		  try (BufferedReader br = new BufferedReader(
				  new InputStreamReader(fs.open(new Path("theta.txt"))))) {
			  String line = br.readLine();
			  if (line != null) {
				  String[] parsed = line.split(",");
				  if (parsed.length == featureSize)
					  return parsed;
				  System.err.println("theta.txt holds " + parsed.length
						  + " values, expected " + featureSize + "; restarting from zeros.");
			  } else {
				  System.err.println("theta.txt is empty; restarting from zeros.");
			  }
		  } catch (IOException e) {
			  e.printStackTrace();
		  }
		  return zeroTheta(featureSize);
	  }
   }
