/* parbash.c -- Parallel BASH */

/* Copyright (C) 1987-2005 Free Software Foundation, Inc.

   This file is part of GNU Bash, the Bourne Again SHell.

   Bash is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   Bash is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with Bash; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place, Suite 330, Boston, MA 02111 USA. */
#include "config.h"

#if !defined (__GNUC__) && !defined (HAVE_ALLOCA_H) && defined (_AIX)
  #pragma alloca
#endif /* _AIX && RISC6000 && !__GNUC__ */

#include <stdio.h>
#include "chartypes.h"
#include "bashtypes.h"
#if !defined (_MINIX) && defined (HAVE_SYS_FILE_H)
#  include <sys/file.h>
#endif
#include "filecntl.h"
#include "posixstat.h"
#include <signal.h>
#ifndef _MINIX
#  include <sys/param.h>
#endif

#if defined (HAVE_UNISTD_H)
#  include <unistd.h>
#endif

#include "posixtime.h"

#if defined (HAVE_SYS_RESOURCE_H) && !defined (RLIMTYPE)
#  include <sys/resource.h>
#endif

#if defined (HAVE_SYS_TIMES_H) && defined (HAVE_TIMES)
#  include <sys/times.h>
#endif

#include <errno.h>

#if !defined (errno)
extern int errno;
#endif

#include "bashansi.h"
#include "bashintl.h"

#include "memalloc.h"
#include "shell.h"
#include <y.tab.h>	/* use <...> so we pick it up from the build directory */
#include "flags.h"
#include "builtins.h"
#include "hashlib.h"
#include "jobs.h"
#include "execute_cmd.h"
#include "findcmd.h"
#include "redir.h"
#include "trap.h"
#include "pathexp.h"
#include "hashcmd.h"
#include "variables.h"

#if defined (COND_COMMAND)
#  include "test.h"
#endif

#include "builtins/common.h"
#include "builtins/builtext.h"	/* list of builtins */

#include <glob/strmatch.h>
#include <tilde/tilde.h>

#if defined (BUFFERED_INPUT)
#  include "input.h"
#endif

#if defined (ALIAS)
#  include "alias.h"
#endif

#if defined (HISTORY)
#  include "bashhist.h"
#endif

extern int allow_null_glob_expansion;

static int
command_name_is (command, name) 
     COMMAND* command;
     char* name;
{
  switch(command->type) {
    case cm_simple: 
       return (strcmp(command->value.Simple->words->word->word,name)); 
       break;

    case cm_connection: 
       return(strcmp(command->value.Connection->first->value.Simple->words->word->word,name));
       break;

    default:
       abort(); 
       break;
   }
}

static int
has_more_commands (command)
     COMMAND* command;
{
   if (command->type == cm_connection) return 1;
   return 0;
}

COMMAND*
next_command (command)
     COMMAND* command;
{
   return ( command->value.Connection->second );
}

void
append_to_word_list (w, l)
   WORD_DESC* w;
   WORD_LIST* l;
{
   while(l->next!=NULL) l = l->next;
   l->next = (WORD_LIST *)xmalloc(sizeof(WORD_LIST));
   l = l->next;
   l->word = w;
   l->next = NULL;
}

void
concat_word_list (l1, l2)
   WORD_LIST* l1;
   WORD_LIST* l2;
{
   while(l1->next!=NULL) l1 = l1->next;
   l1->next = l2;
}

void 
merge_commands (c1, c2)
   COMMAND* c1;
   COMMAND* c2;
{
   if (c2->type == cm_connection)
      c2 = c2->value.Connection->first;

   if (c1->value.Simple->words == NULL) {
      c1->value.Simple->words = copy_word_list(c2->value.Simple->words);
   } else {
       append_to_word_list(make_bare_word("|"),c1->value.Simple->words);
       concat_word_list(c1->value.Simple->words,
                        copy_word_list(c2->value.Simple->words));
   }
}

WORD_LIST*
read_sort_keys(command)
   COMMAND* command;
{
   if (command->type == cm_connection)
      command = command->value.Connection->first;

   return command->value.Simple->words->next;
}

int
has_unsuported_key_format(command)
   COMMAND* command;
{
   WORD_LIST* key;
   if (command->type == cm_connection)
      command = command->value.Connection->first;
   for(key = command->value.Simple->words; key != NULL; key = key->next) {
      if ( strncmp(key->word->word,"-k",2) == 0 && strlen(key->word->word) > 2) {
         return 1;
      }
   }
   return 0;
}

WORD_LIST*
read_partition_keys(command)
   COMMAND* command;
{
   int key_count; // number of partitioning keys
   char key_str[20];
   WORD_LIST* last;
   WORD_LIST* key;
   WORD_LIST* partition_keys = NULL;

   if (command->type == cm_connection)
      command = command->value.Connection->first;

   key_count = 0;
   for(key = command->value.Simple->words->next; key != NULL; key = key->next) {
     if ( strncmp(key->word->word,"-k",2) == 0 && strlen(key->word->word) == 2) {
       if (partition_keys == NULL) {
         if (key->next == NULL) return NULL;
         partition_keys = make_word_list(key->word,make_word_list(key->next->word,NULL));
         last = key->next;
         key = key->next; // for loop will skip one more
         key_count++;
       } else if (strcmp(key->next->word->word,last->word->word) != 0 ) {
         concat_word_list(partition_keys, make_word_list(key->word,make_word_list(key->next->word,NULL)));
         last = key->next;
         key = key->next; // for loop will skip one more
         key_count++;
       } else {
         break; // early partitioning
       }
     } 
   }

   //return partition_keys;
   sprintf(key_str,"-k1,%d",key_count);
   return make_word_list(make_bare_word(key_str),NULL);
}

WORD_DESC*
read_sort_delimiter(command)
   COMMAND* command;
{
   WORD_LIST* key;

   if (command->type == cm_connection)
      command = command->value.Connection->first;

   for(key = command->value.Simple->words->next; key != NULL; key = key->next) {
      if (key->next == NULL) break;
      if (strncmp(key->word->word,"-t",2) == 0) return copy_word(key->next->word);
   }

   return make_bare_word("\\t");
}

int
sort_has_options(command)
    COMMAND* command;
{
   if (command->type == cm_connection)
      command = command->value.Connection->first;
   
   return command->value.Simple->words->next != NULL;
}

char*
word_list_to_string(words, sep, quote)
     WORD_LIST* words;
     char* sep;
     int quote;
{
   ARRAY* a;
   char* str;

   a = array_from_word_list(words);
   str = array_to_string(a,sep,quote);
   array_dispose(a);
   return str;
}

COMMAND* 
transform_command (command,command2,sort,inputs,output)
     COMMAND* command; /* mapper */
     COMMAND* command2; /* reducer */
     COMMAND* sort;
     WORD_LIST* inputs;
     WORD_DESC* output;
{
   WORD_LIST* p; /* inputs iterator */
   WORD_LIST* l; /* inputs iterator */
   WORD_LIST* inputs2; /* copy of inputs for use in user display */
   WORD_LIST* command_name_words;
   char* hadoop[10]; /* hadoop command line skeleton */
   char* streaming_jar; /* path to the streaming jar */
   char untar[] = "( tar zxf parbash_app.tar.gz ; cat ) | ";
   COMMAND* tar_cmd = NULL;

   SHELL_VAR* parbash_app = find_variable("PARBASH_APP");
   SHELL_VAR* parbash_hadoop = find_variable("PARBASH_HADOOP");
   SHELL_VAR* parbash_hadoop_jobconf = find_variable("PARBASH_HADOOP_JOBCONF");
   if (parbash_hadoop_jobconf && array_p(parbash_hadoop_jobconf) == 0) {
      report_error("PARBASH_HADOOP_JOBCONF must be an array. Try PARBASH_HADOOP_JOBCONF=(\"jobconf.opt.example=1\" \"jobconf.opt.example2=2,2\")");
      return NULL;
   }

   SHELL_VAR* parbash_home = find_variable("PARBASH_HOME");
   if (parbash_home == NULL) {
      report_error("PARBASH_HOME not set.");
      return NULL;
   }

   /* <path> + '/' + 'streaming.jar' + \0 */
   streaming_jar = xmalloc(strlen(parbash_home->value)+1+13+1);
   strcpy(streaming_jar,parbash_home->value);
   strcat(streaming_jar,"/streaming.jar");

   /* hadoop must be on the PATH */
   hadoop[0] = "hadoop";
   if (parbash_hadoop != NULL) hadoop[0] = value_cell(parbash_hadoop);

   hadoop[1] = "jar";
   hadoop[2] = streaming_jar;

   hadoop[3] = "-jobconf";
   hadoop[4] = "mapred.job.reuse.jvm.num.tasks=-1"; 
   hadoop[5] = 0;
  
   if (parbash_app) {
      hadoop[5] = "-file";
      hadoop[6] = "parbash_app.tar.gz";
      hadoop[7] = 0;
   }

   /* this should never be the case */
   if(inputs == NULL || output == NULL) abort();

   if (command->type == cm_connection) 
      command = command->value.Connection->first;

   /* use hadoop as backend */
   WORD_LIST* h = strvec_to_word_list (hadoop, 1, 0);

   /* append any user-specified options */
   if (parbash_hadoop_jobconf != NULL) {
      /* insert "-jobconf" ahead of each word in user options */
      WORD_LIST* user_opts = array_to_word_list(array_cell(parbash_hadoop_jobconf));
      for (p = NULL, l = user_opts; l; p = l, l = l->next) {
         if (p==NULL) user_opts = make_word_list(make_bare_word("-jobconf"),user_opts);
         else p->next = make_word_list(make_bare_word("-jobconf"),l);
      }
      concat_word_list(h, user_opts);
   }

   /* add application files */
   if (parbash_app != NULL) {
      char* tar[10] = { "tar", "zcf", "parbash_app.tar.gz", 0 };
      tar[3] = value_cell(parbash_app);
      tar[4] = 0;
      tar_cmd = copy_command(command);
      dispose_words(tar_cmd->value.Simple->words);
      tar_cmd->value.Simple->words = strvec_to_word_list(tar, 1, 0);
   }

   inputs2 = copy_word_list(inputs); /* for use with command display */
   inputs = copy_word_list(inputs);

   /* insert "-input" ahead of each word in inputs */
   for (p = NULL, l = inputs; l; p = l, l = l->next) {
      if (p==NULL) inputs = make_word_list(make_bare_word("-input"),inputs);
      else p->next = make_word_list(make_bare_word("-input"),l);
   }

   concat_word_list(h,inputs);
   concat_word_list(h,make_word_list(make_bare_word("-output"),make_word_list(copy_word(output),NULL)));

   /* append inputs to the name of the command for user display */
   concat_word_list(inputs2, copy_word_list(command->value.Simple->words->next)); /* after first command name */
   command_name_words = make_word_list(copy_word(command->value.Simple->words->word),inputs2);

   /* append output to the name of the command for user display */
   if (command2 == NULL) concat_word_list(command_name_words, make_word_list(make_bare_word(">"),make_word_list(copy_word(output),NULL)));


   if (command2 == NULL || command == command2) {
      WORD_LIST* w = make_word_list(make_bare_word("-reducer"),make_word_list(make_bare_word("cat"),NULL));
      concat_word_list(h,w);
   } else {
      if (command2->type == cm_connection) 
         command2 = command2->value.Connection->first;
      if (command_name_is(command2,"sort")!=0) {
         concat_word_list(command_name_words,make_word_list(make_bare_word("|sort|"),copy_word_list(command2->value.Simple->words)));
         concat_word_list(command_name_words, make_word_list(make_bare_word(">"),make_word_list(copy_word(output),NULL)));
         char* agg_word = word_list_to_string(command2->value.Simple->words," ",1);
         WORD_LIST* a = make_word_list(make_bare_word("-reducer"),make_word_list(make_bare_word(agg_word),NULL));
         FREE(agg_word);
         concat_word_list(h,a);
      } else { // command2 is sort
	 abort();
      }
   }

   /* create command name to be used for user display */
   char* command_name = word_list_to_string(command_name_words," ",0);
   char* job_name = word_list_to_string(make_word_list(make_bare_word("mapred.job.name="),make_word_list(make_bare_word(command_name),NULL)),"",0);

   /* prepend untarring command for parbash app */
   if (parbash_app) {
      command->value.Simple->words = 
        make_word_list(make_bare_word(untar), command->value.Simple->words);  
   }

   /* actual commands to be run by MR */
   char* command_word = word_list_to_string(command->value.Simple->words," ",1);

   WORD_LIST* w = make_word_list(make_bare_word("-mapper"),make_word_list(make_bare_word(command_word),make_word_list(make_bare_word("-jobconf"),make_word_list(make_bare_word(job_name),NULL))));
   concat_word_list(h,w);

   /* partitioner */
   if (sort && has_unsuported_key_format(sort)) {
      report_error("-k<spec> is not supported. use -k <spec> instead.");
      dispose_words(h);
      dispose_words(command_name_words);
      FREE(streaming_jar);
      FREE(command_name);
      FREE(command_word);
      FREE(job_name);
      return NULL; 
   }
   if (sort && sort_has_options(sort)) {
	// sort -k1,1  -k3,4 -t '\t' .. Paritioner in streaming jar ignores extra params
        WORD_LIST* sort_keys = copy_word_list(read_sort_keys(sort));
        WORD_DESC* sort_delimiter = read_sort_delimiter(sort);
        if (strcmp(sort_delimiter->word,"")==0) { 
           report_error("sort delimiter syntax error");
           dispose_words(h);
           dispose_words(command_name_words);
           dispose_words(sort_keys);
           dispose_word(sort_delimiter);
           FREE(streaming_jar);
           FREE(command_name);
           FREE(command_word);
           FREE(job_name);
           return NULL; 
        }

        // sort -k1,1 -k1,1 -k3,4 -t '@' partition keys is always a prefix of sorted keys
        WORD_LIST* partition_keys = read_partition_keys(sort);
        if (partition_keys == NULL) { 
           report_error("sort options syntax error"); 
           dispose_words(h);
           dispose_words(command_name_words);
           dispose_words(sort_keys);
           dispose_word(sort_delimiter);
           FREE(streaming_jar);
           FREE(command_name);
           FREE(command_word);
           FREE(job_name);
           return NULL; 
        }

        char* stream_mapred_key_fields = word_list_to_string(make_word_list(make_bare_word("stream.mapred.key.fields="),sort_keys)," ",1);
        dispose_words(sort_keys);

        WORD_LIST* w = make_word_list(make_bare_word("-jobconf"),make_word_list(make_bare_word(stream_mapred_key_fields),NULL));
        FREE(stream_mapred_key_fields);

        char* stream_map_output_field_separator = word_list_to_string(make_word_list(make_bare_word("stream.map.output.field.separator="),make_word_list(copy_word(sort_delimiter),NULL)),"",1);
        dispose_word(sort_delimiter);

        w = make_word_list(make_bare_word("-jobconf"),make_word_list(make_bare_word(stream_map_output_field_separator),w));
        FREE(stream_map_output_field_separator);

        char* mapred_text_key_partitioner_options = word_list_to_string(make_word_list(make_bare_word("mapred.text.key.partitioner.options="),partition_keys)," ",1);
        dispose_words(partition_keys);

        w = make_word_list(make_bare_word("-partitioner"),make_word_list(make_bare_word("org.apache.hadoop.streaming.KeyFieldBasedPartitioner"),make_word_list(make_bare_word("-jobconf"),make_word_list(make_bare_word(mapred_text_key_partitioner_options),w))));
        FREE(mapred_text_key_partitioner_options);

        concat_word_list(h,w);
   }

   COMMAND* mr = copy_command(command);
   dispose_words(mr->value.Simple->words);
   mr->value.Simple->words = h;

   if (tar_cmd)
      mr = command_connect(tar_cmd, mr, ';');

   dispose_words(command_name_words);
   FREE(streaming_jar);
   FREE(command_name);
   FREE(command_word);
   FREE(job_name);

   return mr;
}

void
remove_storage_prefix(list)
    WORD_LIST* list;
{
   WORD_LIST* l;
   for(l = list; l; l = l->next) {
      WORD_DESC* tmp = l->word;
      l->word = make_bare_word((char*)(l->word->word+5));
      dispose_word(tmp);
   }
}

WORD_LIST*
read_input_spec(command)
     COMMAND *command;
{
  WORD_LIST* w;
  WORD_LIST* prev;
  switch(command->type) {
     case cm_simple:
         /* start from the second entry (skip the command name) */
         for(prev = command->value.Simple->words, w = prev->next; 
             w; 
             prev = w, w = w->next) {

            if (w->word->word[0] == '-') continue;
            //remove_storage_prefix(w);

            /* remove 'cat' filename parameters. 'cat' will process
               input as a mapper and apply transformations using flags if any.*/
            prev->next=NULL;
            return w;
         } 
         break;

     case cm_connection:
         /* start from the second entry (skip the command name) */
         for(prev = command->value.Connection->first->value.Simple->words, w = prev->next; w; prev = w, w = w->next) {
            if (w->word->word[0] == '-') continue;
           //remove_storage_prefix(w);

            /* remove 'cat' filename parameters. 'cat' will process
               input as a mapper and apply transformations using flags if any.*/
            prev->next = NULL;
            return w;
         } 
         break;
   }
}


WORD_DESC*
read_output_spec(command)
     COMMAND* command;
{
   WORD_DESC* fn;

   switch(command->type) {
      case cm_simple:
          if ( command->value.Simple->redirects == NULL || command->value.Simple->redirects->instruction != r_output_direction ) { return NULL; }
          fn = copy_word(command->value.Simple->redirects->redirectee.filename);
          dispose_redirects(command->value.Simple->redirects);
          command->value.Simple->redirects = NULL;
          return fn;
          break;

      case cm_connection:
          if ( command->value.Connection->second->value.Simple->redirects == NULL || command->value.Connection->second->value.Simple->redirects->instruction != r_output_direction ) { return NULL; }
          fn = copy_word(command->value.Connection->second->value.Simple->redirects->redirectee.filename);
          dispose_redirects(command->value.Connection->second->value.Simple->redirects);
          command->value.Connection->second->value.Simple->redirects = NULL;
          return fn;
          break;

      default:
          abort();
          break;
   }
}

int
pipeline_has_simple_commands_only(command)
     COMMAND* command;
{
   while(command) {
      if (command->type == cm_connection && 
          command->value.Connection->first->type != cm_simple) {
          return 0;
      } else if (command->type == cm_simple) { 
          return 1;
      } else if (command->type == cm_connection) {
         command = command->value.Connection->second;
      } else {
          return 0;
      }
   }
   return 1;
}

int 
non_s3n_inputs(command)
     COMMAND* command;
{
  WORD_LIST* w;

  if (command->type == cm_connection) 
     command = command->value.Connection->first;

  for(w = command->value.Simple->words->next; w; w = w->next) {
     if (w->word->word[0] == '-') continue;
     if (strncmp(w->word->word,"s3n:",4)==0) return 1;
  }
  return 0;
}

int 
all_s3n_inputs(command)
     COMMAND* command;
{
  WORD_LIST* w;

  if (command->type == cm_connection) 
     command = command->value.Connection->first;

  for(w = command->value.Simple->words->next; w; w = w->next) {
     if (w->word->word[0] == '-') continue;
     if (strncmp(w->word->word,"s3n:",4)!=0) return 0;
  }
  return 1;
}

int 
non_hdfs_inputs(command)
     COMMAND* command;
{
  WORD_LIST* w;

  if (command->type == cm_connection) 
     command = command->value.Connection->first;

  for(w = command->value.Simple->words->next; w; w = w->next) {
     if (w->word->word[0] == '-') continue;
     if (strncmp(w->word->word,"hdfs:",5)==0) return 1;
  }
  return 0;
}

int 
all_hdfs_inputs(command)
     COMMAND* command;
{
  WORD_LIST* w;

  if (command->type == cm_connection) 
     command = command->value.Connection->first;

  for(w = command->value.Simple->words->next; w; w = w->next) {
     if (w->word->word[0] == '-') continue;
     if (strncmp(w->word->word,"hdfs:",5)!=0) return 0;
  }
  return 1;
}

COMMAND*
transform_pipeline (command)
     COMMAND *command;
{
  WORD_LIST* inputs;
  WORD_DESC* output;
  COMMAND* sort = NULL;
  COMMAND* mr = NULL;

  /* return unchanged command if not a pipeline */
  if (command->value.Connection->connector != '|') return command;
   
  /* if pipeline does not start with cat return unchanged command */
  if (command_name_is(command,"cat")!=0) return command;

  /* extract input spec and make 'cat' be the first command of the
     mapper pipeline. this is in order to support cat command line
     options */

  if (allow_null_glob_expansion == 1) {
     report_error("nullglob option must be disabled");
     return NULL;
  }

  /* if cat files are not hadoop files return unchanged command */
  if (!non_hdfs_inputs(command) && !non_s3n_inputs(command)) {
     return command;
  }

  if ( (non_hdfs_inputs(command) && !all_hdfs_inputs(command)) ||  
       (non_s3n_inputs(command) && !all_s3n_inputs(command)) ) {
     report_error("all input files must be hdfs or s3n");
     return NULL;
  }

  inputs = read_input_spec(command); 

  /* if pipeline is not made up of only simple commands return error */
  if (!pipeline_has_simple_commands_only(command)) { 
     report_error("unsupported error. shell control constructres are not supported. only executables can be a part of the pipeline.");
     dispose_words(inputs);
     return NULL;
  }

  /* TODO: while not end of pipeline (multiple MR jobs in single pipeline)  */
 
     /* TODO: sort is the only command if followed by sort or it is last in the pipeline */

     /* TODO: sort | agg: sort followed by not sort or end, then next command is agg */

     /* while not sort or end of line, combine into a single mapper */  
     mr = make_bare_simple_command();
     merge_commands(mr,command);
     COMMAND* command2 = command;
     if(has_more_commands(command2)) {

         command2 = next_command(command2);

         while (command_name_is(command2,"sort")!=0 &&
                has_more_commands(command2)) {
            merge_commands(mr, command2);
            command2 = next_command(command2);
         }

         /* TODO: if sort and next is not sort or end of line, then it is agg */
         if (command_name_is(command2,"sort")==0) {
            if (has_more_commands(command2)) {
               /* TODO: for multiple commands in pipeline, output spec is not here */
               sort = copy_command(command2);
               command2 = copy_command(next_command(command2)); /* reducer */
               output = read_output_spec(command2);
               if (output == NULL) {
                  report_error("output of pipeline must be redirected to a file");
                  dispose_command(mr);
                  dispose_words(inputs);
                  return NULL;
               }
            } else {
		abort();
            }
         } else {
            /* TODO: for multiple commands in pipeline, output spec is not here */
            output = read_output_spec(command2);
            if (output == NULL) {
               report_error("output of pipeline must be redirected to a file");
               dispose_command(mr);
               dispose_words(inputs);
               return NULL;
            }
            merge_commands(mr, command2);
            command2 = NULL; /* no reducer */
         }
     }

     command = transform_command(mr, command2, sort, inputs, output);

     dispose_command(mr);
     dispose_command(command2);
     dispose_command(sort);
     dispose_words(inputs);
     dispose_word(output);

     /* TODO: multiple MR: add current hadoop command to transformed list */
         /* TODO: multiple MR: if list length > 1 then generate intermediate files */
  
   return command;
   /* TODO: multiple MR: if last element of transformed pipeline does not have redirection
         then, add 'hfs cat' as the last element */    
}
