use Text::ParseWords;
use Switch;
# ---------------------------------------------------------------------
# Token-type codes and tokenizer-context codes.
# Single-character operators reuse their own ASCII value; multi-character
# operators, word-like tokens and reserved words are numbered from 128
# upward, so the groups never collide.
# ---------------------------------------------------------------------

# Stream control and single-character operator tokens.
use constant {
    TOKEN_END_OF_STREAM => -1,
    TOKEN_NEWLINE       => -2,            # \n
    TOKEN_SEMI          => ord(";"),
    TOKEN_AMP           => ord("&"),
    TOKEN_BAR           => ord("|"),
    TOKEN_LESS          => ord("<"),
    TOKEN_GREAT         => ord(">"),
    TOKEN_LPAREN        => ord("("),
    TOKEN_RPAREN        => ord(")"),
    TOKEN_LBRACE        => ord("{"),
    TOKEN_RBRACE        => ord("}"),
    TOKEN_BANG          => ord("!"),
    TOKEN_LFANG         => 155,           # [
    TOKEN_RFANG         => 156,           # ]
};

# Multi-character operator tokens.
use constant {
    TOKEN_AND_IF    => 128,               # &&
    TOKEN_OR_IF     => 129,               # ||
    TOKEN_DSEMI     => 130,               # ;;
    TOKEN_DLESS     => 131,               # <<
    TOKEN_DGREAT    => 132,               # >>
    TOKEN_LESSAND   => 133,               # <&
    TOKEN_GREATAND  => 134,               # >&
    TOKEN_LESSGREAT => 135,               # <>
    TOKEN_DLESSDASH => 136,               # <<-
    TOKEN_CLOBBER   => 137,               # >|
};

# Word-like tokens.
use constant {
    TOKEN_WORD       => 138,
    TOKEN_NAME       => 139,              # WORD and NAME are not distinguished yet
    TOKEN_ASSIGNMENT => 140,              # name=value
    TOKEN_IO_NUMBER  => 141,              # fd number directly before < or >
};

# Reserved words.
use constant {
    TOKEN_IF    => 142,
    TOKEN_THEN  => 143,
    TOKEN_ELSE  => 144,
    TOKEN_ELIF  => 145,
    TOKEN_FI    => 146,
    TOKEN_DO    => 147,
    TOKEN_DONE  => 148,
    TOKEN_CASE  => 149,
    TOKEN_ESAC  => 150,
    TOKEN_WHILE => 151,
    TOKEN_UNTIL => 152,
    TOKEN_FOR   => 153,
    TOKEN_IN    => 154,
};

# Tokenizer contexts (numbers presumably track the POSIX shell grammar
# token-recognition rules -- confirm against the parser that consumes them).
use constant {
    BASE_CONTEXT    => 0,
    RULE_1_CONTEXT  => 1,
    RULE_5_CONTEXT  => 5,
    RULE_6_CONTEXT  => 6,
    RULE_7a_CONTEXT => 7,
    RULE_7b_CONTEXT => 8,
    RULE_8_CONTEXT  => 9,
    EOS             => -1,
};

#
# Each element pushed onto @tokens is a hash reference of the form:
#   {
#     tokenType => <one of the TOKEN_* constants above>,
#     text      => <the token's text>,
#     start     => <index of the first raw token consumed>,
#     end       => <index of the last raw token consumed>,
#   }
#
#

my $inputStream;    # whole input file slurped into a single string
my @tokenStream;    # raw quotewords() output (may contain empty entries)
my @tokenList;      # @tokenStream with empty entries filtered out
my @tokens;         # final classified token records (hash refs)
my $debug = 1;      # when true, comment text is accumulated while skipped
my $index;          # cursor into @tokenList
my $length;         # scalar(@tokenList)
my ($tokenType, $text, $start, $end);    # fields of the token being built

# Concatenates each line of the named shell file onto the global
# $inputStream.  Dies if the file cannot be opened.
sub getInputStream {
    my $filename = shift;
    # BUG FIX: was an unchecked bareword 2-arg open ("<".$filename); a
    # failed open silently produced an empty stream.
    open my $in, '<', $filename or die "cannot open $filename: $!";
    $inputStream = '' unless defined $inputStream;   # avoid undef-concat warning
    $inputStream .= $_ while (<$in>);
    close $in;
}

# Splits the global $inputStream into raw tokens in @tokenStream.
# quotewords() honours shell-style quoting; the 'delimiters' keep-mode
# makes it return the metacharacters themselves as tokens, so the scanner
# sees every ; & $ { } ( ) | < > ` and whitespace run individually.
# (A dead variant that built the class from a %bourne table, previously
# hidden behind a malformed "=f ... =cut" POD block, has been removed.)
sub getTokenStream {
    my $de = '[\s;&${}()|<>`]';
    @tokenStream = quotewords($de, 'delimiters', $inputStream);
}

# Copies @tokenStream into @tokenList, skipping undef and empty-string
# entries that quotewords() can produce between adjacent delimiters.
sub filter {
    for my $ch (@tokenStream) {
        # BUG FIX: the old test ($ch ne "") warned on undef entries; test
        # definedness explicitly, as the "Skip undef" comment intended.
        push @tokenList, $ch if (defined $ch && $ch ne "");
    }
}

# Shell script scanner.  Reads $infile, splits it into raw tokens,
# classifies each token and pushes a %token record onto @tokens
# (see the token-shape comment above the globals).  Finally dumps the
# token list to "tokens.tk" and returns @tokens.
sub scan {

    my $infile = shift;

    getInputStream($infile);

    getTokenStream();

    filter();
    $length = @tokenList;
    $index = 0;
    while ($index < $length) {
        my $record = 1;     # set to 0 for tokens we drop (blanks)
        $start = $index;
        $end = $index;
        $text = $tokenList[$index];
        switch ($tokenList[$index]) {
            # Command substitution: swallow everything up to the closing backtick.
            case "`" {
                $tokenType = TOKEN_WORD;
                $text = "`";
                $index++;
                while (1) {
                    $text .= $tokenList[$index];
                    if ($tokenList[$index] eq "`") {
                        last;
                    }
                    $index++;
                }
                last;
            }
            case " " {
                $record = 0;    # blanks separate tokens, never recorded
                last;
            }
            case "\t" {
                $record = 0;
                last;
            }
            case "[" {
                $tokenType = TOKEN_LFANG;
                last;
            }
            case "]" {
                $tokenType = TOKEN_RFANG;
                last;
            }
            case "\$" { # parameter/command expansion: $var, $(...) or ${...}
                $tokenType = TOKEN_WORD;
                $text = tokenize();
                last;
            }
            case "\n" {
                $tokenType = TOKEN_NEWLINE;
                $text = "NEWLINE";
                last;
            }
            case "&" {
                if ($tokenList[$index + 1] eq "&") {
                    $tokenType = TOKEN_AND_IF;
                    $text = "&&";
                    $end = $index + 1;
                    $index++;
                }
                elsif ($tokenList[$index + 1]=~/\d+/) {
                    # "&<digits>" (e.g. the tail of 2>&1) stays one WORD.
                    $tokenType = TOKEN_WORD;
                    $text = "&".$tokenList[$index + 1];
                    $index++;
                }
                else {
                    $tokenType = TOKEN_AMP;
                }
                last;
            }
            case '|' {
                if ($tokenList[$index + 1] eq "|") {
                    $tokenType = TOKEN_OR_IF;
                    $text = "||";
                    $end = $index + 1;
                    $index++;
                }
                else {
                    $tokenType = TOKEN_BAR;
                }
                last;
            }
            case ";" {
                if ($tokenList[$index + 1] eq ";") {
                    $tokenType = TOKEN_DSEMI;
                    $text = ";;";
                    $end = $index + 1;
                    $index++;
                }
                else {
                    $tokenType = TOKEN_SEMI;
                }
                last;
            }
            case "<" {
                if ($tokenList[$index + 1] eq "<") {
                    if ($tokenList[$index + 2] eq "-") {
                        $tokenType = TOKEN_DLESSDASH;
                        $text = "<<-";
                        $end = $index + 2;
                        $index = $index + 2;
                    }
                    else {
                        $tokenType = TOKEN_DLESS;
                        $text = "<<";
                        $end = $index + 1;
                        $index++;
                    }
                }
                elsif ($tokenList[$index + 1] eq "&") {
                    $tokenType = TOKEN_LESSAND;
                    $text = "<&";
                    $end = $index + 1;
                    $index++;
                }
                elsif ($tokenList[$index + 1] eq ">") {
                    $tokenType = TOKEN_LESSGREAT;
                    $text = "<>";
                    $end = $index + 1;
                    $index++;
                }
                else {
                    $tokenType = TOKEN_LESS;
                }
                last;
            }
            case ">" {
                if ($tokenList[$index + 1] eq ">") {
                    $tokenType = TOKEN_DGREAT;
                    $text = ">>";
                    $end = $index + 1;
                    $index++;
                }
                elsif ($tokenList[$index + 1] eq "&") {
                    $tokenType = TOKEN_GREATAND;
                    $text = ">&";
                    $end = $index + 1;
                    $index++;
                }
                elsif ($tokenList[$index + 1] eq "|") {
                    $tokenType = TOKEN_CLOBBER;
                    $text = ">|";
                    $end = $index + 1;
                    $index++;
                }
                else {
                    $tokenType = TOKEN_GREAT;
                }
                last;
            }
            case "{" {
                $tokenType = TOKEN_LBRACE;
                last;
            }
            case "}" {
                $tokenType = TOKEN_RBRACE;
                last;
            }
            case "(" {
                $tokenType = TOKEN_LPAREN;
                last;
            }
            case ")" {
                $tokenType = TOKEN_RPAREN;
                last;
            }
            case "!" {
                $tokenType = TOKEN_BANG;
                last;
            }
            case "if" {
                $tokenType = TOKEN_IF;
                last;
            }
            case "then" {
                $tokenType = TOKEN_THEN;
                last;
            }
            case "elif" {
                $tokenType = TOKEN_ELIF;
                last;
            }
            case "else" {
                $tokenType = TOKEN_ELSE;
                last;
            }
            case "fi" {
                $tokenType = TOKEN_FI;
                last;
            }
            case "do" {
                $tokenType = TOKEN_DO;
                last;
            }
            case "done" {
                $tokenType = TOKEN_DONE;
                last;
            }
            case "case" {
                $tokenType = TOKEN_CASE;
                last;
            }
            case "esac" {
                $tokenType = TOKEN_ESAC;
                last;
            }
            case "while" {
                $tokenType = TOKEN_WHILE;
                last;
            }
            case "until" {
                $tokenType = TOKEN_UNTIL;
                last;
            }
            case "for" {
                $tokenType = TOKEN_FOR;
                last;
            }
            case "in" {
                $tokenType = TOKEN_IN;
                last;
            }
            # A lone "=" token: merge with the preceding WORD into an
            # assignment when that word is a valid shell name.
            case "=" {
                if (!@tokens) {
                    # Robustness fix: nothing precedes the "=", so there is
                    # no name to merge with; record it as a plain WORD
                    # (previously this dereferenced the undef from pop).
                    $tokenType = TOKEN_WORD;
                    last;
                }
                my %temp = %{pop @tokens};
                # BUG FIX: was /^[a-zA-z_][\w_]+\$/ -- mistyped range
                # "a-zA-z", "\$" matched a literal '$' rather than anchoring
                # at end-of-string, and "+" rejected one-letter names.
                if ($temp{text}=~/^[a-zA-Z_]\w*$/) {
                    $tokenType = TOKEN_ASSIGNMENT;
                    $text = $temp{text}."=";
                    $index++;
                    skipBlank();
                    $text .= tokenize();
                }
                else {
                    push @tokens, \%temp;
                    $tokenType = TOKEN_WORD;
                }
                last;
            }
            else {
                # '#' starts a comment: consume to end of line, emit NEWLINE.
                if (index($tokenList[$index], '#') == 0) {
                    my $debugStream = "#";
                    while ($index < $length && $tokenList[$index] ne "\n") {
                        $index++;
                        # defined-guard added: the increment can step past
                        # the last token when a comment ends the file.
                        $debugStream .= $tokenList[$index]
                            if ($debug && defined $tokenList[$index]);
                    }
                    $tokenType = TOKEN_NEWLINE;
                    $text = "NEWLINE";
                    last;
                }
                # All-digit token is an IO_NUMBER when directly followed by a
                # redirection operator.  BUG FIX: was /\d+/, which matched any
                # token merely containing a digit (e.g. "file2>").
                elsif ($tokenList[$index]=~/^\d+$/)  {
                    if ($tokenList[$index + 1] eq ">" || $tokenList[$index + 1] eq "<") {
                        $tokenType = TOKEN_IO_NUMBER;
                    }
                    else { # this should perhaps throw an exception.
                        $tokenType = TOKEN_WORD;
                    }
                    last;
                }
                my $pos = index($tokenList[$index], "=");
                # "name=..." inside a single token is an assignment word.
                if ($tokenList[$index]=~/^[a-zA-Z_][\w_]*=/) {
                    $tokenType = TOKEN_ASSIGNMENT;
                    if ($pos == length($tokenList[$index]) - 1) {
                        # "name=": the value is the following token(s).
                        $index++;
                        skipBlank();
                        $text .= tokenize();
                    }
                }
                elsif ($pos <= 0) {
                    $tokenType = TOKEN_WORD;
                }
                else {
                    # '=' embedded in a token that is not a valid name
                    # (e.g. "2a=b").  BUG FIX: this branch used to be empty,
                    # leaving $tokenType holding the previous token's type.
                    $tokenType = TOKEN_WORD;
                }
                last;
            }

        }
        if ($record) {
            my %token = (
                    tokenType => $tokenType,
                    text => $text,
                    start => $start,
                    end => $end,
                );
            push @tokens, \%token;
        }
        $index++;
    }
    # End-of-stream sentinel.  BUG FIX: the keys were written as $start and
    # $end (variable values used as keys) instead of the literal strings
    # 'start' and 'end'.
    my %endToken = (
        tokenType => TOKEN_END_OF_STREAM,
        text => "END_OF_STREAM",
        start => $index,
        end => $index,
    );
    push @tokens, \%endToken;
    # Debug dump of the token list.  BUG FIX: was an unchecked bareword
    # 2-arg open, and the close() named a mistyped handle (OUPUTFILE).
    open my $outfh, '>', 'tokens.tk' or die "cannot write tokens.tk: $!";
    for my $tok (@tokens) {
        print {$outfh} "$$tok{tokenType}\n$$tok{text}\n========\n";
    }
    close $outfh;
    return @tokens;
}

# Entry point: expects exactly one argument, the shell script to scan.
if (1 == @ARGV) {
    scan($ARGV[0]);
}
else {
    # BUG FIX: the old message said "Only two args!" although exactly one
    # argument is expected; report proper usage and fail.
    print STDERR "Usage: $0 <shell-script>\n";
    exit 1;
}

# Assembles a compound WORD starting at the global cursor $index:
# a backtick command substitution, a $-expansion ($name, $(...) or ${...}),
# or a plain token.  Returns the assembled text and leaves $index on the
# last consumed token; the caller advances past it.
sub tokenize() 
{
    my $text;
    skipBlank();
    if ($tokenList[$index] eq "`") {
        # `...` -- copy everything up to the closing backtick.
        $index++;
        $text = "`";
        # BUG FIX: an unterminated backtick previously looped forever,
        # reading past the end of @tokenList; bound the scan.
        while ($index < @tokenList && $tokenList[$index] ne "`") {
            $text = $text.$tokenList[$index];
            $index++;
        }
        $text .= "`";
    }
    elsif ($tokenList[$index] eq "\$") {
        $text = "\$";
        $index++;
        if ($tokenList[$index] ne "(" && $tokenList[$index] ne "{") {
            # Simple $name expansion: exactly one following token.
            $text .= $tokenList[$index];
            return $text;
        }
        $text .= $tokenList[$index];
        my $ok = 1;    # nesting depth of (...) or {...}
        my ($lpattern, $rpattern);
        if ($tokenList[$index] eq "(") {
            $lpattern = "(";
            $rpattern = ")";
        }
        else {
            $lpattern = "{";
            $rpattern = "}";
        }
        # Copy tokens until the matching close bracket brings depth to 0.
        # BUG FIX: an unmatched bracket previously looped forever; stop at
        # the end of @tokenList.
        while ($ok && $index < $#tokenList) {
            $index++;
            $text .= $tokenList[$index];
            $ok-- if (index($tokenList[$index], $rpattern) == 0);
            $ok++ if (index($tokenList[$index], $lpattern) == 0);
        }
    }
    else {
        # Ordinary word: take the token as-is.
        $text = $tokenList[$index];
    }
    return $text;
}


# Advances the global cursor $index past whitespace tokens (space, tab,
# newline).  Bounds check added: running off the end of @tokenList used to
# compare undef and emit "uninitialized value" warnings.
sub skipBlank()
{
    while ($index < @tokenList
            && ($tokenList[$index] eq " "
                || $tokenList[$index] eq "\t"
                || $tokenList[$index] eq "\n")) {
        $index++;
    }
}