package LogParse;
use warnings;
use strict;
use Storable qw(nstore retrieve);
use POSIX;
#use Exporter;
#our @EXPORT = qw();
# custom modules
use SimpleError;
use FieldHash;

# placeholder value for header entries that are missing or empty
use constant NA_VALUE => 'n/a';

use constant {
    # column indices of one whitespace-split DUT test-log line:
    # Test#  Pin  PF    Value  L-Limit  U-Limit Unit     DataName Comment/PinName
    DUT_TESTNO      => 0,
    DUT_PIN         => 1,
    DUT_PF          => 2,
    DUT_VALUE       => 3,
    DUT_L_LIMIT     => 4,
    DUT_U_LIMIT     => 5,
    DUT_UNIT        => 6,
    DUT_DATA_NAME   => 7,
    DUT_COMMENT     => 8,
    DUT_PIN_NAME    => 8, # yes, same thing as 'DUT_COMMENT'
};
# field names of the log structure (one record per test item line)
use constant LS_FIELD_NAMES => [
  qw(line_no test_no pin_char value_index l_limit u_limit unit data_name comment pin_name item_name sql_type)
];
# meta field names and their SQL column types (kept in matching order)
use constant TESTDATE_NAMES     => qw( Test_Start_Date Test_End_Date );
use constant SQL_TYPES_TESTDATE => qw( DATETIME DATETIME );
use constant TESTDATA_NAMES     => qw( Dut_No  IC_No    Waf_No   ICWaf_No PF      Bin     Cat     Xadr     Yadr );
use constant SQL_TYPES_TESTDATA => qw( TINYINT INTEGER  SMALLINT SMALLINT CHAR(4) TINYINT TINYINT SMALLINT SMALLINT );
use constant SQL_TYPE_VALUE     => 'DOUBLE';
use constant SQL_TYPE_PF        => 'VARCHAR(2)';
# field accessors: FieldHash-tied hashes; the backing record array is
# swapped in via (tied %hash)->data(\@record), see usages below
tie our %LS_Fields, 'FieldHash', LS_FIELD_NAMES;
tie our %TD_Fields, 'FieldHash', [ TESTDATA_NAMES ];

### a tiny package
{   # Tied scalar class: every value assigned is stored with its leading
    # and trailing whitespace stripped, so reads always yield trimmed text.
    package LineChomper;

    # Tie constructor: bless a reference to a fresh anonymous scalar.
    sub TIESCALAR {
        my ($class) = @_;
        my $storage;
        return bless \$storage, $class;
    }

    # Trim surrounding whitespace, then keep the cleaned value.
    sub STORE {
        my ($self, $value) = @_;
        $value =~ s/^\s+//;
        $value =~ s/\s+$//;
        ${ $self } = $value;
    }

    # Plain read-back of the stored (already trimmed) value.
    sub FETCH {
        my ($self) = @_;
        return ${ $self };
    }
}

# Constructor: any key/value pairs passed in become the object's attributes.
sub new {
    my ($class, %attrs) = @_;
    my $self = { %attrs };
    return bless $self, $class;
}

# Merge the given key/value pairs into the object's attributes; later
# values overwrite existing ones.  Returns $self for chaining.
sub set {
    my ($self, %updates) = @_;
    $self->{$_} = $updates{$_} for keys %updates;
    return $self;
}

# Remember the path of the .log_a file to parse; returns the stored path.
sub set_log_file {
    my ($self, $log_file) = @_;
    return $self->{Log_File} = $log_file;
}

# Open the log file for reading and stash the handle in File_Handle.
# The file name comes from the optional argument, falling back to the
# Log_File attribute; having neither is a hard error.
# Returns the (lexical) filehandle.
sub open_log_file {
    my $self = shift;
    my $loga = defined $_[0] ? $_[0]
        : defined $self->{Log_File} ? $self->{Log_File}
        : error("no log file available");
    $self->{Log_File} = $loga;
    # FIX: use a lexical filehandle instead of the package-global bareword
    # LOG_INPUT, which was shared by every instance and every call and could
    # silently clobber a previously opened log
    open my $log_fh, '<', $self->{Log_File}
        or error("cannot open: $self->{Log_File}: $!");
    ## set the log file handle
    $self->{File_Handle} = $log_fh;
    return $self->{File_Handle};
}

# Close the log file handle if one is open and forget it; closing failures
# are only warned about.  Always returns 1 (idempotent).
sub close_log_file {
    my $self = shift;
    return 1 unless exists $self->{File_Handle};
    my $fh = delete $self->{File_Handle};
    close $fh
        or warning("problem when closing file: $self->{Log_File}: $!");
    1;
}

# Read the next die's complete record from the open log file handle into
# $self->{Buffer} (sections: Header, Date, TestData, Dut1Test), purging any
# previously buffered die first.
#
# return value:
# * unexpected EOF: -1
# * normal end: 0
# * buffer reading ok: 1
# * inconsistent number of lines: 2
sub fill_buffer {
    my $self = shift;
    if ($self->{tmp}{Log_End}) { # the $#END#$ tag sentinel
        delete $self->{tmp}{Log_End};
        return 0;
    }
    my $fh = $self->{File_Handle};
    # every assignment to $line is auto-trimmed by the LineChomper tie
    tie my $line, 'LineChomper';
    $self->{Buffer} = +{}; # purge previous buffer
    ## ignore (possible) leading lines before the first header tag
    while (1) {
        return -1 unless defined($line = <$fh>);
        last if $line =~ /^\$\#S?HEADER\#\$/;
    }
    ## $#HEADER#$
    ## the global first header contains additional information of the wafer
    ## which is named '$#SHEADER#$', the rest are named '$#HEADER#$'
    while (1) {
        return -1 unless defined($line = <$fh>);
        last if $line =~ /^\$\#DATE\#\$/;
        push @{ $self->{Buffer}{Header} }, $line;
    }
    ## $#DATE#$ (fixed 2 lines); the pushed values pass through the tie, so
    ## the buffered lines are whitespace-trimmed
    push(@{ $self->{Buffer}{Date} }, ($line = <$fh>)) for 1 .. 2;
    ## $#TESTDATA#$ tag and the following heading line ignored
    $line = <$fh> for 1 .. 2;
    ## #$TESTDATA$# (fixed 1 line)
    push( @{ $self->{Buffer}{TestData} }, ($line = <$fh>) );
    ## $#DUT?TEST#$
    ## tag '$#DUT1TEST#$' and the following heading line ignored
    $line = <$fh> for 1 .. 2;
    while (1) {
        return -1 unless defined($line = <$fh>);
        # a short/blank line or the end tag terminates this die's data block
        last if length($line) < 2 || $line eq '$#END#$';
        push @{ $self->{Buffer}{Dut1Test} }, $line;
    }
    ## feedback the reading state, to tell caller whether there is any more data
    ## or unexpected EOF occurred
    $self->{tmp}{Log_End} = 1 if ($line eq '$#END#$');
    # NOTE(review): the loop above can only exit via `last`, at which point
    # $line is defined, so this EOF guard looks unreachable -- confirm
    # before removing
    return -1 unless (defined $line);
    return 2 if ($self->{Num_Dut_Lines} && @{$self->{Buffer}{Dut1Test}} != $self->{Num_Dut_Lines});
    return 1;  # normal end of one die's test data
}

# Read a log-structure source file ($self->{LS_Source}): skip '#' comment
# lines and blanks, ignore everything before the "Test#..." heading line,
# and collect the trimmed data lines that follow it.
# Returns an array ref of those lines; missing LS_Source is a hard error.
sub read_ls_source {
    my $self = shift;
    error("log struct source file not present") unless defined $self->{LS_Source};
    my $file = $self->{LS_Source};
    # FIX: lexical filehandle instead of the package-global bareword LS_SRC
    open my $src_fh, '<', $file or error("cannot open log struct source '$file': $!");
    my $valid = 0;   # becomes true once the "Test#..." heading is seen
    my @lines;
    while (my $raw = <$src_fh>) {
        # trim leading/trailing whitespace inline (previously done through
        # the LineChomper tie; behavior is identical)
        (my $line = $raw) =~ s/(?:^\s+|\s+$)//g;
        next if $line =~ /^\#/ || $line =~ /^\s*$/;
        if ($line =~ /^Test\#/) {
            $valid = 1;
        } elsif ($valid) {
            push @lines, $line;
        }
    }
    close $src_fh or warning("problem when closing '$file': $!");
    return \@lines;
}

# Deduce the wafer's log structure (the per-item meta info) from one DUT's
# test-log lines ($logs, an array ref of trimmed lines).
# Fills $self->{Wafer}{Log_Struct}: element 0 is the field-name list,
# elements 1..N are one record array per log line.  Also records the
# expected per-die line count in $self->{Num_Dut_Lines}.  Returns 1.
sub create_log_struct {
    my $self = shift;
    my ($logs) = @_;
    my %comments; # testno => comment (group comment remembered from a '+' line)
    my $item_index = 0; # for synthesizing names of items without a DataName
    $self->{Wafer}{Log_Struct}[0] = LS_FIELD_NAMES; # first element is the hash keys
    ## deduce the log structure by the first die's testing logs
    for my $i (0 .. $#$logs) {
        my @fields = split /\s+/, $logs->[$i], 9; # total 9 items each log line
        ## comment string formatting: strip decorative punctuation runs from the ends
        (my $this_comment = $fields[DUT_COMMENT]) =~ s/(?:^[[:punct:]]{2,}|[[:punct:]]{2,}$)//g;
        $this_comment =~ s/,/;/g; # prevent .CSV format conflict
        ## initial scratch of the data item: fresh backing array for the tied hash
        (tied %LS_Fields)->data([ ]);
        # meta info initialization
        $LS_Fields{line_no}         = $i;
        $LS_Fields{test_no}         = $fields[DUT_TESTNO];
        $LS_Fields{pin_char}        = $fields[DUT_PIN];
        $LS_Fields{value_index}     = undef;                    # needs assignment below
        $LS_Fields{l_limit}         = $fields[DUT_L_LIMIT];
        $LS_Fields{u_limit}         = $fields[DUT_U_LIMIT];
        $LS_Fields{unit}            = $fields[DUT_UNIT];
        $LS_Fields{data_name}       = $fields[DUT_DATA_NAME];
        $LS_Fields{comment}         = $this_comment;            # default to current line's comment
        $LS_Fields{pin_name}        = '-';                      # default to '-' (none)
        $LS_Fields{item_name}       = $LS_Fields{data_name} ne '-' ? # if feasible, use DataName
                                      $LS_Fields{data_name} : 'Data' . $item_index++; # --> DataName module
        $LS_Fields{sql_type}        = 'DOUBLE';                 # change to 'VARCHAR(2)' for PF items
        ## modify according to the PIN characteristic:
        ## =: pattern, +: group judgement, Number: pin's measured data, ^: calculated data
        ## = and ^: comment/pin_name use default, Number: comment needs judge, pin_name uses current line's
        if ($fields[DUT_PIN] eq '+') {
            $comments{$fields[DUT_TESTNO]} = $this_comment; # store group comment
            $LS_Fields{value_index} = DUT_PF;
            $LS_Fields{sql_type} = 'VARCHAR(2)';
        } elsif ($fields[DUT_PIN] eq '=') {
            $LS_Fields{value_index} = DUT_PF;
            $LS_Fields{sql_type} = 'VARCHAR(2)';
        } elsif ($fields[DUT_PIN] eq '^') {
            $LS_Fields{value_index} = DUT_VALUE;
        } elsif ($fields[DUT_PIN] =~ /^\d+$/) { # for pin, the Comment/PinName field is used as PinName
            $LS_Fields{value_index} = DUT_VALUE;
            $LS_Fields{comment}     = $comments{$fields[DUT_TESTNO]}; # comment is from previous '+' line
            $LS_Fields{pin_name}    = $fields[DUT_COMMENT];     # current line's Comment/PinName is pin name
        } else {
            error( "OOPS! PIN=[". $fields[DUT_PIN] . "], what's this?");
        }
        # keep the finished backing array as this item's record
        push @{ $self->{Wafer}{Log_Struct} }, (tied %LS_Fields)->data();
    }
    ## number of data lines for each die (expected)
    $self->{Num_Dut_Lines} = @$logs;
    1;
}

## first die's processing: read the first die's buffer, harvest the wafer
## header summary into $self->{Wafer}, build (or load) the log structure,
## and record the column-name and SQL-type header rows.
## Returns the fill_buffer() read state of the first die.
sub init_parse {
    my $self = shift;
    ## read the log data of the first die
    my $read_state = $self->fill_buffer();
    error("error for first die's log data reading.") if ($read_state != 1);
    ## the wafer's testing summary: "Some-Key: Some-Value" header lines
    for (@{ $self->{Buffer}{Header} }) {
        next unless length($_) > 2;            # empty lines
        if (/^(\S+)\s*:\s*(\S*)/) {            # Some-Key: Some-Value (value may be empty)
            my ($key, $value) = ($1, $2);
            $key =~ s/[[:punct:]]/_/g;         # normalize key punctuation to '_'
            $self->{Wafer}{$key} = (defined($value) && $value ne '') ? $value : NA_VALUE();
        }
    }
    ## the log structure, if not present, will be created using the first die's test log (doing some parsing),
    ## unless there is another log structure source provided (which is one DUT's test log lines)
    ## and then, that source will be used instead
    ## this is for case that "fp off; al on" was not set, and the log lines of the first die's would not
    ## be complete, so we have to manually provide a full version got in the later part of the .log_a file
    ## to make sure the meta data (log structure) is complete, and for those with incomplete log lines,
    ## corresponding testing value will be empty (string value '', search [$#VAL_NOTE_EXISTENT#$] in this file)
    unless (defined $self->{Wafer}{Log_Struct}) {
        my $struct_source = defined $self->{LS_Source} ?
            $self->read_ls_source() : $self->{Buffer}{Dut1Test};
        $self->create_log_struct($struct_source);
    }
    ## collect data names and SQL types
    my (@item_names, @sql_types);
    ## FIX: plain for-loop instead of the original map-in-void-context
    ## (map used purely for its side effects is an anti-pattern)
    for my $i (1 .. $#{ $self->{Wafer}{Log_Struct} }) {
        (tied %LS_Fields)->data($self->{Wafer}{Log_Struct}[$i]);
        push @item_names, $LS_Fields{item_name};
        push @sql_types , $LS_Fields{sql_type};
    }
    ##@@[FIELDS_NAME_STORE]: please search for @@[FIELDS_DATA_STORE] in this file, and
    ##  make sure their order is conformant
    $self->{Wafer}{Dies_Data}[0] = [ +TESTDATE_NAMES, +TESTDATA_NAMES, @item_names ];
    $self->{Wafer}{SQL_Types}    = [ +SQL_TYPES_TESTDATE, +SQL_TYPES_TESTDATA, @sql_types ];
    return $read_state;
}

### Pre-condition: $self->{Buffer}{*} filled (by fill_buffer) and the log
### structure already built (init_parse).
### Appends one full record (array ref) for the buffered die to
### $self->{Wafer}{Dies_Data}; a die whose Xadr/Yadr is '-' (open/short
### engineering mode) is skipped with a warning.  Returns 1 in both cases.
sub collect_data_from_buffer {
    my $self = shift;
    ## 1. section '$#DATE#$'
    my ($start_line, $end_line) = @{ $self->{Buffer}{Date} }[0, 1];
    my ($test_start_date, $test_end_date);
    $test_start_date = (split(/\s+/, $start_line))[1]; # Test-Start-Date  2007/02/10_16:01:53
    $test_end_date = (split(/\s+/, $end_line))[1];     # Test-End-Date    2007/02/10_16:01:54
    ## 2. section '$#TESTDATA#$'
    my @test_data = split /\s+/, $self->{Buffer}{TestData}[0]; # only ONE line
    (tied %TD_Fields)->data(\@test_data);
    # Xadr = '-' , Yadr = '-' means a non-valid testing (open/short engmode)
    if ($TD_Fields{Xadr} eq '-' || $TD_Fields{Yadr} eq '-') {
        warning("Non-test data! IC_No: $TD_Fields{IC_No}, ICWaf_No: $TD_Fields{ICWaf_No}");
        return 1
    }
    ## 3. section '$#DUT?TEST#$'
    my $log_struct = $self->{Wafer}{Log_Struct};
    my $log_lines = $self->{Buffer}{Dut1Test};
    my @test_values;
    # EXPLANATION:
    # $log_struct is an array ref, which contains the info of the log lines (meta info)
    # each log_struct line (1 .. END), designates a data source from the log lines buffer (DUT?TEST):
    # $log_struct->[?][LINE_NO]     -> line# in the DUT?TEST buffer (get it for split)
    # $log_struct->[?][VALUE_INDEX] -> index of field from the split array
    for my $i (1 .. $#$log_struct) { # for each desired item data
        (tied %LS_Fields)->data($log_struct->[$i]);
        my $line_no = $LS_Fields{line_no};
        my $test_value;
        if (defined $log_lines->[$line_no]) {
            $test_value = ( split /\s+/, $log_lines->[$line_no] )[ $LS_Fields{value_index} ];
        } else {
            $test_value = ''; # [$#VAL_NOTE_EXISTENT#$]not existent log line...
        }
        push @test_values, $test_value;
    }
    ## construct one whole record for the die (in an array, and stored as an array ref)
    ##@@[FIELDS_DATA_STORE]: please search for @@[FIELDS_NAME_STORE] in this file, and
    ##  make sure their order is conformant
    push @{ $self->{Wafer}{Dies_Data} }, [ $test_start_date, $test_end_date, @test_data, @test_values ];
    ## {{SAFE_MODE}}: optional consistency check, currently disabled
    #$#{ $self->{Wafer}{Dies_Data} } == $TD_Fields{ICWaf_No}
    #    or error("IC# inconsistent: repository: $#{ $self->{Wafer}{Dies_Data} }, ICWaf_NO: $TD_Fields{ICWaf_No}");
    1;
}

# Build the X/Y address lookup table for all parsed dies:
# Adr_Table->{X}{Y} holds the Dies_Data row indices tested at that address
# (retested dies pile up in that list).  Also records Gross_Dies (distinct
# addresses) and Parse_Dies (total parsed rows).  Returns 1.
sub create_adr_table {
    my $self = shift;
    my $dies_data  = $self->{Wafer}{Dies_Data};
    my $data_names = $dies_data->[0];
    my %adr_table;
    my %seen_coords;   # one entry per distinct (X, Y) address
    tie my %die_fields, 'FieldHash', $data_names;
    for my $die_idx (1 .. $#$dies_data) { # traverse each die record
        (tied %die_fields)->data($dies_data->[$die_idx]);
        my $xadr = $die_fields{Xadr};
        my $yadr = $die_fields{Yadr};
        push @{ $adr_table{$xadr}{$yadr} }, $die_idx;
        $seen_coords{$xadr, $yadr} = 1; # re-tested dies are not counted in gross dies
    }
    $self->{Wafer}{Gross_Dies} = scalar keys %seen_coords;
    $self->{Wafer}{Parse_Dies} = $#$dies_data;
    $self->{Wafer}{Adr_Table}  = \%adr_table;
    1;
}

# Derive wafer CP timing statistics: total seconds between the first die's
# start timestamp and the last die's end timestamp, plus the average
# milliseconds per die.  Returns the total CP seconds.
# Pre-condition: Dies_Data filled and Parse_Dies already set by
# create_adr_table; NOTE(review): a Parse_Dies of 0 would divide by zero
# below -- in practice init_parse guarantees at least one die, confirm.
sub calculate_test_time {
    my $self = shift;
    my $dies_data = $self->{Wafer}{Dies_Data};
    my $data_names = $dies_data->[0]; # header row with the field names
    tie my %data_fields, 'FieldHash', $data_names;
    (tied %data_fields)->data($dies_data->[1]);            # the first die
    my $wafer_cp_start = $data_fields{Test_Start_Date};
    (tied %data_fields)->data($dies_data->[$#$dies_data]); # the last die
    my $wafer_cp_end   = $data_fields{Test_End_Date};
    $self->{Wafer}{CP_Start}     = $wafer_cp_start;
    $self->{Wafer}{CP_End}       = $wafer_cp_end;
    $self->{Wafer}{CP_Seconds}   = to_unixtime($wafer_cp_end) - to_unixtime($wafer_cp_start);
    $self->{Wafer}{MilliS_Per_Die} = sprintf("%.3lf", 1000 * $self->{Wafer}{CP_Seconds} / $self->{Wafer}{Parse_Dies});
    return $self->{Wafer}{CP_Seconds};
}

# Fill in missing wafer meta data: deduce Device_Name from the test program
# name and Lot_Number / Wafer_Number from the log file name when the header
# didn't provide them, then derive Lot_ID and Wafer_ID from Lot_Number.
# Returns $self.
sub set_meta_data {
    my $self = shift;
    # wafer id sample "3HED19102.1-06": lot id (9 word chars), optional '.',
    # sub-lot (1-2 digits), optional '-', wafer number (2 digits)
    my $wafer_id_re = qr/^(\w{9})\.?(\d{1,2})-?(\d{2})/;
    if (! defined($self->{Wafer}{Device_Name}) || $self->{Wafer}{Device_Name} eq NA_VALUE()) {
        # deduce the device name from the test program name, e.g. "hm1234"
        if ($self->{Wafer}{Test_Program_Name} =~ /(hm\d+)/i) {
            $self->{Wafer}{Device_Name} = uc($1);
            message("Device_Name: $self->{Wafer}{Device_Name}");
        } else {
            warning("Device_Name: not present and cannot deduce");
        }
    }
    if (! defined($self->{Wafer}{Lot_Number}) || $self->{Wafer}{Lot_Number} eq NA_VALUE()) {
        # sample: 3HED19102.1-06
        if ($self->{Wafer}{Log_File} =~ /$wafer_id_re/) {
            my ($lot_id, $lot_sub, $wafer_no) = ($1, $2, $3);
            $self->{Wafer}{Lot_Number} = "${lot_id}${lot_sub}-${wafer_no}";
            $self->{Wafer}{Wafer_Number} = 0 + $wafer_no; # convert "06" to "6"
            message("Lot_Number: $self->{Wafer}{Lot_Number}");
            message("Wafer_Number: $self->{Wafer}{Wafer_Number}");
        } else {
            error("Lot_Number: not present and cannot deduce");
        }
    }
    # FIX: the original used $1..$3 without checking that this match
    # succeeded, so a non-matching Lot_Number silently reused stale captures
    # from an earlier match; fail loudly instead
    if ($self->{Wafer}{Lot_Number} =~ /$wafer_id_re/) {
        $self->{Wafer}{Lot_ID}   = "${1}.${2}";
        $self->{Wafer}{Wafer_ID} = "${1}.${2}-${3}";
    } else {
        error("Lot_Number: '$self->{Wafer}{Lot_Number}' does not match the wafer id pattern");
    }
    return $self;
}

## class function (called directly, not via ->)
# Convert a timestamp string to Unix epoch seconds in the local timezone.
# pre-condition: time string holds YYYY MM DD HH MM SS in that order,
# separated by any punctuation or space characters ([[:punct:] ]),
# e.g. "2007/02/10_16:01:53" or "2007-02-10 16:01:53".
sub to_unixtime {
    my ($time_str) = @_;
    my @elems = split /[[:punct:] ]/, $time_str;
    ## SAFE_MODE: optional strictness, currently disabled
    #error("time string format invalid") unless @elems == 6;
    # FIX: numify in a plain for-loop ("06" -> 6) instead of the original
    # map-in-void-context, which is a side-effect anti-pattern
    $_ += 0 for @elems;
    $elems[0] -= 1900; # struct tm years count from 1900
    $elems[1] -= 1;    # struct tm months count from 0
    # mktime wants (sec, min, hour, mday, mon, year): exactly the reverse
    return POSIX::mktime(reverse @elems);
}

# Top-level driver: open the log file, run init_parse for the first die and
# fill_buffer/collect_data_from_buffer for the rest (stopping early after
# $parse_max dies when given), then compute the address table, timing
# statistics and meta data.  Dies via error() on unexpected EOF.
# Returns the populated $self->{Wafer} hash ref.
sub parse {
    my $self = shift;
    my ($parse_max) = @_;  # optional cap on the number of dies to parse
    my $read_state;
    my $parse_count = 0;   # dies read so far
    my $partial_count = 0; # dies with an inconsistent number of log lines
    my ($parse_begin, $parse_end);
    ### open .log_a file for reading
    $self->open_log_file();
    ### time reckoning
    $parse_begin = time();
    message("Parsing BEGIN: ", scalar localtime($parse_begin));
    ### the parsing process: init_parse primes the first die's buffer,
    ### fill_buffer each subsequent one; loop while the state is positive
    for ( $read_state = $self->init_parse(); $read_state > 0; $read_state = $self->fill_buffer() ) {
        if ($read_state >= 2) { # inconsistent data lines read
            ++$partial_count;
        }
        printf STDERR "\rNO:%5d", ++$parse_count;  # live progress counter
        $self->collect_data_from_buffer();
        last if $parse_max && $parse_count >= $parse_max;
    }
    # only good ending if read_state is 0
    error("unexpected EOF: read state: $read_state") if ($read_state < 0);
    print STDERR "\n";
    ### time reckoning
    $parse_end = time();
    message("Parsing   END: ", scalar localtime($parse_end));
    message("Time consumed: ", ($parse_end - $parse_begin), "s, ",
        sprintf("%.3lf", 1000 * ($parse_end - $parse_begin) / $parse_count), "ms per die.");
    ### other booking works
    $self->create_adr_table();
    $self->calculate_test_time();
    $self->{Wafer}{Partial_Dies} = $partial_count;
    message("Parsed: $self->{Wafer}{Parse_Dies}; Partial: $self->{Wafer}{Partial_Dies}; Gross: $self->{Wafer}{Gross_Dies}");
    message("Wafer CP Time Stats:");
    message("Waf. CP Start: ", $self->{Wafer}{CP_Start});
    message("Waf. CP End  : ", $self->{Wafer}{CP_End});
    message("CP Time Stats: ", sprintf("total: %d:%02d, %.3lf ms per die",
            int($self->{Wafer}{CP_Seconds} / 60), $self->{Wafer}{CP_Seconds} % 60, $self->{Wafer}{MilliS_Per_Die}) );
    ### set the Log_File attribute in the resulting data
    $self->{Wafer}{Log_File} = $self->{Log_File};
    ### set miscellaneous meta data, Device Name, Lot ID, Wafer Number, etc...
    $self->set_meta_data();
    ### done
    $self->close_log_file();
    return $self->{Wafer};
}

1;
