package smallChanges.statements;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import smallChanges.Tokenization.SCToken;
import smallChanges.Tokenization.TokenMappingEntry;
import smallChanges.Tokenization.TokenMappingResultSet;
import smallChanges.Tokenization.TokenMappingEntry.TokenMappingStatus;

public class StatementDiff {

    /**
     * Sentinel token standing in for "no counterpart on this side" (pure
     * insert/delete or a pad match). All position fields are -1 and the
     * image is "<NULL>" so it cannot collide with a real source token.
     */
    public static final SCToken NullToken = new SCToken(0L, -1,-1,-1,-1,"<NULL>");
    // Token-level mapping built by the constructor; exposed via getResultSet().
    private TokenMappingResultSet resultSet;
    
    /**
     * Computes a token-level alignment between {@code left} and {@code right}.
     *
     * The token sequences of both statements are diffed (incava java-diff);
     * unchanged runs are mapped 1:1 as VALID, "change" chunks are mapped by
     * edit distance via {@link #mapTokens}, and pure adds/deletes are mapped
     * against {@link #NullToken} as INVALID. The result is stored in
     * {@link #resultSet}.
     */
    public StatementDiff(Statement left,Statement right)
    {
        
        ArrayList<SCToken> leftTokens = new ArrayList<SCToken>();
        ArrayList<SCToken> rightTokens = new ArrayList<SCToken>();

        // Flatten each statement's token range into a list; a side whose
        // range is null simply contributes an empty list.
        //System.err.println(left +" <--> " + right);
        if (left.getTokensInRange() != null) {
            Iterator<SCToken> tokenIter = left.getTokensInRange().iterator();
            while(tokenIter.hasNext())
            {
                SCToken token = tokenIter.next();

                leftTokens.add(token);
            }
        }

        if (right.getTokensInRange() != null) {
            Iterator<SCToken> tokenIter = right.getTokensInRange().iterator();
            while(tokenIter.hasNext())
            {
                SCToken token = tokenIter.next();

                rightTokens.add(token);
            }
        }
        
        ////////////////////
        // LCS-style diff over the two token sequences.
        org.incava.util.diff.Diff differ = new org.incava.util.diff.Diff(
                leftTokens, rightTokens);

        List diffResults = differ.diff();

        Iterator diffs = diffResults.iterator();

        // Cursors marking how far into each token list we have emitted
        // mappings so far.
        int leftCurrToken = 0;
        int rightCurrToken = 0;
        TokenMappingResultSet localResultSet = new TokenMappingResultSet();

        while (diffs.hasNext()) {
            org.incava.util.diff.Difference diff = (org.incava.util.diff.Difference) diffs.next();
            //System.err.println(diff);

            // 1:1-map the unchanged run that precedes this diff chunk
            // (both cursors advance in lock step up to the chunk start).
            for (; rightCurrToken < diff.getAddedStart()
            && leftCurrToken < diff.getDeletedStart(); rightCurrToken++, leftCurrToken++) {
                localResultSet.add(new TokenMappingEntry(leftTokens.get(leftCurrToken),
                		rightTokens.get(rightCurrToken), 0,TokenMappingStatus.VALID,
                		left, right));
            }

            // A "change" chunk has tokens on both sides (the diff library
            // marks a missing side with end == -1); map those tokens onto
            // each other by edit distance. Pure adds / pure deletes are
            // handled below instead.
            if (!(diff.getAddedEnd() == -1 || diff.getDeletedEnd() == -1)) {
                mapTokens(leftTokens,  diff.getDeletedStart(), diff.getDeletedEnd(),
                        rightTokens,  diff.getAddedStart(), diff.getAddedEnd(),localResultSet,
                        left, right);
            }
            if (diff.getDeletedEnd() == -1) {
                // Pure add: every added right-side token maps to NullToken.
                for (int rightLine = rightCurrToken; rightLine <= diff.getAddedEnd(); rightLine++) {
                    localResultSet.add(new TokenMappingEntry(NullToken,rightTokens.get(rightLine),
                    		0,TokenMappingStatus.INVALID,SingleStatement.NullStatement, right));
                }
            }
            if (diff.getAddedEnd() == -1) {
                // Pure delete: every deleted left-side token maps to NullToken.
                for (int leftLine = leftCurrToken; leftLine <= diff.getDeletedEnd(); leftLine++) {
                    localResultSet.add(new TokenMappingEntry(leftTokens.get(leftLine), 
                    		NullToken, 0, TokenMappingStatus.INVALID,left, SingleStatement.NullStatement));

                }
            }
            // Advance the cursors past this chunk. When a side contributed
            // nothing to the chunk (end == -1), its start index is where the
            // next unchanged run begins on that side.
            if (diff.getAddedEnd() == -1) {
                rightCurrToken = diff.getAddedStart();
            } else {
                rightCurrToken = diff.getAddedEnd() + 1;

            }
            if (diff.getDeletedEnd() == -1) {
                leftCurrToken = diff.getDeletedStart();
            } else {
                leftCurrToken = diff.getDeletedEnd() + 1;
            }
        }

        // 1:1-map the unchanged tail after the last diff chunk.
        for (int rightLine = rightCurrToken, leftLine = leftCurrToken; rightLine < rightTokens.size() && leftLine < leftTokens.size(); rightLine++, leftLine++) {
            localResultSet.add(new TokenMappingEntry(leftTokens.get(leftLine), rightTokens.get(rightLine),
            		0, TokenMappingStatus.VALID,left, right));
        }
        resultSet = localResultSet;
    }
    
    /**
     * Collects the images of the tokens at indices {@code [start, end]}
     * (inclusive), clamped to the list bounds.
     */
    private ArrayList<String> fillWithStringsFromTokens(
            ArrayList<SCToken> tokenTokens, int start, int end) {
        ArrayList<String> images = new ArrayList<String>();

        int last = Math.min(end, tokenTokens.size() - 1);
        for (int idx = Math.max(start, 0); idx <= last; idx++) {
            images.add(tokenTokens.get(idx).getImage());
        }

        return images;
    }
    
    /**
     * Collects the tokens at indices {@code [start, end]} (inclusive) into a
     * fresh list, clamped to the list bounds.
     */
    private ArrayList<SCToken> fillWithTokens(ArrayList<SCToken> tokenSCTokens, int start, int end) {
        ArrayList<SCToken> slice = new ArrayList<SCToken>();

        int last = Math.min(end, tokenSCTokens.size() - 1);
        for (int idx = Math.max(start, 0); idx <= last; idx++) {
            slice.add(tokenSCTokens.get(idx));
        }

        return slice;
    }
    /**
     * Maps the tokens inside one diff "change" chunk (left range vs right
     * range, both inclusive) onto each other by minimum normalized edit
     * distance, using Hungarian (Munkres) bipartite matching.
     *
     * The shorter side is padded with empty/null placeholder entries so both
     * sides have equal length; {@code extraLines} records which side was
     * padded (negative: left was padded, positive: right was padded) so
     * {@link #addToResultSet} can emit pad matches as one-sided INVALID
     * entries instead of real mappings.
     */
    private void mapTokens(ArrayList<SCToken> leftTokens,  int startLineLeft, int endLineLeft, ArrayList<SCToken> rightTokens, 
            int startLineRight, int endLineRight, TokenMappingResultSet resultSet,Statement left, Statement right)
    {
        int extraLines = 0;

        // Token images (strings) and the tokens themselves for each range.
        ArrayList<String> leftLines = fillWithStringsFromTokens(leftTokens, startLineLeft, endLineLeft);
        ArrayList<String> rightLines = fillWithStringsFromTokens(rightTokens, startLineRight, endLineRight);
        ArrayList<SCToken> leftLinesTokens = fillWithTokens(leftTokens, startLineLeft, endLineLeft);
        ArrayList<SCToken> rightLinesTokens = fillWithTokens(rightTokens, startLineRight, endLineRight);

        // Pad the shorter side so the bipartite graph is square. Whatever
        // matches a pad entry must be ignored downstream, e.g.:
        //   1: value--;  2: otherValue--;   vs.   1: value++;
        // the two "1" lines should match; the shorter chunk is padded and
        // the pad's match is discarded via extraLines bookkeeping.
        if (endLineLeft - startLineLeft < endLineRight - startLineRight) {
            for (int i = (endLineRight - startLineRight) - (endLineLeft - startLineLeft); i > 0; i--) {
                leftLines.add("");
                leftLinesTokens.add(null);
                extraLines--;
            }
        } else if (endLineLeft - startLineLeft > endLineRight - startLineRight) {
            for (int i = (endLineLeft - startLineLeft) - (endLineRight - startLineRight); i > 0; i--) {
                rightLines.add("");
                rightLinesTokens.add(null);
                extraLines++;
            }
        }

        int newEndLineLeft = leftLines.size() - 1;
        int newEndLineRight = rightLines.size() - 1;

        // Flattened edit-distance cost matrix, then the optimal assignment.
        double[] weightedGraph = findEditDistWeightedBipartiteGraph(0, newEndLineLeft,
                0, newEndLineRight, leftLinesTokens, rightLinesTokens, false);
        double[][] lineNumberMapping = munkres.HungarianAlgorithm.RunHungarian(weightedGraph);

        // NOTE: the historical "flip the matrix when rows > columns" handling
        // was dead code (guarded by `if (false && ...)`) and has been removed:
        // both sides are padded to equal length above, so the Hungarian result
        // never needs transposing.

        // Crossings are intentionally NOT computed here; they are computed
        // later at the token level, since per-chunk crossing counts would be
        // inaccurate once chunks move around.

        try {
            addToResultSet(lineNumberMapping, startLineLeft, startLineRight,
                    endLineLeft, endLineRight, leftTokens, rightTokens,
                    extraLines, resultSet, left, right);
        } catch (IOException e) {
            // addToResultSet declares IOException for historical reasons but
            // performs no I/O today; report and continue rather than abort.
            e.printStackTrace();
        }
    }
    
    /**
     * Builds the flattened cost array fed to the Hungarian algorithm.
     *
     * Layout: {@code results[0]} = row count (left side size), {@code
     * results[1]} = column count (right side size), followed by the row-major
     * normalized Levenshtein distances between every left/right token pair.
     * A null (pad) token on either side costs the maximum distance 1.0 so it
     * only matches when nothing better is available. Both index ranges are
     * inclusive.
     *
     * Note: the previous implementation also filled a square scratch matrix
     * with symmetric writes; that matrix never influenced the returned array
     * (each entry was copied out immediately after the current pair's write,
     * and the mirror write could only overwrite a cell already copied, or
     * write an identical value on the diagonal), so it has been removed.
     *
     * @param runTokenEditDistance currently unused; kept for interface
     *        compatibility with existing callers.
     */
    private double[] findEditDistWeightedBipartiteGraph(int startLineOne,
            int stopLineOne, int startLineTwo, int stopLineTwo,
            ArrayList<SCToken> fileOneData, ArrayList<SCToken> fileTwoData, boolean runTokenEditDistance) {

        int fileOneSize = stopLineOne - startLineOne + 1;
        int fileTwoSize = stopLineTwo - startLineTwo + 1;
        double[] results = new double[2 + fileOneSize * fileTwoSize];

        results[0] = fileOneSize;
        results[1] = fileTwoSize;

        int i = 2;
        for (int oneIndex = startLineOne; oneIndex <= stopLineOne; oneIndex++) {
            SCToken lineFromOne = fileOneData.get(oneIndex);

            for (int twoIndex = startLineTwo; twoIndex <= stopLineTwo; twoIndex++) {
                SCToken lineFromTwo = fileTwoData.get(twoIndex);

                if (lineFromOne == null || lineFromTwo == null) {
                    // Pad entries cannot really match anything: maximal cost.
                    results[i++] = 1.0;
                } else {
                    results[i++] = lineFromOne.getNormalizedLevenshteinEditDistance(lineFromTwo);
                }
            }
        }
        return results;
    }

    /**
     * Translates the Hungarian-assignment result into TokenMappingEntry rows.
     *
     * Each {@code lineNumberMapping} row is read as: column 0 = left offset,
     * column 1 = right offset, column 3 = the entry's weight/cost. The exact
     * column layout is defined by munkres.HungarianAlgorithm, which is not
     * visible here — TODO confirm column 3's meaning.
     *
     * {@code extraLines < 0} means pad entries were appended to the LEFT
     * side, so a left offset beyond {@code endLineOne} is a pad match (and
     * symmetrically for {@code extraLines > 0} / the right side). Pad matches
     * are emitted as one-sided entries against {@link #NullToken}.
     *
     * NOTE(review): the final branch marks even genuine left/right pairs as
     * TokenMappingStatus.INVALID — possibly intentional (chunk mappings may
     * be validated later), but worth confirming.
     *
     * Declares {@code throws IOException} for historical reasons (removed
     * file-output code); it performs no I/O now, and removing the clause
     * would break the caller's {@code catch (IOException)}.
     */
    private void addToResultSet(double[][] lineNumberMapping, int startLineOne,
            int startLineTwo, int endLineOne, int endLineTwo,
            ArrayList<SCToken> fileOneData, ArrayList<SCToken> fileTwoData,
            int extraLines, TokenMappingResultSet resultSet,
            Statement left, Statement right)
            throws IOException {

        // if extra lines < 0, then lines were added to LEFT
        // if extra lines > 0, then lines were added to RIGHT

        for (int index = 0; index < lineNumberMapping.length; index++) {
            // A mapped offset landing past the real end of its side hit a pad
            // entry; emit a one-sided entry instead of a real mapping.
            if ((extraLines < 0 && (int) (lineNumberMapping[index][0] + startLineOne) > endLineOne /*
                                                                                                     * +
                                                                                                     * extraLines
                                                                                                     */)
                    || (extraLines > 0 && (int) (lineNumberMapping[index][1] + startLineTwo) > endLineTwo /*- extraLines*/)) {
                if (extraLines > 0) {
                    // Right side was padded: this real left token matched a pad.
                    resultSet.add(new TokenMappingEntry(fileOneData.get((int) (lineNumberMapping[index][0])
                                    + startLineOne), NullToken, (int) lineNumberMapping[index][3],TokenMappingStatus.INVALID,
                                    left, SingleStatement.NullStatement));
                } else if (extraLines < 0) {
                    // Left side was padded: this real right token matched a pad.
                    resultSet.add(new TokenMappingEntry(NullToken,
                            fileTwoData.get((int) (lineNumberMapping[index][1])+ startLineTwo),
                            (int)lineNumberMapping[index][3], TokenMappingStatus.INVALID,SingleStatement.NullStatement, right));
                }
            } else {
                // Genuine left/right pair from the assignment.
                resultSet.add(new TokenMappingEntry(fileOneData.get((int) (lineNumberMapping[index][0])+ startLineOne), 
                        fileTwoData.get((int) (lineNumberMapping[index][1])+ startLineTwo),(int) lineNumberMapping[index][3],
                        TokenMappingStatus.INVALID,left, right));
            }
        }
    }

    /** Returns the token-level mapping computed by the constructor. */
    public TokenMappingResultSet getResultSet() {
        return this.resultSet;
    }

}
