defects4j_project
stringclasses
12 values
defects4j_bug_id
stringlengths
1
3
file_path
stringlengths
38
95
bug_start_line
stringlengths
1
4
bug_end_line
stringlengths
2
4
issue_title
stringlengths
13
150
issue_description
stringlengths
4
8.74k
original_src
stringlengths
44
9k
original_src_wo_comments
stringlengths
38
5.83k
fixed_src
stringlengths
40
9.55k
fixed_src_wo_comments
stringlengths
34
5.76k
Math
72
src/main/java/org/apache/commons/math/analysis/solvers/BrentSolver.java
98
144
Brent solver returns the wrong value if either bracket endpoint is root
The solve(final UnivariateRealFunction f, final double min, final double max, final double initial) function returns yMin or yMax if min or max are deemed to be roots, respectively, instead of min or max.
public double solve(final UnivariateRealFunction f, final double min, final double max, final double initial) throws MaxIterationsExceededException, FunctionEvaluationException { clearResult(); verifySequence(min, initial, max); // return the initial guess if it is good enough double yInitial = f.value(initial); if (Math.abs(yInitial) <= functionValueAccuracy) { setResult(initial, 0); return result; } // return the first endpoint if it is good enough double yMin = f.value(min); if (Math.abs(yMin) <= functionValueAccuracy) { setResult(yMin, 0); return result; } // reduce interval if min and initial bracket the root if (yInitial * yMin < 0) { return solve(f, min, yMin, initial, yInitial, min, yMin); } // return the second endpoint if it is good enough double yMax = f.value(max); if (Math.abs(yMax) <= functionValueAccuracy) { setResult(yMax, 0); return result; } // reduce interval if initial and max bracket the root if (yInitial * yMax < 0) { return solve(f, initial, yInitial, max, yMax, initial, yInitial); } if (yMin * yMax > 0) { throw MathRuntimeException.createIllegalArgumentException( NON_BRACKETING_MESSAGE, min, max, yMin, yMax); } // full Brent algorithm starting with provided initial guess return solve(f, min, yMin, max, yMax, initial, yInitial); }
public double solve ( final UnivariateRealFunction f , final double min , final double max , final double initial ) throws MaxIterationsExceededException , FunctionEvaluationException { clearResult ( ) ; verifySequence ( min , initial , max ) ; double yInitial = f . value ( initial ) ; if ( Math . abs ( yInitial ) <= functionValueAccuracy ) { setResult ( initial , 0 ) ; return result ; } double yMin = f . value ( min ) ; if ( Math . abs ( yMin ) <= functionValueAccuracy ) { setResult ( yMin , 0 ) ; return result ; } if ( yInitial * yMin < 0 ) { return solve ( f , min , yMin , initial , yInitial , min , yMin ) ; } double yMax = f . value ( max ) ; if ( Math . abs ( yMax ) <= functionValueAccuracy ) { setResult ( yMax , 0 ) ; return result ; } if ( yInitial * yMax < 0 ) { return solve ( f , initial , yInitial , max , yMax , initial , yInitial ) ; } if ( yMin * yMax > 0 ) { throw MathRuntimeException . createIllegalArgumentException ( NON_BRACKETING_MESSAGE , min , max , yMin , yMax ) ; } return solve ( f , min , yMin , max , yMax , initial , yInitial ) ; }
public double solve(final UnivariateRealFunction f, final double min, final double max, final double initial) throws MaxIterationsExceededException, FunctionEvaluationException { clearResult(); verifySequence(min, initial, max); // return the initial guess if it is good enough double yInitial = f.value(initial); if (Math.abs(yInitial) <= functionValueAccuracy) { setResult(initial, 0); return result; } // return the first endpoint if it is good enough double yMin = f.value(min); if (Math.abs(yMin) <= functionValueAccuracy) { setResult(min, 0); return result; } // reduce interval if min and initial bracket the root if (yInitial * yMin < 0) { return solve(f, min, yMin, initial, yInitial, min, yMin); } // return the second endpoint if it is good enough double yMax = f.value(max); if (Math.abs(yMax) <= functionValueAccuracy) { setResult(max, 0); return result; } // reduce interval if initial and max bracket the root if (yInitial * yMax < 0) { return solve(f, initial, yInitial, max, yMax, initial, yInitial); } if (yMin * yMax > 0) { throw MathRuntimeException.createIllegalArgumentException( NON_BRACKETING_MESSAGE, min, max, yMin, yMax); } // full Brent algorithm starting with provided initial guess return solve(f, min, yMin, max, yMax, initial, yInitial); }
public double solve ( final UnivariateRealFunction f , final double min , final double max , final double initial ) throws MaxIterationsExceededException , FunctionEvaluationException { clearResult ( ) ; verifySequence ( min , initial , max ) ; double yInitial = f . value ( initial ) ; if ( Math . abs ( yInitial ) <= functionValueAccuracy ) { setResult ( initial , 0 ) ; return result ; } double yMin = f . value ( min ) ; if ( Math . abs ( yMin ) <= functionValueAccuracy ) { setResult ( min , 0 ) ; return result ; } if ( yInitial * yMin < 0 ) { return solve ( f , min , yMin , initial , yInitial , min , yMin ) ; } double yMax = f . value ( max ) ; if ( Math . abs ( yMax ) <= functionValueAccuracy ) { setResult ( max , 0 ) ; return result ; } if ( yInitial * yMax < 0 ) { return solve ( f , initial , yInitial , max , yMax , initial , yInitial ) ; } if ( yMin * yMax > 0 ) { throw MathRuntimeException . createIllegalArgumentException ( NON_BRACKETING_MESSAGE , min , max , yMin , yMax ) ; } return solve ( f , min , yMin , max , yMax , initial , yInitial ) ; }
Math
25
src/main/java/org/apache/commons/math3/optimization/fitting/HarmonicFitter.java
257
329
"HarmonicFitter.ParameterGuesser" sometimes fails to return sensible values
The inner class "ParameterGuesser" in "HarmonicFitter" (package "o.a.c.m.optimization.fitting") fails to compute a usable guess for the "amplitude" parameter.
private void guessAOmega() { // initialize the sums for the linear model between the two integrals double sx2 = 0; double sy2 = 0; double sxy = 0; double sxz = 0; double syz = 0; double currentX = observations[0].getX(); double currentY = observations[0].getY(); double f2Integral = 0; double fPrime2Integral = 0; final double startX = currentX; for (int i = 1; i < observations.length; ++i) { // one step forward final double previousX = currentX; final double previousY = currentY; currentX = observations[i].getX(); currentY = observations[i].getY(); // update the integrals of f<sup>2</sup> and f'<sup>2</sup> // considering a linear model for f (and therefore constant f') final double dx = currentX - previousX; final double dy = currentY - previousY; final double f2StepIntegral = dx * (previousY * previousY + previousY * currentY + currentY * currentY) / 3; final double fPrime2StepIntegral = dy * dy / dx; final double x = currentX - startX; f2Integral += f2StepIntegral; fPrime2Integral += fPrime2StepIntegral; sx2 += x * x; sy2 += f2Integral * f2Integral; sxy += x * f2Integral; sxz += x * fPrime2Integral; syz += f2Integral * fPrime2Integral; } // compute the amplitude and pulsation coefficients double c1 = sy2 * sxz - sxy * syz; double c2 = sxy * sxz - sx2 * syz; double c3 = sx2 * sy2 - sxy * sxy; if ((c1 / c2 < 0) || (c2 / c3 < 0)) { final int last = observations.length - 1; // Range of the observations, assuming that the // observations are sorted. final double xRange = observations[last].getX() - observations[0].getX(); if (xRange == 0) { throw new ZeroException(); } omega = 2 * Math.PI / xRange; double yMin = Double.POSITIVE_INFINITY; double yMax = Double.NEGATIVE_INFINITY; for (int i = 1; i < observations.length; ++i) { final double y = observations[i].getY(); if (y < yMin) { yMin = y; } if (y > yMax) { yMax = y; } } a = 0.5 * (yMax - yMin); } else { // In some ill-conditioned cases (cf. MATH-844), the guesser // procedure cannot produce sensible results. 
a = FastMath.sqrt(c1 / c2); omega = FastMath.sqrt(c2 / c3); } }
private void guessAOmega ( ) { double sx2 = 0 ; double sy2 = 0 ; double sxy = 0 ; double sxz = 0 ; double syz = 0 ; double currentX = observations [ 0 ] . getX ( ) ; double currentY = observations [ 0 ] . getY ( ) ; double f2Integral = 0 ; double fPrime2Integral = 0 ; final double startX = currentX ; for ( int i = 1 ; i < observations . length ; ++ i ) { final double previousX = currentX ; final double previousY = currentY ; currentX = observations [ i ] . getX ( ) ; currentY = observations [ i ] . getY ( ) ; final double dx = currentX - previousX ; final double dy = currentY - previousY ; final double f2StepIntegral = dx * ( previousY * previousY + previousY * currentY + currentY * currentY ) / 3 ; final double fPrime2StepIntegral = dy * dy / dx ; final double x = currentX - startX ; f2Integral += f2StepIntegral ; fPrime2Integral += fPrime2StepIntegral ; sx2 += x * x ; sy2 += f2Integral * f2Integral ; sxy += x * f2Integral ; sxz += x * fPrime2Integral ; syz += f2Integral * fPrime2Integral ; } double c1 = sy2 * sxz - sxy * syz ; double c2 = sxy * sxz - sx2 * syz ; double c3 = sx2 * sy2 - sxy * sxy ; if ( ( c1 / c2 < 0 ) || ( c2 / c3 < 0 ) ) { final int last = observations . length - 1 ; final double xRange = observations [ last ] . getX ( ) - observations [ 0 ] . getX ( ) ; if ( xRange == 0 ) { throw new ZeroException ( ) ; } omega = 2 * Math . PI / xRange ; double yMin = Double . POSITIVE_INFINITY ; double yMax = Double . NEGATIVE_INFINITY ; for ( int i = 1 ; i < observations . length ; ++ i ) { final double y = observations [ i ] . getY ( ) ; if ( y < yMin ) { yMin = y ; } if ( y > yMax ) { yMax = y ; } } a = 0.5 * ( yMax - yMin ) ; } else { a = FastMath . sqrt ( c1 / c2 ) ; omega = FastMath . sqrt ( c2 / c3 ) ; } }
private void guessAOmega() { // initialize the sums for the linear model between the two integrals double sx2 = 0; double sy2 = 0; double sxy = 0; double sxz = 0; double syz = 0; double currentX = observations[0].getX(); double currentY = observations[0].getY(); double f2Integral = 0; double fPrime2Integral = 0; final double startX = currentX; for (int i = 1; i < observations.length; ++i) { // one step forward final double previousX = currentX; final double previousY = currentY; currentX = observations[i].getX(); currentY = observations[i].getY(); // update the integrals of f<sup>2</sup> and f'<sup>2</sup> // considering a linear model for f (and therefore constant f') final double dx = currentX - previousX; final double dy = currentY - previousY; final double f2StepIntegral = dx * (previousY * previousY + previousY * currentY + currentY * currentY) / 3; final double fPrime2StepIntegral = dy * dy / dx; final double x = currentX - startX; f2Integral += f2StepIntegral; fPrime2Integral += fPrime2StepIntegral; sx2 += x * x; sy2 += f2Integral * f2Integral; sxy += x * f2Integral; sxz += x * fPrime2Integral; syz += f2Integral * fPrime2Integral; } // compute the amplitude and pulsation coefficients double c1 = sy2 * sxz - sxy * syz; double c2 = sxy * sxz - sx2 * syz; double c3 = sx2 * sy2 - sxy * sxy; if ((c1 / c2 < 0) || (c2 / c3 < 0)) { final int last = observations.length - 1; // Range of the observations, assuming that the // observations are sorted. final double xRange = observations[last].getX() - observations[0].getX(); if (xRange == 0) { throw new ZeroException(); } omega = 2 * Math.PI / xRange; double yMin = Double.POSITIVE_INFINITY; double yMax = Double.NEGATIVE_INFINITY; for (int i = 1; i < observations.length; ++i) { final double y = observations[i].getY(); if (y < yMin) { yMin = y; } if (y > yMax) { yMax = y; } } a = 0.5 * (yMax - yMin); } else { if (c2 == 0) { // In some ill-conditioned cases (cf. 
MATH-844), the guesser // procedure cannot produce sensible results. throw new MathIllegalStateException(LocalizedFormats.ZERO_DENOMINATOR); } a = FastMath.sqrt(c1 / c2); omega = FastMath.sqrt(c2 / c3); } }
private void guessAOmega ( ) { double sx2 = 0 ; double sy2 = 0 ; double sxy = 0 ; double sxz = 0 ; double syz = 0 ; double currentX = observations [ 0 ] . getX ( ) ; double currentY = observations [ 0 ] . getY ( ) ; double f2Integral = 0 ; double fPrime2Integral = 0 ; final double startX = currentX ; for ( int i = 1 ; i < observations . length ; ++ i ) { final double previousX = currentX ; final double previousY = currentY ; currentX = observations [ i ] . getX ( ) ; currentY = observations [ i ] . getY ( ) ; final double dx = currentX - previousX ; final double dy = currentY - previousY ; final double f2StepIntegral = dx * ( previousY * previousY + previousY * currentY + currentY * currentY ) / 3 ; final double fPrime2StepIntegral = dy * dy / dx ; final double x = currentX - startX ; f2Integral += f2StepIntegral ; fPrime2Integral += fPrime2StepIntegral ; sx2 += x * x ; sy2 += f2Integral * f2Integral ; sxy += x * f2Integral ; sxz += x * fPrime2Integral ; syz += f2Integral * fPrime2Integral ; } double c1 = sy2 * sxz - sxy * syz ; double c2 = sxy * sxz - sx2 * syz ; double c3 = sx2 * sy2 - sxy * sxy ; if ( ( c1 / c2 < 0 ) || ( c2 / c3 < 0 ) ) { final int last = observations . length - 1 ; final double xRange = observations [ last ] . getX ( ) - observations [ 0 ] . getX ( ) ; if ( xRange == 0 ) { throw new ZeroException ( ) ; } omega = 2 * Math . PI / xRange ; double yMin = Double . POSITIVE_INFINITY ; double yMax = Double . NEGATIVE_INFINITY ; for ( int i = 1 ; i < observations . length ; ++ i ) { final double y = observations [ i ] . getY ( ) ; if ( y < yMin ) { yMin = y ; } if ( y > yMax ) { yMax = y ; } } a = 0.5 * ( yMax - yMin ) ; } else { if ( c2 == 0 ) { throw new MathIllegalStateException ( LocalizedFormats . ZERO_DENOMINATOR ) ; } a = FastMath . sqrt ( c1 / c2 ) ; omega = FastMath . sqrt ( c2 / c3 ) ; } }
Compress
5
src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
191
246
ZipArchiveInputStream doesn't report the end of a truncated archive
If a Zip archive is truncated, (e.g. because it is the first volume in a multi-volume archive) the ZipArchiveInputStream.read() method will not detect that fact. All calls to read() will return 0 bytes read. They will not return -1 (end of stream), nor will they throw any exception (which would seem like a good idea to me because the archive is truncated). I have tracked this problem to ZipArchiveInputStream.java, line 239. It contains a check if (read == 0 && inf.finished()) { return -1; } For truncated archives the read is always zero but the inf is never finished(). I suggest adding two lines below: if (read == 0 && inf.finished()) { return -1; } else if (read == 0 && lengthOfLastRead == -1) { throw new IOException("Truncated ZIP file"); } This solves the problem in my tests.
public int read(byte[] buffer, int start, int length) throws IOException { if (closed) { throw new IOException("The stream is closed"); } if (inf.finished() || current == null) { return -1; } // avoid int overflow, check null buffer if (start <= buffer.length && length >= 0 && start >= 0 && buffer.length - start >= length) { if (current.getMethod() == ZipArchiveOutputStream.STORED) { int csize = (int) current.getSize(); if (readBytesOfEntry >= csize) { return -1; } if (offsetInBuffer >= lengthOfLastRead) { offsetInBuffer = 0; if ((lengthOfLastRead = in.read(buf)) == -1) { return -1; } count(lengthOfLastRead); bytesReadFromStream += lengthOfLastRead; } int toRead = length > lengthOfLastRead ? lengthOfLastRead - offsetInBuffer : length; if ((csize - readBytesOfEntry) < toRead) { toRead = csize - readBytesOfEntry; } System.arraycopy(buf, offsetInBuffer, buffer, start, toRead); offsetInBuffer += toRead; readBytesOfEntry += toRead; crc.update(buffer, start, toRead); return toRead; } if (inf.needsInput()) { fill(); if (lengthOfLastRead > 0) { bytesReadFromStream += lengthOfLastRead; } } int read = 0; try { read = inf.inflate(buffer, start, length); } catch (DataFormatException e) { throw new ZipException(e.getMessage()); } if (read == 0 && inf.finished()) { return -1; } crc.update(buffer, start, read); return read; } throw new ArrayIndexOutOfBoundsException(); }
public int read ( byte [ ] buffer , int start , int length ) throws IOException { if ( closed ) { throw new IOException ( "The stream is closed" ) ; } if ( inf . finished ( ) || current == null ) { return - 1 ; } if ( start <= buffer . length && length >= 0 && start >= 0 && buffer . length - start >= length ) { if ( current . getMethod ( ) == ZipArchiveOutputStream . STORED ) { int csize = ( int ) current . getSize ( ) ; if ( readBytesOfEntry >= csize ) { return - 1 ; } if ( offsetInBuffer >= lengthOfLastRead ) { offsetInBuffer = 0 ; if ( ( lengthOfLastRead = in . read ( buf ) ) == - 1 ) { return - 1 ; } count ( lengthOfLastRead ) ; bytesReadFromStream += lengthOfLastRead ; } int toRead = length > lengthOfLastRead ? lengthOfLastRead - offsetInBuffer : length ; if ( ( csize - readBytesOfEntry ) < toRead ) { toRead = csize - readBytesOfEntry ; } System . arraycopy ( buf , offsetInBuffer , buffer , start , toRead ) ; offsetInBuffer += toRead ; readBytesOfEntry += toRead ; crc . update ( buffer , start , toRead ) ; return toRead ; } if ( inf . needsInput ( ) ) { fill ( ) ; if ( lengthOfLastRead > 0 ) { bytesReadFromStream += lengthOfLastRead ; } } int read = 0 ; try { read = inf . inflate ( buffer , start , length ) ; } catch ( DataFormatException e ) { throw new ZipException ( e . getMessage ( ) ) ; } if ( read == 0 && inf . finished ( ) ) { return - 1 ; } crc . update ( buffer , start , read ) ; return read ; } throw new ArrayIndexOutOfBoundsException ( ) ; }
public int read(byte[] buffer, int start, int length) throws IOException { if (closed) { throw new IOException("The stream is closed"); } if (inf.finished() || current == null) { return -1; } // avoid int overflow, check null buffer if (start <= buffer.length && length >= 0 && start >= 0 && buffer.length - start >= length) { if (current.getMethod() == ZipArchiveOutputStream.STORED) { int csize = (int) current.getSize(); if (readBytesOfEntry >= csize) { return -1; } if (offsetInBuffer >= lengthOfLastRead) { offsetInBuffer = 0; if ((lengthOfLastRead = in.read(buf)) == -1) { return -1; } count(lengthOfLastRead); bytesReadFromStream += lengthOfLastRead; } int toRead = length > lengthOfLastRead ? lengthOfLastRead - offsetInBuffer : length; if ((csize - readBytesOfEntry) < toRead) { toRead = csize - readBytesOfEntry; } System.arraycopy(buf, offsetInBuffer, buffer, start, toRead); offsetInBuffer += toRead; readBytesOfEntry += toRead; crc.update(buffer, start, toRead); return toRead; } if (inf.needsInput()) { fill(); if (lengthOfLastRead > 0) { bytesReadFromStream += lengthOfLastRead; } } int read = 0; try { read = inf.inflate(buffer, start, length); } catch (DataFormatException e) { throw new ZipException(e.getMessage()); } if (read == 0) { if (inf.finished()) { return -1; } else if (lengthOfLastRead == -1) { throw new IOException("Truncated ZIP file"); } } crc.update(buffer, start, read); return read; } throw new ArrayIndexOutOfBoundsException(); }
public int read ( byte [ ] buffer , int start , int length ) throws IOException { if ( closed ) { throw new IOException ( "The stream is closed" ) ; } if ( inf . finished ( ) || current == null ) { return - 1 ; } if ( start <= buffer . length && length >= 0 && start >= 0 && buffer . length - start >= length ) { if ( current . getMethod ( ) == ZipArchiveOutputStream . STORED ) { int csize = ( int ) current . getSize ( ) ; if ( readBytesOfEntry >= csize ) { return - 1 ; } if ( offsetInBuffer >= lengthOfLastRead ) { offsetInBuffer = 0 ; if ( ( lengthOfLastRead = in . read ( buf ) ) == - 1 ) { return - 1 ; } count ( lengthOfLastRead ) ; bytesReadFromStream += lengthOfLastRead ; } int toRead = length > lengthOfLastRead ? lengthOfLastRead - offsetInBuffer : length ; if ( ( csize - readBytesOfEntry ) < toRead ) { toRead = csize - readBytesOfEntry ; } System . arraycopy ( buf , offsetInBuffer , buffer , start , toRead ) ; offsetInBuffer += toRead ; readBytesOfEntry += toRead ; crc . update ( buffer , start , toRead ) ; return toRead ; } if ( inf . needsInput ( ) ) { fill ( ) ; if ( lengthOfLastRead > 0 ) { bytesReadFromStream += lengthOfLastRead ; } } int read = 0 ; try { read = inf . inflate ( buffer , start , length ) ; } catch ( DataFormatException e ) { throw new ZipException ( e . getMessage ( ) ) ; } if ( read == 0 ) { if ( inf . finished ( ) ) { return - 1 ; } else if ( lengthOfLastRead == - 1 ) { throw new IOException ( "Truncated ZIP file" ) ; } } crc . update ( buffer , start , read ) ; return read ; } throw new ArrayIndexOutOfBoundsException ( ) ; }
Cli
24
src/java/org/apache/commons/cli/HelpFormatter.java
809
852
infinite loop in the wrapping code of HelpFormatter
If there is not enough space to display a word on a single line, HelpFormatter goes into a infinite loops until the JVM crashes with an OutOfMemoryError. Test case: {code} Options options = new Options(); options.addOption("h", "help", false, "This is a looooong description"); HelpFormatter formatter = new HelpFormatter(); formatter.setWidth(20); formatter.printHelp("app", options); // hang & crash {code} An helpful exception indicating the insufficient width would be more appropriate than an OutOfMemoryError.
protected StringBuffer renderWrappedText(StringBuffer sb, int width, int nextLineTabStop, String text) { int pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(rtrim(text)); return sb; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); if (nextLineTabStop >= width) { // stops infinite loop happening throw new IllegalStateException("Total width is less than the width of the argument and indent " + "- no room for the description"); } // all following lines must be padded with nextLineTabStop space // characters final String padding = createPadding(nextLineTabStop); while (true) { text = padding + text.substring(pos).trim(); pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(text); return sb; } if ( (text.length() > width) && (pos == nextLineTabStop - 1) ) { pos = width; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); } }
protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; if ( nextLineTabStop >= width ) { throw new IllegalStateException ( "Total width is less than the width of the argument and indent " + "- no room for the description" ) ; } final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } if ( ( text . length ( ) > width ) && ( pos == nextLineTabStop - 1 ) ) { pos = width ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } }
protected StringBuffer renderWrappedText(StringBuffer sb, int width, int nextLineTabStop, String text) { int pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(rtrim(text)); return sb; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); if (nextLineTabStop >= width) { // stops infinite loop happening nextLineTabStop = width - 1; } // all following lines must be padded with nextLineTabStop space // characters final String padding = createPadding(nextLineTabStop); while (true) { text = padding + text.substring(pos).trim(); pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(text); return sb; } if ( (text.length() > width) && (pos == nextLineTabStop - 1) ) { pos = width; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); } }
protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; if ( nextLineTabStop >= width ) { nextLineTabStop = width - 1 ; } final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } if ( ( text . length ( ) > width ) && ( pos == nextLineTabStop - 1 ) ) { pos = width ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } }
Cli
2
src/java/org/apache/commons/cli/PosixParser.java
278
308
[cli] Parameter value "-something" misinterpreted as a parameter
If a parameter value is passed that contains a hyphen as the (delimited) first character, CLI parses this a parameter. For example using the call java myclass -t "-something" Results in the parser creating the invalid parameter -o (noting that it is skipping the 's') My code is using the Posix parser as follows Options options = buildCommandLineOptions(); CommandLineParser parser = new PosixParser(); CommandLine commandLine = null; try { commandLine = parser.parse(options, args); } catch (ParseException e) { System.out.println("Invalid parameters. " + e.getMessage() + NEW_LINE); System.exit(EXIT_CODE_ERROR); } This has been tested against the nightly build dated 20050503.
protected void burstToken(String token, boolean stopAtNonOption) { int tokenLength = token.length(); for (int i = 1; i < tokenLength; i++) { String ch = String.valueOf(token.charAt(i)); boolean hasOption = options.hasOption(ch); if (hasOption) { tokens.add("-" + ch); currentOption = options.getOption(ch); if (currentOption.hasArg() && (token.length() != (i + 1))) { tokens.add(token.substring(i + 1)); break; } } else if (stopAtNonOption) { process(token.substring(i)); } else { tokens.add("-" + ch); } } }
protected void burstToken ( String token , boolean stopAtNonOption ) { int tokenLength = token . length ( ) ; for ( int i = 1 ; i < tokenLength ; i ++ ) { String ch = String . valueOf ( token . charAt ( i ) ) ; boolean hasOption = options . hasOption ( ch ) ; if ( hasOption ) { tokens . add ( "-" + ch ) ; currentOption = options . getOption ( ch ) ; if ( currentOption . hasArg ( ) && ( token . length ( ) != ( i + 1 ) ) ) { tokens . add ( token . substring ( i + 1 ) ) ; break ; } } else if ( stopAtNonOption ) { process ( token . substring ( i ) ) ; } else { tokens . add ( "-" + ch ) ; } } }
protected void burstToken(String token, boolean stopAtNonOption) { int tokenLength = token.length(); for (int i = 1; i < tokenLength; i++) { String ch = String.valueOf(token.charAt(i)); boolean hasOption = options.hasOption(ch); if (hasOption) { tokens.add("-" + ch); currentOption = options.getOption(ch); if (currentOption.hasArg() && (token.length() != (i + 1))) { tokens.add(token.substring(i + 1)); break; } } else if (stopAtNonOption) { process(token.substring(i)); } else { tokens.add(token); break; } } }
protected void burstToken ( String token , boolean stopAtNonOption ) { int tokenLength = token . length ( ) ; for ( int i = 1 ; i < tokenLength ; i ++ ) { String ch = String . valueOf ( token . charAt ( i ) ) ; boolean hasOption = options . hasOption ( ch ) ; if ( hasOption ) { tokens . add ( "-" + ch ) ; currentOption = options . getOption ( ch ) ; if ( currentOption . hasArg ( ) && ( token . length ( ) != ( i + 1 ) ) ) { tokens . add ( token . substring ( i + 1 ) ) ; break ; } } else if ( stopAtNonOption ) { process ( token . substring ( i ) ) ; } else { tokens . add ( token ) ; break ; } } }
JacksonDatabind
17
src/main/java/com/fasterxml/jackson/databind/ObjectMapper.java
167
193
readTree does not work with defaultTyping enabled but no type info provided
I have enabled `defaultTyping`, and serialized `Foo` entity with no type info. I'm trying to read json as a tree with `mapper.readTree(json)`, and it throws an exception ``` java Exception in thread "main" com.fasterxml.jackson.databind.JsonMappingException: Unexpected token (START_OBJECT), expected START_ARRAY: need JSON Array to contain As.WRAPPER_ARRAY type information for class com.fasterxml.jackson.databind.JsonNode at [Source: { "bar" : "bar" }; line: 1, column: 1] at com.fasterxml.jackson.databind.JsonMappingException.from(JsonMappingException.java:148) at com.fasterxml.jackson.databind.DeserializationContext.wrongTokenException(DeserializationContext.java:927) at com.fasterxml.jackson.databind.jsontype.impl.AsArrayTypeDeserializer._locateTypeId(AsArrayTypeDeserializer.java:127) at com.fasterxml.jackson.databind.jsontype.impl.AsArrayTypeDeserializer._deserialize(AsArrayTypeDeserializer.java:93) at com.fasterxml.jackson.databind.jsontype.impl.AsArrayTypeDeserializer.deserializeTypedFromAny(AsArrayTypeDeserializer.java:68) at com.fasterxml.jackson.databind.deser.std.BaseNodeDeserializer.deserializeWithType(JsonNodeDeserializer.java:144) at com.fasterxml.jackson.databind.deser.std.JsonNodeDeserializer.deserializeWithType(JsonNodeDeserializer.java:14) at com.fasterxml.jackson.databind.deser.impl.TypeWrappedDeserializer.deserialize(TypeWrappedDeserializer.java:42) at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3562) at com.fasterxml.jackson.databind.ObjectMapper.readTree(ObjectMapper.java:2136) at test.App.main(App.java:23) ``` However, if I disable `defaultTyping`, the same code works fine. So, `readTree(json)` does not actually need type info for the root element, because it works when `defaultTyping` is disabled (i.e. `{"bar" : "bar"}`), but it throws the exception when `defaultTyping` is enabled, that's why it looks like a bug. The same thing happens for `valueToTree(foo)`. Jackson version is `2.5.3` Full code is provided. 
``` java import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import java.io.IOException; public class App { public static void main(String[] args) throws IOException { ObjectMapper mapper = new ObjectMapper() .enableDefaultTyping() // works fine with disableDefaultTyping() .enable(MapperFeature.AUTO_DETECT_GETTERS) .enable(MapperFeature.REQUIRE_SETTERS_FOR_GETTERS) .disable(MapperFeature.USE_GETTERS_AS_SETTERS) .disable(MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS) .enable(SerializationFeature.INDENT_OUTPUT) .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS); Foo foo = new Foo("bar"); String serialized = mapper.writeValueAsString(foo); // {"bar" : "bar"} JsonNode jsonNode = mapper.readTree(serialized); // exception here JsonNode node = mapper.valueToTree(foo); // and here } public static class Foo { private String bar; public Foo() { } public Foo(String bar) { this.bar = bar; } public String getBar() { return bar; } public void setBar(String bar) { this.bar = bar; } } } ```
public boolean useForType(JavaType t) { switch (_appliesFor) { case NON_CONCRETE_AND_ARRAYS: while (t.isArrayType()) { t = t.getContentType(); } // fall through case OBJECT_AND_NON_CONCRETE: // return t.isJavaLangObject() || return (t.getRawClass() == Object.class) || (!t.isConcrete() // [databind#88] Should not apply to JSON tree models: || TreeNode.class.isAssignableFrom(t.getRawClass())); case NON_FINAL: while (t.isArrayType()) { t = t.getContentType(); } // [Issue#88] Should not apply to JSON tree models: return !t.isFinal() && !TreeNode.class.isAssignableFrom(t.getRawClass()); default: //case JAVA_LANG_OBJECT: // return t.isJavaLangObject(); return (t.getRawClass() == Object.class); } }
public boolean useForType ( JavaType t ) { switch ( _appliesFor ) { case NON_CONCRETE_AND_ARRAYS : while ( t . isArrayType ( ) ) { t = t . getContentType ( ) ; } case OBJECT_AND_NON_CONCRETE : return ( t . getRawClass ( ) == Object . class ) || ( ! t . isConcrete ( ) || TreeNode . class . isAssignableFrom ( t . getRawClass ( ) ) ) ; case NON_FINAL : while ( t . isArrayType ( ) ) { t = t . getContentType ( ) ; } return ! t . isFinal ( ) && ! TreeNode . class . isAssignableFrom ( t . getRawClass ( ) ) ; default : return ( t . getRawClass ( ) == Object . class ) ; } }
public boolean useForType(JavaType t) { switch (_appliesFor) { case NON_CONCRETE_AND_ARRAYS: while (t.isArrayType()) { t = t.getContentType(); } // fall through case OBJECT_AND_NON_CONCRETE: // return t.isJavaLangObject() || return (t.getRawClass() == Object.class) || (!t.isConcrete() // [databind#88] Should not apply to JSON tree models: && !TreeNode.class.isAssignableFrom(t.getRawClass())); case NON_FINAL: while (t.isArrayType()) { t = t.getContentType(); } // [Issue#88] Should not apply to JSON tree models: return !t.isFinal() && !TreeNode.class.isAssignableFrom(t.getRawClass()); default: //case JAVA_LANG_OBJECT: // return t.isJavaLangObject(); return (t.getRawClass() == Object.class); } }
public boolean useForType ( JavaType t ) { switch ( _appliesFor ) { case NON_CONCRETE_AND_ARRAYS : while ( t . isArrayType ( ) ) { t = t . getContentType ( ) ; } case OBJECT_AND_NON_CONCRETE : return ( t . getRawClass ( ) == Object . class ) || ( ! t . isConcrete ( ) && ! TreeNode . class . isAssignableFrom ( t . getRawClass ( ) ) ) ; case NON_FINAL : while ( t . isArrayType ( ) ) { t = t . getContentType ( ) ; } return ! t . isFinal ( ) && ! TreeNode . class . isAssignableFrom ( t . getRawClass ( ) ) ; default : return ( t . getRawClass ( ) == Object . class ) ; } }
Math
33
src/main/java/org/apache/commons/math3/optimization/linear/SimplexTableau.java
327
367
SimplexSolver gives bad results
Methode SimplexSolver.optimeze(...) gives bad results with commons-math3-3.0 in a simple test problem. It works well in commons-math-2.2.
protected void dropPhase1Objective() { if (getNumObjectiveFunctions() == 1) { return; } List<Integer> columnsToDrop = new ArrayList<Integer>(); columnsToDrop.add(0); // positive cost non-artificial variables for (int i = getNumObjectiveFunctions(); i < getArtificialVariableOffset(); i++) { final double entry = tableau.getEntry(0, i); if (Precision.compareTo(entry, 0d, maxUlps) > 0) { columnsToDrop.add(i); } } // non-basic artificial variables for (int i = 0; i < getNumArtificialVariables(); i++) { int col = i + getArtificialVariableOffset(); if (getBasicRow(col) == null) { columnsToDrop.add(col); } } double[][] matrix = new double[getHeight() - 1][getWidth() - columnsToDrop.size()]; for (int i = 1; i < getHeight(); i++) { int col = 0; for (int j = 0; j < getWidth(); j++) { if (!columnsToDrop.contains(j)) { matrix[i - 1][col++] = tableau.getEntry(i, j); } } } for (int i = columnsToDrop.size() - 1; i >= 0; i--) { columnLabels.remove((int) columnsToDrop.get(i)); } this.tableau = new Array2DRowRealMatrix(matrix); this.numArtificialVariables = 0; }
protected void dropPhase1Objective ( ) { if ( getNumObjectiveFunctions ( ) == 1 ) { return ; } List < Integer > columnsToDrop = new ArrayList < Integer > ( ) ; columnsToDrop . add ( 0 ) ; for ( int i = getNumObjectiveFunctions ( ) ; i < getArtificialVariableOffset ( ) ; i ++ ) { final double entry = tableau . getEntry ( 0 , i ) ; if ( Precision . compareTo ( entry , 0d , maxUlps ) > 0 ) { columnsToDrop . add ( i ) ; } } for ( int i = 0 ; i < getNumArtificialVariables ( ) ; i ++ ) { int col = i + getArtificialVariableOffset ( ) ; if ( getBasicRow ( col ) == null ) { columnsToDrop . add ( col ) ; } } double [ ] [ ] matrix = new double [ getHeight ( ) - 1 ] [ getWidth ( ) - columnsToDrop . size ( ) ] ; for ( int i = 1 ; i < getHeight ( ) ; i ++ ) { int col = 0 ; for ( int j = 0 ; j < getWidth ( ) ; j ++ ) { if ( ! columnsToDrop . contains ( j ) ) { matrix [ i - 1 ] [ col ++ ] = tableau . getEntry ( i , j ) ; } } } for ( int i = columnsToDrop . size ( ) - 1 ; i >= 0 ; i -- ) { columnLabels . remove ( ( int ) columnsToDrop . get ( i ) ) ; } this . tableau = new Array2DRowRealMatrix ( matrix ) ; this . numArtificialVariables = 0 ; }
protected void dropPhase1Objective() { if (getNumObjectiveFunctions() == 1) { return; } List<Integer> columnsToDrop = new ArrayList<Integer>(); columnsToDrop.add(0); // positive cost non-artificial variables for (int i = getNumObjectiveFunctions(); i < getArtificialVariableOffset(); i++) { final double entry = tableau.getEntry(0, i); if (Precision.compareTo(entry, 0d, epsilon) > 0) { columnsToDrop.add(i); } } // non-basic artificial variables for (int i = 0; i < getNumArtificialVariables(); i++) { int col = i + getArtificialVariableOffset(); if (getBasicRow(col) == null) { columnsToDrop.add(col); } } double[][] matrix = new double[getHeight() - 1][getWidth() - columnsToDrop.size()]; for (int i = 1; i < getHeight(); i++) { int col = 0; for (int j = 0; j < getWidth(); j++) { if (!columnsToDrop.contains(j)) { matrix[i - 1][col++] = tableau.getEntry(i, j); } } } for (int i = columnsToDrop.size() - 1; i >= 0; i--) { columnLabels.remove((int) columnsToDrop.get(i)); } this.tableau = new Array2DRowRealMatrix(matrix); this.numArtificialVariables = 0; }
protected void dropPhase1Objective ( ) { if ( getNumObjectiveFunctions ( ) == 1 ) { return ; } List < Integer > columnsToDrop = new ArrayList < Integer > ( ) ; columnsToDrop . add ( 0 ) ; for ( int i = getNumObjectiveFunctions ( ) ; i < getArtificialVariableOffset ( ) ; i ++ ) { final double entry = tableau . getEntry ( 0 , i ) ; if ( Precision . compareTo ( entry , 0d , epsilon ) > 0 ) { columnsToDrop . add ( i ) ; } } for ( int i = 0 ; i < getNumArtificialVariables ( ) ; i ++ ) { int col = i + getArtificialVariableOffset ( ) ; if ( getBasicRow ( col ) == null ) { columnsToDrop . add ( col ) ; } } double [ ] [ ] matrix = new double [ getHeight ( ) - 1 ] [ getWidth ( ) - columnsToDrop . size ( ) ] ; for ( int i = 1 ; i < getHeight ( ) ; i ++ ) { int col = 0 ; for ( int j = 0 ; j < getWidth ( ) ; j ++ ) { if ( ! columnsToDrop . contains ( j ) ) { matrix [ i - 1 ] [ col ++ ] = tableau . getEntry ( i , j ) ; } } } for ( int i = columnsToDrop . size ( ) - 1 ; i >= 0 ; i -- ) { columnLabels . remove ( ( int ) columnsToDrop . get ( i ) ) ; } this . tableau = new Array2DRowRealMatrix ( matrix ) ; this . numArtificialVariables = 0 ; }
JacksonDatabind
112
src/main/java/com/fasterxml/jackson/databind/deser/std/StringCollectionDeserializer.java
99
134
`StringCollectionDeserializer` fails with custom collection
Seeing this with Jackson 2.9.8. We have a custom collection implementation, which is wired to use its "immutable" version for deserialization. The rationale is that we don't want accidental modifications to the data structures that come from the wire, so they all are forced to be immutable. After upgrade from 2.6.3 to 2.9.8, the deserialization started breaking with the message: >Cannot construct instance of `XXX` (although at least one Creator exists): no default no-arguments constructor found This happens ONLY when you deserialize a custom collection of strings as a property of the other object. Deserializing the custom collection of strings directly works fine, and so does the deserialization of custom collection of non-strings. I believe either the `StringCollectionDeserializer` should not be invoked for custom collections, or perhaps it does not handle the delegation as expected. Please see comments for repro and workaround. Thanks!
@Override public JsonDeserializer<?> createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { // May need to resolve types for delegate-based creators: JsonDeserializer<Object> delegate = null; if (_valueInstantiator != null) { // [databind#2324]: check both array-delegating and delegating AnnotatedWithParams delegateCreator = _valueInstantiator.getDelegateCreator(); if (delegateCreator != null) { JavaType delegateType = _valueInstantiator.getDelegateType(ctxt.getConfig()); delegate = findDeserializer(ctxt, delegateType, property); } } JsonDeserializer<?> valueDeser = _valueDeserializer; final JavaType valueType = _containerType.getContentType(); if (valueDeser == null) { // [databind#125]: May have a content converter valueDeser = findConvertingContentDeserializer(ctxt, property, valueDeser); if (valueDeser == null) { // And we may also need to get deserializer for String valueDeser = ctxt.findContextualValueDeserializer(valueType, property); } } else { // if directly assigned, probably not yet contextual, so: valueDeser = ctxt.handleSecondaryContextualization(valueDeser, property, valueType); } // 11-Dec-2015, tatu: Should we pass basic `Collection.class`, or more refined? Mostly // comes down to "List vs Collection" I suppose... for now, pass Collection Boolean unwrapSingle = findFormatFeature(ctxt, property, Collection.class, JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY); NullValueProvider nuller = findContentNullProvider(ctxt, property, valueDeser); if (isDefaultDeserializer(valueDeser)) { valueDeser = null; } return withResolved(delegate, valueDeser, nuller, unwrapSingle); }
@ Override public JsonDeserializer < ? > createContextual ( DeserializationContext ctxt , BeanProperty property ) throws JsonMappingException { JsonDeserializer < Object > delegate = null ; if ( _valueInstantiator != null ) { AnnotatedWithParams delegateCreator = _valueInstantiator . getDelegateCreator ( ) ; if ( delegateCreator != null ) { JavaType delegateType = _valueInstantiator . getDelegateType ( ctxt . getConfig ( ) ) ; delegate = findDeserializer ( ctxt , delegateType , property ) ; } } JsonDeserializer < ? > valueDeser = _valueDeserializer ; final JavaType valueType = _containerType . getContentType ( ) ; if ( valueDeser == null ) { valueDeser = findConvertingContentDeserializer ( ctxt , property , valueDeser ) ; if ( valueDeser == null ) { valueDeser = ctxt . findContextualValueDeserializer ( valueType , property ) ; } } else { valueDeser = ctxt . handleSecondaryContextualization ( valueDeser , property , valueType ) ; } Boolean unwrapSingle = findFormatFeature ( ctxt , property , Collection . class , JsonFormat . Feature . ACCEPT_SINGLE_VALUE_AS_ARRAY ) ; NullValueProvider nuller = findContentNullProvider ( ctxt , property , valueDeser ) ; if ( isDefaultDeserializer ( valueDeser ) ) { valueDeser = null ; } return withResolved ( delegate , valueDeser , nuller , unwrapSingle ) ; }
@Override public JsonDeserializer<?> createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { // May need to resolve types for delegate-based creators: JsonDeserializer<Object> delegate = null; if (_valueInstantiator != null) { // [databind#2324]: check both array-delegating and delegating AnnotatedWithParams delegateCreator = _valueInstantiator.getArrayDelegateCreator(); if (delegateCreator != null) { JavaType delegateType = _valueInstantiator.getArrayDelegateType(ctxt.getConfig()); delegate = findDeserializer(ctxt, delegateType, property); } else if ((delegateCreator = _valueInstantiator.getDelegateCreator()) != null) { JavaType delegateType = _valueInstantiator.getDelegateType(ctxt.getConfig()); delegate = findDeserializer(ctxt, delegateType, property); } } JsonDeserializer<?> valueDeser = _valueDeserializer; final JavaType valueType = _containerType.getContentType(); if (valueDeser == null) { // [databind#125]: May have a content converter valueDeser = findConvertingContentDeserializer(ctxt, property, valueDeser); if (valueDeser == null) { // And we may also need to get deserializer for String valueDeser = ctxt.findContextualValueDeserializer(valueType, property); } } else { // if directly assigned, probably not yet contextual, so: valueDeser = ctxt.handleSecondaryContextualization(valueDeser, property, valueType); } // 11-Dec-2015, tatu: Should we pass basic `Collection.class`, or more refined? Mostly // comes down to "List vs Collection" I suppose... for now, pass Collection Boolean unwrapSingle = findFormatFeature(ctxt, property, Collection.class, JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY); NullValueProvider nuller = findContentNullProvider(ctxt, property, valueDeser); if (isDefaultDeserializer(valueDeser)) { valueDeser = null; } return withResolved(delegate, valueDeser, nuller, unwrapSingle); }
@ Override public JsonDeserializer < ? > createContextual ( DeserializationContext ctxt , BeanProperty property ) throws JsonMappingException { JsonDeserializer < Object > delegate = null ; if ( _valueInstantiator != null ) { AnnotatedWithParams delegateCreator = _valueInstantiator . getArrayDelegateCreator ( ) ; if ( delegateCreator != null ) { JavaType delegateType = _valueInstantiator . getArrayDelegateType ( ctxt . getConfig ( ) ) ; delegate = findDeserializer ( ctxt , delegateType , property ) ; } else if ( ( delegateCreator = _valueInstantiator . getDelegateCreator ( ) ) != null ) { JavaType delegateType = _valueInstantiator . getDelegateType ( ctxt . getConfig ( ) ) ; delegate = findDeserializer ( ctxt , delegateType , property ) ; } } JsonDeserializer < ? > valueDeser = _valueDeserializer ; final JavaType valueType = _containerType . getContentType ( ) ; if ( valueDeser == null ) { valueDeser = findConvertingContentDeserializer ( ctxt , property , valueDeser ) ; if ( valueDeser == null ) { valueDeser = ctxt . findContextualValueDeserializer ( valueType , property ) ; } } else { valueDeser = ctxt . handleSecondaryContextualization ( valueDeser , property , valueType ) ; } Boolean unwrapSingle = findFormatFeature ( ctxt , property , Collection . class , JsonFormat . Feature . ACCEPT_SINGLE_VALUE_AS_ARRAY ) ; NullValueProvider nuller = findContentNullProvider ( ctxt , property , valueDeser ) ; if ( isDefaultDeserializer ( valueDeser ) ) { valueDeser = null ; } return withResolved ( delegate , valueDeser , nuller , unwrapSingle ) ; }
Cli
32
src/main/java/org/apache/commons/cli/HelpFormatter.java
902
943
StringIndexOutOfBoundsException in HelpFormatter.findWrapPos
In the last while loop in HelpFormatter.findWrapPos, it can pass text.length() to text.charAt(int), which throws a StringIndexOutOfBoundsException. The first expression in that while loop condition should use a <, not a <=. This is on line 908 in r779646: http://svn.apache.org/viewvc/commons/proper/cli/trunk/src/java/org/apache/commons/cli/HelpFormatter.java?revision=779646&view=markup
protected int findWrapPos(String text, int width, int startPos) { int pos; // the line ends before the max wrap pos or a new line char found if (((pos = text.indexOf('\n', startPos)) != -1 && pos <= width) || ((pos = text.indexOf('\t', startPos)) != -1 && pos <= width)) { return pos + 1; } else if (startPos + width >= text.length()) { return -1; } // look for the last whitespace character before startPos+width pos = startPos + width; char c; while ((pos >= startPos) && ((c = text.charAt(pos)) != ' ') && (c != '\n') && (c != '\r')) { --pos; } // if we found it - just return if (pos > startPos) { return pos; } // if we didn't find one, simply chop at startPos+width pos = startPos + width; while ((pos <= text.length()) && ((c = text.charAt(pos)) != ' ') && (c != '\n') && (c != '\r')) { ++pos; } return pos == text.length() ? -1 : pos; }
protected int findWrapPos ( String text , int width , int startPos ) { int pos ; if ( ( ( pos = text . indexOf ( '\n' , startPos ) ) != - 1 && pos <= width ) || ( ( pos = text . indexOf ( '\t' , startPos ) ) != - 1 && pos <= width ) ) { return pos + 1 ; } else if ( startPos + width >= text . length ( ) ) { return - 1 ; } pos = startPos + width ; char c ; while ( ( pos >= startPos ) && ( ( c = text . charAt ( pos ) ) != ' ' ) && ( c != '\n' ) && ( c != '\r' ) ) { -- pos ; } if ( pos > startPos ) { return pos ; } pos = startPos + width ; while ( ( pos <= text . length ( ) ) && ( ( c = text . charAt ( pos ) ) != ' ' ) && ( c != '\n' ) && ( c != '\r' ) ) { ++ pos ; } return pos == text . length ( ) ? - 1 : pos ; }
protected int findWrapPos(String text, int width, int startPos) { int pos; // the line ends before the max wrap pos or a new line char found if (((pos = text.indexOf('\n', startPos)) != -1 && pos <= width) || ((pos = text.indexOf('\t', startPos)) != -1 && pos <= width)) { return pos + 1; } else if (startPos + width >= text.length()) { return -1; } // look for the last whitespace character before startPos+width pos = startPos + width; char c; while ((pos >= startPos) && ((c = text.charAt(pos)) != ' ') && (c != '\n') && (c != '\r')) { --pos; } // if we found it - just return if (pos > startPos) { return pos; } // if we didn't find one, simply chop at startPos+width pos = startPos + width; return pos == text.length() ? -1 : pos; }
protected int findWrapPos ( String text , int width , int startPos ) { int pos ; if ( ( ( pos = text . indexOf ( '\n' , startPos ) ) != - 1 && pos <= width ) || ( ( pos = text . indexOf ( '\t' , startPos ) ) != - 1 && pos <= width ) ) { return pos + 1 ; } else if ( startPos + width >= text . length ( ) ) { return - 1 ; } pos = startPos + width ; char c ; while ( ( pos >= startPos ) && ( ( c = text . charAt ( pos ) ) != ' ' ) && ( c != '\n' ) && ( c != '\r' ) ) { -- pos ; } if ( pos > startPos ) { return pos ; } pos = startPos + width ; return pos == text . length ( ) ? - 1 : pos ; }
JacksonCore
20
src/main/java/com/fasterxml/jackson/core/JsonGenerator.java
1328
1332
Add support for writing `byte[]` via `JsonGenerator.writeEmbeddedObject()`
(note: should be safe for patch, that is, 2.8.3) Default implementation of 2.8-added `writeEmbeddedObject()` throws exception (unsupported operation) for all values, since JSON does not have any native object types. This is different from handling of `writeObject()`, which tries to either delegate to `ObjectCodec` (if one registered), or even encode "simple" values. However: since support for binary data is already handled in some cases using `VALUE_EMBEDDED_OBJECT`, it would actually make sense to handle case of `byte[]` (and, if feasible, perhaps `ByteBuffer` for extra points), and also ensure `null` can be written. This is likely necessary to support https://github.com/FasterXML/jackson-databind/issues/1361 and should in general make system more robust.
public void writeEmbeddedObject(Object object) throws IOException { // 01-Sep-2016, tatu: As per [core#318], handle small number of cases throw new JsonGenerationException("No native support for writing embedded objects", this); }
public void writeEmbeddedObject ( Object object ) throws IOException { throw new JsonGenerationException ( "No native support for writing embedded objects" , this ) ; }
public void writeEmbeddedObject(Object object) throws IOException { // 01-Sep-2016, tatu: As per [core#318], handle small number of cases if (object == null) { writeNull(); return; } if (object instanceof byte[]) { writeBinary((byte[]) object); return; } throw new JsonGenerationException("No native support for writing embedded objects of type " +object.getClass().getName(), this); }
public void writeEmbeddedObject ( Object object ) throws IOException { if ( object == null ) { writeNull ( ) ; return ; } if ( object instanceof byte [ ] ) { writeBinary ( ( byte [ ] ) object ) ; return ; } throw new JsonGenerationException ( "No native support for writing embedded objects of type " + object . getClass ( ) . getName ( ) , this ) ; }
Math
64
src/main/java/org/apache/commons/math/optimization/general/LevenbergMarquardtOptimizer.java
240
464
Inconsistent result from Levenberg-Marquardt
Levenberg-Marquardt (its method doOptimize) returns a VectorialPointValuePair. However, the class holds the optimum point, the vector of the objective function, the cost and residuals. The value returns by doOptimize does not always corresponds to the point which leads to the residuals and cost
@Override protected VectorialPointValuePair doOptimize() throws FunctionEvaluationException, OptimizationException, IllegalArgumentException { // arrays shared with the other private methods solvedCols = Math.min(rows, cols); diagR = new double[cols]; jacNorm = new double[cols]; beta = new double[cols]; permutation = new int[cols]; lmDir = new double[cols]; // local point double delta = 0; double xNorm = 0; double[] diag = new double[cols]; double[] oldX = new double[cols]; double[] oldRes = new double[rows]; double[] work1 = new double[cols]; double[] work2 = new double[cols]; double[] work3 = new double[cols]; // evaluate the function at the starting point and calculate its norm updateResidualsAndCost(); // outer loop lmPar = 0; boolean firstIteration = true; VectorialPointValuePair current = new VectorialPointValuePair(point, objective); while (true) { incrementIterationsCounter(); // compute the Q.R. decomposition of the jacobian matrix VectorialPointValuePair previous = current; updateJacobian(); qrDecomposition(); // compute Qt.res qTy(residuals); // now we don't need Q anymore, // so let jacobian contain the R matrix with its diagonal elements for (int k = 0; k < solvedCols; ++k) { int pk = permutation[k]; jacobian[k][pk] = diagR[pk]; } if (firstIteration) { // scale the point according to the norms of the columns // of the initial jacobian xNorm = 0; for (int k = 0; k < cols; ++k) { double dk = jacNorm[k]; if (dk == 0) { dk = 1.0; } double xk = dk * point[k]; xNorm += xk * xk; diag[k] = dk; } xNorm = Math.sqrt(xNorm); // initialize the step bound delta delta = (xNorm == 0) ? 
initialStepBoundFactor : (initialStepBoundFactor * xNorm); } // check orthogonality between function vector and jacobian columns double maxCosine = 0; if (cost != 0) { for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; double s = jacNorm[pj]; if (s != 0) { double sum = 0; for (int i = 0; i <= j; ++i) { sum += jacobian[i][pj] * residuals[i]; } maxCosine = Math.max(maxCosine, Math.abs(sum) / (s * cost)); } } } if (maxCosine <= orthoTolerance) { // convergence has been reached return current; } // rescale if necessary for (int j = 0; j < cols; ++j) { diag[j] = Math.max(diag[j], jacNorm[j]); } // inner loop for (double ratio = 0; ratio < 1.0e-4;) { // save the state for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; oldX[pj] = point[pj]; } double previousCost = cost; double[] tmpVec = residuals; residuals = oldRes; oldRes = tmpVec; // determine the Levenberg-Marquardt parameter determineLMParameter(oldRes, delta, diag, work1, work2, work3); // compute the new point and the norm of the evolution direction double lmNorm = 0; for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; lmDir[pj] = -lmDir[pj]; point[pj] = oldX[pj] + lmDir[pj]; double s = diag[pj] * lmDir[pj]; lmNorm += s * s; } lmNorm = Math.sqrt(lmNorm); // on the first iteration, adjust the initial step bound. 
if (firstIteration) { delta = Math.min(delta, lmNorm); } // evaluate the function at x + p and calculate its norm updateResidualsAndCost(); current = new VectorialPointValuePair(point, objective); // compute the scaled actual reduction double actRed = -1.0; if (0.1 * cost < previousCost) { double r = cost / previousCost; actRed = 1.0 - r * r; } // compute the scaled predicted reduction // and the scaled directional derivative for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; double dirJ = lmDir[pj]; work1[j] = 0; for (int i = 0; i <= j; ++i) { work1[i] += jacobian[i][pj] * dirJ; } } double coeff1 = 0; for (int j = 0; j < solvedCols; ++j) { coeff1 += work1[j] * work1[j]; } double pc2 = previousCost * previousCost; coeff1 = coeff1 / pc2; double coeff2 = lmPar * lmNorm * lmNorm / pc2; double preRed = coeff1 + 2 * coeff2; double dirDer = -(coeff1 + coeff2); // ratio of the actual to the predicted reduction ratio = (preRed == 0) ? 0 : (actRed / preRed); // update the step bound if (ratio <= 0.25) { double tmp = (actRed < 0) ? (0.5 * dirDer / (dirDer + 0.5 * actRed)) : 0.5; if ((0.1 * cost >= previousCost) || (tmp < 0.1)) { tmp = 0.1; } delta = tmp * Math.min(delta, 10.0 * lmNorm); lmPar /= tmp; } else if ((lmPar == 0) || (ratio >= 0.75)) { delta = 2 * lmNorm; lmPar *= 0.5; } // test for successful iteration. if (ratio >= 1.0e-4) { // successful iteration, update the norm firstIteration = false; xNorm = 0; for (int k = 0; k < cols; ++k) { double xK = diag[k] * point[k]; xNorm += xK * xK; } xNorm = Math.sqrt(xNorm); // tests for convergence. 
// we use the vectorial convergence checker } else { // failed iteration, reset the previous values cost = previousCost; for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; point[pj] = oldX[pj]; } tmpVec = residuals; residuals = oldRes; oldRes = tmpVec; } if (checker==null) { if (((Math.abs(actRed) <= costRelativeTolerance) && (preRed <= costRelativeTolerance) && (ratio <= 2.0)) || (delta <= parRelativeTolerance * xNorm)) { return current; } } else { if (checker.converged(getIterations(), previous, current)) { return current; } } // tests for termination and stringent tolerances // (2.2204e-16 is the machine epsilon for IEEE754) if ((Math.abs(actRed) <= 2.2204e-16) && (preRed <= 2.2204e-16) && (ratio <= 2.0)) { throw new OptimizationException(LocalizedFormats.TOO_SMALL_COST_RELATIVE_TOLERANCE, costRelativeTolerance); } else if (delta <= 2.2204e-16 * xNorm) { throw new OptimizationException(LocalizedFormats.TOO_SMALL_PARAMETERS_RELATIVE_TOLERANCE, parRelativeTolerance); } else if (maxCosine <= 2.2204e-16) { throw new OptimizationException(LocalizedFormats.TOO_SMALL_ORTHOGONALITY_TOLERANCE, orthoTolerance); } } } }
@ Override protected VectorialPointValuePair doOptimize ( ) throws FunctionEvaluationException , OptimizationException , IllegalArgumentException { solvedCols = Math . min ( rows , cols ) ; diagR = new double [ cols ] ; jacNorm = new double [ cols ] ; beta = new double [ cols ] ; permutation = new int [ cols ] ; lmDir = new double [ cols ] ; double delta = 0 ; double xNorm = 0 ; double [ ] diag = new double [ cols ] ; double [ ] oldX = new double [ cols ] ; double [ ] oldRes = new double [ rows ] ; double [ ] work1 = new double [ cols ] ; double [ ] work2 = new double [ cols ] ; double [ ] work3 = new double [ cols ] ; updateResidualsAndCost ( ) ; lmPar = 0 ; boolean firstIteration = true ; VectorialPointValuePair current = new VectorialPointValuePair ( point , objective ) ; while ( true ) { incrementIterationsCounter ( ) ; VectorialPointValuePair previous = current ; updateJacobian ( ) ; qrDecomposition ( ) ; qTy ( residuals ) ; for ( int k = 0 ; k < solvedCols ; ++ k ) { int pk = permutation [ k ] ; jacobian [ k ] [ pk ] = diagR [ pk ] ; } if ( firstIteration ) { xNorm = 0 ; for ( int k = 0 ; k < cols ; ++ k ) { double dk = jacNorm [ k ] ; if ( dk == 0 ) { dk = 1.0 ; } double xk = dk * point [ k ] ; xNorm += xk * xk ; diag [ k ] = dk ; } xNorm = Math . sqrt ( xNorm ) ; delta = ( xNorm == 0 ) ? initialStepBoundFactor : ( initialStepBoundFactor * xNorm ) ; } double maxCosine = 0 ; if ( cost != 0 ) { for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; double s = jacNorm [ pj ] ; if ( s != 0 ) { double sum = 0 ; for ( int i = 0 ; i <= j ; ++ i ) { sum += jacobian [ i ] [ pj ] * residuals [ i ] ; } maxCosine = Math . max ( maxCosine , Math . abs ( sum ) / ( s * cost ) ) ; } } } if ( maxCosine <= orthoTolerance ) { return current ; } for ( int j = 0 ; j < cols ; ++ j ) { diag [ j ] = Math . 
max ( diag [ j ] , jacNorm [ j ] ) ; } for ( double ratio = 0 ; ratio < 1.0e-4 ; ) { for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; oldX [ pj ] = point [ pj ] ; } double previousCost = cost ; double [ ] tmpVec = residuals ; residuals = oldRes ; oldRes = tmpVec ; determineLMParameter ( oldRes , delta , diag , work1 , work2 , work3 ) ; double lmNorm = 0 ; for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; lmDir [ pj ] = - lmDir [ pj ] ; point [ pj ] = oldX [ pj ] + lmDir [ pj ] ; double s = diag [ pj ] * lmDir [ pj ] ; lmNorm += s * s ; } lmNorm = Math . sqrt ( lmNorm ) ; if ( firstIteration ) { delta = Math . min ( delta , lmNorm ) ; } updateResidualsAndCost ( ) ; current = new VectorialPointValuePair ( point , objective ) ; double actRed = - 1.0 ; if ( 0.1 * cost < previousCost ) { double r = cost / previousCost ; actRed = 1.0 - r * r ; } for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; double dirJ = lmDir [ pj ] ; work1 [ j ] = 0 ; for ( int i = 0 ; i <= j ; ++ i ) { work1 [ i ] += jacobian [ i ] [ pj ] * dirJ ; } } double coeff1 = 0 ; for ( int j = 0 ; j < solvedCols ; ++ j ) { coeff1 += work1 [ j ] * work1 [ j ] ; } double pc2 = previousCost * previousCost ; coeff1 = coeff1 / pc2 ; double coeff2 = lmPar * lmNorm * lmNorm / pc2 ; double preRed = coeff1 + 2 * coeff2 ; double dirDer = - ( coeff1 + coeff2 ) ; ratio = ( preRed == 0 ) ? 0 : ( actRed / preRed ) ; if ( ratio <= 0.25 ) { double tmp = ( actRed < 0 ) ? ( 0.5 * dirDer / ( dirDer + 0.5 * actRed ) ) : 0.5 ; if ( ( 0.1 * cost >= previousCost ) || ( tmp < 0.1 ) ) { tmp = 0.1 ; } delta = tmp * Math . min ( delta , 10.0 * lmNorm ) ; lmPar /= tmp ; } else if ( ( lmPar == 0 ) || ( ratio >= 0.75 ) ) { delta = 2 * lmNorm ; lmPar *= 0.5 ; } if ( ratio >= 1.0e-4 ) { firstIteration = false ; xNorm = 0 ; for ( int k = 0 ; k < cols ; ++ k ) { double xK = diag [ k ] * point [ k ] ; xNorm += xK * xK ; } xNorm = Math . 
sqrt ( xNorm ) ; } else { cost = previousCost ; for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; point [ pj ] = oldX [ pj ] ; } tmpVec = residuals ; residuals = oldRes ; oldRes = tmpVec ; } if ( checker == null ) { if ( ( ( Math . abs ( actRed ) <= costRelativeTolerance ) && ( preRed <= costRelativeTolerance ) && ( ratio <= 2.0 ) ) || ( delta <= parRelativeTolerance * xNorm ) ) { return current ; } } else { if ( checker . converged ( getIterations ( ) , previous , current ) ) { return current ; } } if ( ( Math . abs ( actRed ) <= 2.2204e-16 ) && ( preRed <= 2.2204e-16 ) && ( ratio <= 2.0 ) ) { throw new OptimizationException ( LocalizedFormats . TOO_SMALL_COST_RELATIVE_TOLERANCE , costRelativeTolerance ) ; } else if ( delta <= 2.2204e-16 * xNorm ) { throw new OptimizationException ( LocalizedFormats . TOO_SMALL_PARAMETERS_RELATIVE_TOLERANCE , parRelativeTolerance ) ; } else if ( maxCosine <= 2.2204e-16 ) { throw new OptimizationException ( LocalizedFormats . TOO_SMALL_ORTHOGONALITY_TOLERANCE , orthoTolerance ) ; } } } }
@Override protected VectorialPointValuePair doOptimize() throws FunctionEvaluationException, OptimizationException, IllegalArgumentException { // arrays shared with the other private methods solvedCols = Math.min(rows, cols); diagR = new double[cols]; jacNorm = new double[cols]; beta = new double[cols]; permutation = new int[cols]; lmDir = new double[cols]; // local point double delta = 0; double xNorm = 0; double[] diag = new double[cols]; double[] oldX = new double[cols]; double[] oldRes = new double[rows]; double[] oldObj = new double[rows]; double[] qtf = new double[rows]; double[] work1 = new double[cols]; double[] work2 = new double[cols]; double[] work3 = new double[cols]; // evaluate the function at the starting point and calculate its norm updateResidualsAndCost(); // outer loop lmPar = 0; boolean firstIteration = true; VectorialPointValuePair current = new VectorialPointValuePair(point, objective); while (true) { for (int i=0;i<rows;i++) { qtf[i]=residuals[i]; } incrementIterationsCounter(); // compute the Q.R. decomposition of the jacobian matrix VectorialPointValuePair previous = current; updateJacobian(); qrDecomposition(); // compute Qt.res qTy(qtf); // now we don't need Q anymore, // so let jacobian contain the R matrix with its diagonal elements for (int k = 0; k < solvedCols; ++k) { int pk = permutation[k]; jacobian[k][pk] = diagR[pk]; } if (firstIteration) { // scale the point according to the norms of the columns // of the initial jacobian xNorm = 0; for (int k = 0; k < cols; ++k) { double dk = jacNorm[k]; if (dk == 0) { dk = 1.0; } double xk = dk * point[k]; xNorm += xk * xk; diag[k] = dk; } xNorm = Math.sqrt(xNorm); // initialize the step bound delta delta = (xNorm == 0) ? 
initialStepBoundFactor : (initialStepBoundFactor * xNorm); } // check orthogonality between function vector and jacobian columns double maxCosine = 0; if (cost != 0) { for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; double s = jacNorm[pj]; if (s != 0) { double sum = 0; for (int i = 0; i <= j; ++i) { sum += jacobian[i][pj] * qtf[i]; } maxCosine = Math.max(maxCosine, Math.abs(sum) / (s * cost)); } } } if (maxCosine <= orthoTolerance) { // convergence has been reached updateResidualsAndCost(); current = new VectorialPointValuePair(point, objective); return current; } // rescale if necessary for (int j = 0; j < cols; ++j) { diag[j] = Math.max(diag[j], jacNorm[j]); } // inner loop for (double ratio = 0; ratio < 1.0e-4;) { // save the state for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; oldX[pj] = point[pj]; } double previousCost = cost; double[] tmpVec = residuals; residuals = oldRes; oldRes = tmpVec; tmpVec = objective; objective = oldObj; oldObj = tmpVec; // determine the Levenberg-Marquardt parameter determineLMParameter(qtf, delta, diag, work1, work2, work3); // compute the new point and the norm of the evolution direction double lmNorm = 0; for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; lmDir[pj] = -lmDir[pj]; point[pj] = oldX[pj] + lmDir[pj]; double s = diag[pj] * lmDir[pj]; lmNorm += s * s; } lmNorm = Math.sqrt(lmNorm); // on the first iteration, adjust the initial step bound. 
if (firstIteration) { delta = Math.min(delta, lmNorm); } // evaluate the function at x + p and calculate its norm updateResidualsAndCost(); // compute the scaled actual reduction double actRed = -1.0; if (0.1 * cost < previousCost) { double r = cost / previousCost; actRed = 1.0 - r * r; } // compute the scaled predicted reduction // and the scaled directional derivative for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; double dirJ = lmDir[pj]; work1[j] = 0; for (int i = 0; i <= j; ++i) { work1[i] += jacobian[i][pj] * dirJ; } } double coeff1 = 0; for (int j = 0; j < solvedCols; ++j) { coeff1 += work1[j] * work1[j]; } double pc2 = previousCost * previousCost; coeff1 = coeff1 / pc2; double coeff2 = lmPar * lmNorm * lmNorm / pc2; double preRed = coeff1 + 2 * coeff2; double dirDer = -(coeff1 + coeff2); // ratio of the actual to the predicted reduction ratio = (preRed == 0) ? 0 : (actRed / preRed); // update the step bound if (ratio <= 0.25) { double tmp = (actRed < 0) ? (0.5 * dirDer / (dirDer + 0.5 * actRed)) : 0.5; if ((0.1 * cost >= previousCost) || (tmp < 0.1)) { tmp = 0.1; } delta = tmp * Math.min(delta, 10.0 * lmNorm); lmPar /= tmp; } else if ((lmPar == 0) || (ratio >= 0.75)) { delta = 2 * lmNorm; lmPar *= 0.5; } // test for successful iteration. if (ratio >= 1.0e-4) { // successful iteration, update the norm firstIteration = false; xNorm = 0; for (int k = 0; k < cols; ++k) { double xK = diag[k] * point[k]; xNorm += xK * xK; } xNorm = Math.sqrt(xNorm); current = new VectorialPointValuePair(point, objective); // tests for convergence. 
if (checker != null) { // we use the vectorial convergence checker if (checker.converged(getIterations(), previous, current)) { return current; } } } else { // failed iteration, reset the previous values cost = previousCost; for (int j = 0; j < solvedCols; ++j) { int pj = permutation[j]; point[pj] = oldX[pj]; } tmpVec = residuals; residuals = oldRes; oldRes = tmpVec; tmpVec = objective; objective = oldObj; oldObj = tmpVec; } if (checker==null) { if (((Math.abs(actRed) <= costRelativeTolerance) && (preRed <= costRelativeTolerance) && (ratio <= 2.0)) || (delta <= parRelativeTolerance * xNorm)) { return current; } } // tests for termination and stringent tolerances // (2.2204e-16 is the machine epsilon for IEEE754) if ((Math.abs(actRed) <= 2.2204e-16) && (preRed <= 2.2204e-16) && (ratio <= 2.0)) { throw new OptimizationException(LocalizedFormats.TOO_SMALL_COST_RELATIVE_TOLERANCE, costRelativeTolerance); } else if (delta <= 2.2204e-16 * xNorm) { throw new OptimizationException(LocalizedFormats.TOO_SMALL_PARAMETERS_RELATIVE_TOLERANCE, parRelativeTolerance); } else if (maxCosine <= 2.2204e-16) { throw new OptimizationException(LocalizedFormats.TOO_SMALL_ORTHOGONALITY_TOLERANCE, orthoTolerance); } } } }
@ Override protected VectorialPointValuePair doOptimize ( ) throws FunctionEvaluationException , OptimizationException , IllegalArgumentException { solvedCols = Math . min ( rows , cols ) ; diagR = new double [ cols ] ; jacNorm = new double [ cols ] ; beta = new double [ cols ] ; permutation = new int [ cols ] ; lmDir = new double [ cols ] ; double delta = 0 ; double xNorm = 0 ; double [ ] diag = new double [ cols ] ; double [ ] oldX = new double [ cols ] ; double [ ] oldRes = new double [ rows ] ; double [ ] oldObj = new double [ rows ] ; double [ ] qtf = new double [ rows ] ; double [ ] work1 = new double [ cols ] ; double [ ] work2 = new double [ cols ] ; double [ ] work3 = new double [ cols ] ; updateResidualsAndCost ( ) ; lmPar = 0 ; boolean firstIteration = true ; VectorialPointValuePair current = new VectorialPointValuePair ( point , objective ) ; while ( true ) { for ( int i = 0 ; i < rows ; i ++ ) { qtf [ i ] = residuals [ i ] ; } incrementIterationsCounter ( ) ; VectorialPointValuePair previous = current ; updateJacobian ( ) ; qrDecomposition ( ) ; qTy ( qtf ) ; for ( int k = 0 ; k < solvedCols ; ++ k ) { int pk = permutation [ k ] ; jacobian [ k ] [ pk ] = diagR [ pk ] ; } if ( firstIteration ) { xNorm = 0 ; for ( int k = 0 ; k < cols ; ++ k ) { double dk = jacNorm [ k ] ; if ( dk == 0 ) { dk = 1.0 ; } double xk = dk * point [ k ] ; xNorm += xk * xk ; diag [ k ] = dk ; } xNorm = Math . sqrt ( xNorm ) ; delta = ( xNorm == 0 ) ? initialStepBoundFactor : ( initialStepBoundFactor * xNorm ) ; } double maxCosine = 0 ; if ( cost != 0 ) { for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; double s = jacNorm [ pj ] ; if ( s != 0 ) { double sum = 0 ; for ( int i = 0 ; i <= j ; ++ i ) { sum += jacobian [ i ] [ pj ] * qtf [ i ] ; } maxCosine = Math . max ( maxCosine , Math . 
abs ( sum ) / ( s * cost ) ) ; } } } if ( maxCosine <= orthoTolerance ) { updateResidualsAndCost ( ) ; current = new VectorialPointValuePair ( point , objective ) ; return current ; } for ( int j = 0 ; j < cols ; ++ j ) { diag [ j ] = Math . max ( diag [ j ] , jacNorm [ j ] ) ; } for ( double ratio = 0 ; ratio < 1.0e-4 ; ) { for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; oldX [ pj ] = point [ pj ] ; } double previousCost = cost ; double [ ] tmpVec = residuals ; residuals = oldRes ; oldRes = tmpVec ; tmpVec = objective ; objective = oldObj ; oldObj = tmpVec ; determineLMParameter ( qtf , delta , diag , work1 , work2 , work3 ) ; double lmNorm = 0 ; for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; lmDir [ pj ] = - lmDir [ pj ] ; point [ pj ] = oldX [ pj ] + lmDir [ pj ] ; double s = diag [ pj ] * lmDir [ pj ] ; lmNorm += s * s ; } lmNorm = Math . sqrt ( lmNorm ) ; if ( firstIteration ) { delta = Math . min ( delta , lmNorm ) ; } updateResidualsAndCost ( ) ; double actRed = - 1.0 ; if ( 0.1 * cost < previousCost ) { double r = cost / previousCost ; actRed = 1.0 - r * r ; } for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; double dirJ = lmDir [ pj ] ; work1 [ j ] = 0 ; for ( int i = 0 ; i <= j ; ++ i ) { work1 [ i ] += jacobian [ i ] [ pj ] * dirJ ; } } double coeff1 = 0 ; for ( int j = 0 ; j < solvedCols ; ++ j ) { coeff1 += work1 [ j ] * work1 [ j ] ; } double pc2 = previousCost * previousCost ; coeff1 = coeff1 / pc2 ; double coeff2 = lmPar * lmNorm * lmNorm / pc2 ; double preRed = coeff1 + 2 * coeff2 ; double dirDer = - ( coeff1 + coeff2 ) ; ratio = ( preRed == 0 ) ? 0 : ( actRed / preRed ) ; if ( ratio <= 0.25 ) { double tmp = ( actRed < 0 ) ? ( 0.5 * dirDer / ( dirDer + 0.5 * actRed ) ) : 0.5 ; if ( ( 0.1 * cost >= previousCost ) || ( tmp < 0.1 ) ) { tmp = 0.1 ; } delta = tmp * Math . 
min ( delta , 10.0 * lmNorm ) ; lmPar /= tmp ; } else if ( ( lmPar == 0 ) || ( ratio >= 0.75 ) ) { delta = 2 * lmNorm ; lmPar *= 0.5 ; } if ( ratio >= 1.0e-4 ) { firstIteration = false ; xNorm = 0 ; for ( int k = 0 ; k < cols ; ++ k ) { double xK = diag [ k ] * point [ k ] ; xNorm += xK * xK ; } xNorm = Math . sqrt ( xNorm ) ; current = new VectorialPointValuePair ( point , objective ) ; if ( checker != null ) { if ( checker . converged ( getIterations ( ) , previous , current ) ) { return current ; } } } else { cost = previousCost ; for ( int j = 0 ; j < solvedCols ; ++ j ) { int pj = permutation [ j ] ; point [ pj ] = oldX [ pj ] ; } tmpVec = residuals ; residuals = oldRes ; oldRes = tmpVec ; tmpVec = objective ; objective = oldObj ; oldObj = tmpVec ; } if ( checker == null ) { if ( ( ( Math . abs ( actRed ) <= costRelativeTolerance ) && ( preRed <= costRelativeTolerance ) && ( ratio <= 2.0 ) ) || ( delta <= parRelativeTolerance * xNorm ) ) { return current ; } } if ( ( Math . abs ( actRed ) <= 2.2204e-16 ) && ( preRed <= 2.2204e-16 ) && ( ratio <= 2.0 ) ) { throw new OptimizationException ( LocalizedFormats . TOO_SMALL_COST_RELATIVE_TOLERANCE , costRelativeTolerance ) ; } else if ( delta <= 2.2204e-16 * xNorm ) { throw new OptimizationException ( LocalizedFormats . TOO_SMALL_PARAMETERS_RELATIVE_TOLERANCE , parRelativeTolerance ) ; } else if ( maxCosine <= 2.2204e-16 ) { throw new OptimizationException ( LocalizedFormats . TOO_SMALL_ORTHOGONALITY_TOLERANCE , orthoTolerance ) ; } } } }
Mockito
3
src/org/mockito/internal/invocation/InvocationMatcher.java
118
141
ArgumentCaptor no longer working for varargs
I ran into the issue described here: http://stackoverflow.com/questions/27303562/why-does-upgrading-mockito-from-1-9-5-to-1-10-8-break-this-captor
public void captureArgumentsFrom(Invocation invocation) { if (invocation.getMethod().isVarArgs()) { int indexOfVararg = invocation.getRawArguments().length - 1; for (int position = 0; position < indexOfVararg; position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } for (int position = indexOfVararg; position < matchers.size(); position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getRawArguments()[position - indexOfVararg]); } } } else { for (int position = 0; position < matchers.size(); position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } } }
public void captureArgumentsFrom ( Invocation invocation ) { if ( invocation . getMethod ( ) . isVarArgs ( ) ) { int indexOfVararg = invocation . getRawArguments ( ) . length - 1 ; for ( int position = 0 ; position < indexOfVararg ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } for ( int position = indexOfVararg ; position < matchers . size ( ) ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getRawArguments ( ) [ position - indexOfVararg ] ) ; } } } else { for ( int position = 0 ; position < matchers . size ( ) ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } } }
public void captureArgumentsFrom(Invocation invocation) { if (invocation.getMethod().isVarArgs()) { int indexOfVararg = invocation.getRawArguments().length - 1; for (int position = 0; position < indexOfVararg; position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } for (Matcher m : uniqueMatcherSet(indexOfVararg)) { if (m instanceof CapturesArguments) { Object rawArgument = invocation.getRawArguments()[indexOfVararg]; for (int i = 0; i < Array.getLength(rawArgument); i++) { ((CapturesArguments) m).captureFrom(Array.get(rawArgument, i)); } } } } else { for (int position = 0; position < matchers.size(); position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } } }
public void captureArgumentsFrom ( Invocation invocation ) { if ( invocation . getMethod ( ) . isVarArgs ( ) ) { int indexOfVararg = invocation . getRawArguments ( ) . length - 1 ; for ( int position = 0 ; position < indexOfVararg ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } for ( Matcher m : uniqueMatcherSet ( indexOfVararg ) ) { if ( m instanceof CapturesArguments ) { Object rawArgument = invocation . getRawArguments ( ) [ indexOfVararg ] ; for ( int i = 0 ; i < Array . getLength ( rawArgument ) ; i ++ ) { ( ( CapturesArguments ) m ) . captureFrom ( Array . get ( rawArgument , i ) ) ; } } } } else { for ( int position = 0 ; position < matchers . size ( ) ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } } }
JxPath
15
src/java/org/apache/commons/jxpath/ri/axes/UnionContext.java
45
64
Core union operation does not sort result nodes according to document order
Source document: <MAIN><A>avalue</A><B>bvalue</B></MAIN> According to string() function defintion: "A node-set is converted to a string by returning the string-value of the node in the node-set that is first in document order. If the node-set is empty, an empty string is returned." Following XPath calculated incorrectly: string(/MAIN/B | /MAIN/A) Expected result: "avalue" Actual value: "bvalue" Reason: sorting of result nodes is missing from CoreOperationUnion
public boolean setPosition(int position) { if (!prepared) { prepared = true; BasicNodeSet nodeSet = (BasicNodeSet) getNodeSet(); ArrayList pointers = new ArrayList(); for (int i = 0; i < contexts.length; i++) { EvalContext ctx = (EvalContext) contexts[i]; while (ctx.nextSet()) { while (ctx.nextNode()) { NodePointer ptr = ctx.getCurrentNodePointer(); if (!pointers.contains(ptr)) { nodeSet.add(ptr); pointers.add(ptr); } } } } } return super.setPosition(position); }
public boolean setPosition ( int position ) { if ( ! prepared ) { prepared = true ; BasicNodeSet nodeSet = ( BasicNodeSet ) getNodeSet ( ) ; ArrayList pointers = new ArrayList ( ) ; for ( int i = 0 ; i < contexts . length ; i ++ ) { EvalContext ctx = ( EvalContext ) contexts [ i ] ; while ( ctx . nextSet ( ) ) { while ( ctx . nextNode ( ) ) { NodePointer ptr = ctx . getCurrentNodePointer ( ) ; if ( ! pointers . contains ( ptr ) ) { nodeSet . add ( ptr ) ; pointers . add ( ptr ) ; } } } } } return super . setPosition ( position ) ; }
public boolean setPosition(int position) { if (!prepared) { prepared = true; BasicNodeSet nodeSet = (BasicNodeSet) getNodeSet(); ArrayList pointers = new ArrayList(); for (int i = 0; i < contexts.length; i++) { EvalContext ctx = (EvalContext) contexts[i]; while (ctx.nextSet()) { while (ctx.nextNode()) { NodePointer ptr = ctx.getCurrentNodePointer(); if (!pointers.contains(ptr)) { pointers.add(ptr); } } } } sortPointers(pointers); for (Iterator it = pointers.iterator(); it.hasNext();) { nodeSet.add((Pointer) it.next()); } } return super.setPosition(position); }
public boolean setPosition ( int position ) { if ( ! prepared ) { prepared = true ; BasicNodeSet nodeSet = ( BasicNodeSet ) getNodeSet ( ) ; ArrayList pointers = new ArrayList ( ) ; for ( int i = 0 ; i < contexts . length ; i ++ ) { EvalContext ctx = ( EvalContext ) contexts [ i ] ; while ( ctx . nextSet ( ) ) { while ( ctx . nextNode ( ) ) { NodePointer ptr = ctx . getCurrentNodePointer ( ) ; if ( ! pointers . contains ( ptr ) ) { pointers . add ( ptr ) ; } } } } sortPointers ( pointers ) ; for ( Iterator it = pointers . iterator ( ) ; it . hasNext ( ) ; ) { nodeSet . add ( ( Pointer ) it . next ( ) ) ; } } return super . setPosition ( position ) ; }
JacksonDatabind
56
src/main/java/com/fasterxml/jackson/databind/deser/std/FromStringDeserializer.java
205
276
Deserializing locale assumes JDK separator (underscore), does not accept RFC specified (hyphen)
When deserializing a locale Jackson currently uses the underscore character as the separator rather than the dash. Specifically, in FromStringDeserializer.java line 234: ``` int ix = value.indexOf('_'); ``` Many locale implementations use dash as the separator as per https://tools.ietf.org/html/rfc5646 Given the RFC states that only the characters a-z A-Z and - are valid it should be possible to leave the current code in for backward-compatibility but it should also check for '-' as a separator.
@Override protected Object _deserialize(String value, DeserializationContext ctxt) throws IOException { switch (_kind) { case STD_FILE: return new File(value); case STD_URL: return new URL(value); case STD_URI: return URI.create(value); case STD_CLASS: try { return ctxt.findClass(value); } catch (Exception e) { throw ctxt.instantiationException(_valueClass, ClassUtil.getRootCause(e)); } case STD_JAVA_TYPE: return ctxt.getTypeFactory().constructFromCanonical(value); case STD_CURRENCY: // will throw IAE if unknown: return Currency.getInstance(value); case STD_PATTERN: // will throw IAE (or its subclass) if malformed return Pattern.compile(value); case STD_LOCALE: { int ix = value.indexOf('_'); if (ix < 0) { // single argument return new Locale(value); } String first = value.substring(0, ix); value = value.substring(ix+1); ix = value.indexOf('_'); if (ix < 0) { // two pieces return new Locale(first, value); } String second = value.substring(0, ix); return new Locale(first, second, value.substring(ix+1)); } case STD_CHARSET: return Charset.forName(value); case STD_TIME_ZONE: return TimeZone.getTimeZone(value); case STD_INET_ADDRESS: return InetAddress.getByName(value); case STD_INET_SOCKET_ADDRESS: if (value.startsWith("[")) { // bracketed IPv6 (with port number) int i = value.lastIndexOf(']'); if (i == -1) { throw new InvalidFormatException(ctxt.getParser(), "Bracketed IPv6 address must contain closing bracket", value, InetSocketAddress.class); } int j = value.indexOf(':', i); int port = j > -1 ? Integer.parseInt(value.substring(j + 1)) : 0; return new InetSocketAddress(value.substring(0, i + 1), port); } else { int ix = value.indexOf(':'); if (ix >= 0 && value.indexOf(':', ix + 1) < 0) { // host:port int port = Integer.parseInt(value.substring(ix+1)); return new InetSocketAddress(value.substring(0, ix), port); } // host or unbracketed IPv6, without port number return new InetSocketAddress(value, 0); } } throw new IllegalArgumentException(); }
@ Override protected Object _deserialize ( String value , DeserializationContext ctxt ) throws IOException { switch ( _kind ) { case STD_FILE : return new File ( value ) ; case STD_URL : return new URL ( value ) ; case STD_URI : return URI . create ( value ) ; case STD_CLASS : try { return ctxt . findClass ( value ) ; } catch ( Exception e ) { throw ctxt . instantiationException ( _valueClass , ClassUtil . getRootCause ( e ) ) ; } case STD_JAVA_TYPE : return ctxt . getTypeFactory ( ) . constructFromCanonical ( value ) ; case STD_CURRENCY : return Currency . getInstance ( value ) ; case STD_PATTERN : return Pattern . compile ( value ) ; case STD_LOCALE : { int ix = value . indexOf ( '_' ) ; if ( ix < 0 ) { return new Locale ( value ) ; } String first = value . substring ( 0 , ix ) ; value = value . substring ( ix + 1 ) ; ix = value . indexOf ( '_' ) ; if ( ix < 0 ) { return new Locale ( first , value ) ; } String second = value . substring ( 0 , ix ) ; return new Locale ( first , second , value . substring ( ix + 1 ) ) ; } case STD_CHARSET : return Charset . forName ( value ) ; case STD_TIME_ZONE : return TimeZone . getTimeZone ( value ) ; case STD_INET_ADDRESS : return InetAddress . getByName ( value ) ; case STD_INET_SOCKET_ADDRESS : if ( value . startsWith ( "[" ) ) { int i = value . lastIndexOf ( ']' ) ; if ( i == - 1 ) { throw new InvalidFormatException ( ctxt . getParser ( ) , "Bracketed IPv6 address must contain closing bracket" , value , InetSocketAddress . class ) ; } int j = value . indexOf ( ':' , i ) ; int port = j > - 1 ? Integer . parseInt ( value . substring ( j + 1 ) ) : 0 ; return new InetSocketAddress ( value . substring ( 0 , i + 1 ) , port ) ; } else { int ix = value . indexOf ( ':' ) ; if ( ix >= 0 && value . indexOf ( ':' , ix + 1 ) < 0 ) { int port = Integer . parseInt ( value . substring ( ix + 1 ) ) ; return new InetSocketAddress ( value . 
substring ( 0 , ix ) , port ) ; } return new InetSocketAddress ( value , 0 ) ; } } throw new IllegalArgumentException ( ) ; }
@Override protected Object _deserialize(String value, DeserializationContext ctxt) throws IOException { switch (_kind) { case STD_FILE: return new File(value); case STD_URL: return new URL(value); case STD_URI: return URI.create(value); case STD_CLASS: try { return ctxt.findClass(value); } catch (Exception e) { throw ctxt.instantiationException(_valueClass, ClassUtil.getRootCause(e)); } case STD_JAVA_TYPE: return ctxt.getTypeFactory().constructFromCanonical(value); case STD_CURRENCY: // will throw IAE if unknown: return Currency.getInstance(value); case STD_PATTERN: // will throw IAE (or its subclass) if malformed return Pattern.compile(value); case STD_LOCALE: { int ix = _firstHyphenOrUnderscore(value); if (ix < 0) { // single argument return new Locale(value); } String first = value.substring(0, ix); value = value.substring(ix+1); ix = _firstHyphenOrUnderscore(value); if (ix < 0) { // two pieces return new Locale(first, value); } String second = value.substring(0, ix); return new Locale(first, second, value.substring(ix+1)); } case STD_CHARSET: return Charset.forName(value); case STD_TIME_ZONE: return TimeZone.getTimeZone(value); case STD_INET_ADDRESS: return InetAddress.getByName(value); case STD_INET_SOCKET_ADDRESS: if (value.startsWith("[")) { // bracketed IPv6 (with port number) int i = value.lastIndexOf(']'); if (i == -1) { throw new InvalidFormatException(ctxt.getParser(), "Bracketed IPv6 address must contain closing bracket", value, InetSocketAddress.class); } int j = value.indexOf(':', i); int port = j > -1 ? Integer.parseInt(value.substring(j + 1)) : 0; return new InetSocketAddress(value.substring(0, i + 1), port); } else { int ix = value.indexOf(':'); if (ix >= 0 && value.indexOf(':', ix + 1) < 0) { // host:port int port = Integer.parseInt(value.substring(ix+1)); return new InetSocketAddress(value.substring(0, ix), port); } // host or unbracketed IPv6, without port number return new InetSocketAddress(value, 0); } } throw new IllegalArgumentException(); }
@ Override protected Object _deserialize ( String value , DeserializationContext ctxt ) throws IOException { switch ( _kind ) { case STD_FILE : return new File ( value ) ; case STD_URL : return new URL ( value ) ; case STD_URI : return URI . create ( value ) ; case STD_CLASS : try { return ctxt . findClass ( value ) ; } catch ( Exception e ) { throw ctxt . instantiationException ( _valueClass , ClassUtil . getRootCause ( e ) ) ; } case STD_JAVA_TYPE : return ctxt . getTypeFactory ( ) . constructFromCanonical ( value ) ; case STD_CURRENCY : return Currency . getInstance ( value ) ; case STD_PATTERN : return Pattern . compile ( value ) ; case STD_LOCALE : { int ix = _firstHyphenOrUnderscore ( value ) ; if ( ix < 0 ) { return new Locale ( value ) ; } String first = value . substring ( 0 , ix ) ; value = value . substring ( ix + 1 ) ; ix = _firstHyphenOrUnderscore ( value ) ; if ( ix < 0 ) { return new Locale ( first , value ) ; } String second = value . substring ( 0 , ix ) ; return new Locale ( first , second , value . substring ( ix + 1 ) ) ; } case STD_CHARSET : return Charset . forName ( value ) ; case STD_TIME_ZONE : return TimeZone . getTimeZone ( value ) ; case STD_INET_ADDRESS : return InetAddress . getByName ( value ) ; case STD_INET_SOCKET_ADDRESS : if ( value . startsWith ( "[" ) ) { int i = value . lastIndexOf ( ']' ) ; if ( i == - 1 ) { throw new InvalidFormatException ( ctxt . getParser ( ) , "Bracketed IPv6 address must contain closing bracket" , value , InetSocketAddress . class ) ; } int j = value . indexOf ( ':' , i ) ; int port = j > - 1 ? Integer . parseInt ( value . substring ( j + 1 ) ) : 0 ; return new InetSocketAddress ( value . substring ( 0 , i + 1 ) , port ) ; } else { int ix = value . indexOf ( ':' ) ; if ( ix >= 0 && value . indexOf ( ':' , ix + 1 ) < 0 ) { int port = Integer . parseInt ( value . substring ( ix + 1 ) ) ; return new InetSocketAddress ( value . 
substring ( 0 , ix ) , port ) ; } return new InetSocketAddress ( value , 0 ) ; } } throw new IllegalArgumentException ( ) ; }
JacksonDatabind
57
src/main/java/com/fasterxml/jackson/databind/ObjectReader.java
1435
1443
`ObjectReader.readValues()` ignores offset and length when reading an array
ObjectReader.readValues ignores offset and length when reading an array. If _dataFormatReaders it will always use the full array: https://github.com/FasterXML/jackson-databind/blob/2.7/src/main/java/com/fasterxml/jackson/databind/ObjectReader.java#L1435
public <T> MappingIterator<T> readValues(byte[] src, int offset, int length) throws IOException, JsonProcessingException { if (_dataFormatReaders != null) { return _detectBindAndReadValues(_dataFormatReaders.findFormat(src, offset, length), false); } return _bindAndReadValues(_considerFilter(_parserFactory.createParser(src), true)); }
public < T > MappingIterator < T > readValues ( byte [ ] src , int offset , int length ) throws IOException , JsonProcessingException { if ( _dataFormatReaders != null ) { return _detectBindAndReadValues ( _dataFormatReaders . findFormat ( src , offset , length ) , false ) ; } return _bindAndReadValues ( _considerFilter ( _parserFactory . createParser ( src ) , true ) ) ; }
public <T> MappingIterator<T> readValues(byte[] src, int offset, int length) throws IOException, JsonProcessingException { if (_dataFormatReaders != null) { return _detectBindAndReadValues(_dataFormatReaders.findFormat(src, offset, length), false); } return _bindAndReadValues(_considerFilter(_parserFactory.createParser(src, offset, length), true)); }
public < T > MappingIterator < T > readValues ( byte [ ] src , int offset , int length ) throws IOException , JsonProcessingException { if ( _dataFormatReaders != null ) { return _detectBindAndReadValues ( _dataFormatReaders . findFormat ( src , offset , length ) , false ) ; } return _bindAndReadValues ( _considerFilter ( _parserFactory . createParser ( src , offset , length ) , true ) ) ; }
Mockito
2
src/org/mockito/internal/util/Timer.java
9
11
Mockito.after() method accepts negative timeperiods and subsequent verifications always pass
e.g. ``` Runnable runnable = Mockito.mock(Runnable.class); Mockito.verify(runnable, Mockito.never()).run(); // passes as expected Mockito.verify(runnable, Mockito.after(1000).never()).run(); // passes as expected Mockito.verify(runnable, Mockito.after(-1000).atLeastOnce()).run(); // passes incorrectly ```
public Timer(long durationMillis) { this.durationMillis = durationMillis; }
public Timer ( long durationMillis ) { this . durationMillis = durationMillis ; }
public Timer(long durationMillis) { validateInput(durationMillis); this.durationMillis = durationMillis; }
public Timer ( long durationMillis ) { validateInput ( durationMillis ) ; this . durationMillis = durationMillis ; }
Cli
33
src/main/java/org/apache/commons/cli/HelpFormatter.java
726
732
HelpFormatter strips leading whitespaces in the footer
I discovered a bug in Commons CLI while using it through Groovy's CliBuilder. See the following issue: http://jira.codehaus.org/browse/GROOVY-4313?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel Copied: The following code: def cli = new CliBuilder(footer: "line1:\n line2:\n") cli.usage() Produces the following output: line1 line2 Note that there are no whitespaces before "line2". Replacing them with "\t" doesn't solve the problem either.
public void printWrapped(PrintWriter pw, int width, int nextLineTabStop, String text) { StringBuffer sb = new StringBuffer(text.length()); renderWrappedText(sb, width, nextLineTabStop, text); pw.println(sb.toString()); }
public void printWrapped ( PrintWriter pw , int width , int nextLineTabStop , String text ) { StringBuffer sb = new StringBuffer ( text . length ( ) ) ; renderWrappedText ( sb , width , nextLineTabStop , text ) ; pw . println ( sb . toString ( ) ) ; }
public void printWrapped(PrintWriter pw, int width, int nextLineTabStop, String text) { StringBuffer sb = new StringBuffer(text.length()); renderWrappedTextBlock(sb, width, nextLineTabStop, text); pw.println(sb.toString()); }
public void printWrapped ( PrintWriter pw , int width , int nextLineTabStop , String text ) { StringBuffer sb = new StringBuffer ( text . length ( ) ) ; renderWrappedTextBlock ( sb , width , nextLineTabStop , text ) ; pw . println ( sb . toString ( ) ) ; }
JacksonCore
21
src/main/java/com/fasterxml/jackson/core/filter/FilteringParserDelegate.java
226
454
`FilteringParserDelegate` seems to miss last closing `END_OBJECT`
(note: adding a failing test for this case) Looks like with settings like: ```java JsonParser p = new FilteringParserDelegate(p0, new NameMatchFilter("value"), true, // includePath false // multipleMatches ); ``` and input ```json { "a":123, "array":[1,2], "ob": { "value0":2, "value":3, "value2":4 }, "b":true } ``` output will be like: ```json {"ob":{"value":3} ``` (note the missing trailing `}` for closing `END_OBJECT`)
@Override public JsonToken nextToken() throws IOException { // 23-May-2017, tatu: To be honest, code here is rather hairy and I don't like all // conditionals; and it seems odd to return `null` but NOT considering input // as closed... would love a rewrite to simplify/clear up logic here. // Check for _allowMultipleMatches - false and at least there is one token - which is _currToken // check for no buffered context _exposedContext - null // If all the conditions matches then check for scalar / non-scalar property if (!_allowMultipleMatches && (_currToken != null) && (_exposedContext == null)) { //if not scalar and ended successfully, and !includePath, then return null if (_currToken.isStructEnd()) { if (_headContext.isStartHandled()) { return (_currToken = null); } } else if (_currToken.isScalarValue()) { //else if scalar, and scalar not present in obj/array and !includePath and INCLUDE_ALL matched once // then return null if (!_headContext.isStartHandled() && (_itemFilter == TokenFilter.INCLUDE_ALL)) { return (_currToken = null); } } } // Anything buffered? TokenFilterContext ctxt = _exposedContext; if (ctxt != null) { while (true) { JsonToken t = ctxt.nextTokenToRead(); if (t != null) { _currToken = t; return t; } // all done with buffered stuff? if (ctxt == _headContext) { _exposedContext = null; if (ctxt.inArray()) { t = delegate.getCurrentToken(); // Is this guaranteed to work without further checks? // if (t != JsonToken.START_ARRAY) { _currToken = t; return t; } // Almost! Most likely still have the current token; // with the sole exception of /* t = delegate.getCurrentToken(); if (t != JsonToken.FIELD_NAME) { _currToken = t; return t; } */ break; } // If not, traverse down the context chain ctxt = _headContext.findChildOf(ctxt); _exposedContext = ctxt; if (ctxt == null) { // should never occur throw _constructError("Unexpected problem: chain of filtered context broken"); } } } // If not, need to read more. 
If we got any: JsonToken t = delegate.nextToken(); if (t == null) { // no strict need to close, since we have no state here _currToken = t; return t; } // otherwise... to include or not? TokenFilter f; switch (t.id()) { case ID_START_ARRAY: f = _itemFilter; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildArrayContext(f, true); return (_currToken = t); } if (f == null) { // does this occur? delegate.skipChildren(); break; } // Otherwise still iffy, need to check f = _headContext.checkValue(f); if (f == null) { delegate.skipChildren(); break; } if (f != TokenFilter.INCLUDE_ALL) { f = f.filterStartArray(); } _itemFilter = f; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildArrayContext(f, true); return (_currToken = t); } _headContext = _headContext.createChildArrayContext(f, false); // Also: only need buffering if parent path to be included if (_includePath) { t = _nextTokenWithBuffering(_headContext); if (t != null) { _currToken = t; return t; } } break; case ID_START_OBJECT: f = _itemFilter; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildObjectContext(f, true); return (_currToken = t); } if (f == null) { // does this occur? 
delegate.skipChildren(); break; } // Otherwise still iffy, need to check f = _headContext.checkValue(f); if (f == null) { delegate.skipChildren(); break; } if (f != TokenFilter.INCLUDE_ALL) { f = f.filterStartObject(); } _itemFilter = f; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildObjectContext(f, true); return (_currToken = t); } _headContext = _headContext.createChildObjectContext(f, false); // Also: only need buffering if parent path to be included if (_includePath) { t = _nextTokenWithBuffering(_headContext); if (t != null) { _currToken = t; return t; } } // note: inclusion of surrounding Object handled separately via // FIELD_NAME break; case ID_END_ARRAY: case ID_END_OBJECT: { boolean returnEnd = _headContext.isStartHandled(); f = _headContext.getFilter(); if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) { f.filterFinishArray(); } _headContext = _headContext.getParent(); _itemFilter = _headContext.getFilter(); if (returnEnd) { return (_currToken = t); } } break; case ID_FIELD_NAME: { final String name = delegate.getCurrentName(); // note: this will also set 'needToHandleName' f = _headContext.setFieldName(name); if (f == TokenFilter.INCLUDE_ALL) { _itemFilter = f; if (!_includePath) { // Minor twist here: if parent NOT included, may need to induce output of // surrounding START_OBJECT/END_OBJECT if (_includeImmediateParent && !_headContext.isStartHandled()) { t = _headContext.nextTokenToRead(); // returns START_OBJECT but also marks it handled _exposedContext = _headContext; } } return (_currToken = t); } if (f == null) { delegate.nextToken(); delegate.skipChildren(); break; } f = f.includeProperty(name); if (f == null) { delegate.nextToken(); delegate.skipChildren(); break; } _itemFilter = f; if (f == TokenFilter.INCLUDE_ALL) { if (_includePath) { return (_currToken = t); } } if (_includePath) { t = _nextTokenWithBuffering(_headContext); if (t != null) { _currToken = t; return t; } } break; } default: // scalar value f = 
_itemFilter; if (f == TokenFilter.INCLUDE_ALL) { return (_currToken = t); } if (f != null) { f = _headContext.checkValue(f); if ((f == TokenFilter.INCLUDE_ALL) || ((f != null) && f.includeValue(delegate))) { return (_currToken = t); } } // Otherwise not included (leaves must be explicitly included) break; } // We get here if token was not yet found; offlined handling return _nextToken2(); }
@ Override public JsonToken nextToken ( ) throws IOException { if ( ! _allowMultipleMatches && ( _currToken != null ) && ( _exposedContext == null ) ) { if ( _currToken . isStructEnd ( ) ) { if ( _headContext . isStartHandled ( ) ) { return ( _currToken = null ) ; } } else if ( _currToken . isScalarValue ( ) ) { if ( ! _headContext . isStartHandled ( ) && ( _itemFilter == TokenFilter . INCLUDE_ALL ) ) { return ( _currToken = null ) ; } } } TokenFilterContext ctxt = _exposedContext ; if ( ctxt != null ) { while ( true ) { JsonToken t = ctxt . nextTokenToRead ( ) ; if ( t != null ) { _currToken = t ; return t ; } if ( ctxt == _headContext ) { _exposedContext = null ; if ( ctxt . inArray ( ) ) { t = delegate . getCurrentToken ( ) ; _currToken = t ; return t ; } break ; } ctxt = _headContext . findChildOf ( ctxt ) ; _exposedContext = ctxt ; if ( ctxt == null ) { throw _constructError ( "Unexpected problem: chain of filtered context broken" ) ; } } } JsonToken t = delegate . nextToken ( ) ; if ( t == null ) { _currToken = t ; return t ; } TokenFilter f ; switch ( t . id ( ) ) { case ID_START_ARRAY : f = _itemFilter ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . createChildArrayContext ( f , true ) ; return ( _currToken = t ) ; } if ( f == null ) { delegate . skipChildren ( ) ; break ; } f = _headContext . checkValue ( f ) ; if ( f == null ) { delegate . skipChildren ( ) ; break ; } if ( f != TokenFilter . INCLUDE_ALL ) { f = f . filterStartArray ( ) ; } _itemFilter = f ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . createChildArrayContext ( f , true ) ; return ( _currToken = t ) ; } _headContext = _headContext . createChildArrayContext ( f , false ) ; if ( _includePath ) { t = _nextTokenWithBuffering ( _headContext ) ; if ( t != null ) { _currToken = t ; return t ; } } break ; case ID_START_OBJECT : f = _itemFilter ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . 
createChildObjectContext ( f , true ) ; return ( _currToken = t ) ; } if ( f == null ) { delegate . skipChildren ( ) ; break ; } f = _headContext . checkValue ( f ) ; if ( f == null ) { delegate . skipChildren ( ) ; break ; } if ( f != TokenFilter . INCLUDE_ALL ) { f = f . filterStartObject ( ) ; } _itemFilter = f ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . createChildObjectContext ( f , true ) ; return ( _currToken = t ) ; } _headContext = _headContext . createChildObjectContext ( f , false ) ; if ( _includePath ) { t = _nextTokenWithBuffering ( _headContext ) ; if ( t != null ) { _currToken = t ; return t ; } } break ; case ID_END_ARRAY : case ID_END_OBJECT : { boolean returnEnd = _headContext . isStartHandled ( ) ; f = _headContext . getFilter ( ) ; if ( ( f != null ) && ( f != TokenFilter . INCLUDE_ALL ) ) { f . filterFinishArray ( ) ; } _headContext = _headContext . getParent ( ) ; _itemFilter = _headContext . getFilter ( ) ; if ( returnEnd ) { return ( _currToken = t ) ; } } break ; case ID_FIELD_NAME : { final String name = delegate . getCurrentName ( ) ; f = _headContext . setFieldName ( name ) ; if ( f == TokenFilter . INCLUDE_ALL ) { _itemFilter = f ; if ( ! _includePath ) { if ( _includeImmediateParent && ! _headContext . isStartHandled ( ) ) { t = _headContext . nextTokenToRead ( ) ; _exposedContext = _headContext ; } } return ( _currToken = t ) ; } if ( f == null ) { delegate . nextToken ( ) ; delegate . skipChildren ( ) ; break ; } f = f . includeProperty ( name ) ; if ( f == null ) { delegate . nextToken ( ) ; delegate . skipChildren ( ) ; break ; } _itemFilter = f ; if ( f == TokenFilter . INCLUDE_ALL ) { if ( _includePath ) { return ( _currToken = t ) ; } } if ( _includePath ) { t = _nextTokenWithBuffering ( _headContext ) ; if ( t != null ) { _currToken = t ; return t ; } } break ; } default : f = _itemFilter ; if ( f == TokenFilter . INCLUDE_ALL ) { return ( _currToken = t ) ; } if ( f != null ) { f = _headContext . 
checkValue ( f ) ; if ( ( f == TokenFilter . INCLUDE_ALL ) || ( ( f != null ) && f . includeValue ( delegate ) ) ) { return ( _currToken = t ) ; } } break ; } return _nextToken2 ( ) ; }
@Override public JsonToken nextToken() throws IOException { // 23-May-2017, tatu: To be honest, code here is rather hairy and I don't like all // conditionals; and it seems odd to return `null` but NOT considering input // as closed... would love a rewrite to simplify/clear up logic here. // Check for _allowMultipleMatches - false and at least there is one token - which is _currToken // check for no buffered context _exposedContext - null // If all the conditions matches then check for scalar / non-scalar property if (!_allowMultipleMatches && (_currToken != null) && (_exposedContext == null)) { //if not scalar and ended successfully, and !includePath, then return null if (!_includePath) { if (_currToken.isStructEnd()) { if (_headContext.isStartHandled()) { return (_currToken = null); } } else if (_currToken.isScalarValue()) { //else if scalar, and scalar not present in obj/array and !includePath and INCLUDE_ALL matched once // then return null if (!_headContext.isStartHandled() && (_itemFilter == TokenFilter.INCLUDE_ALL)) { return (_currToken = null); } } } } // Anything buffered? TokenFilterContext ctxt = _exposedContext; if (ctxt != null) { while (true) { JsonToken t = ctxt.nextTokenToRead(); if (t != null) { _currToken = t; return t; } // all done with buffered stuff? if (ctxt == _headContext) { _exposedContext = null; if (ctxt.inArray()) { t = delegate.getCurrentToken(); // Is this guaranteed to work without further checks? // if (t != JsonToken.START_ARRAY) { _currToken = t; return t; } // Almost! Most likely still have the current token; // with the sole exception of /* t = delegate.getCurrentToken(); if (t != JsonToken.FIELD_NAME) { _currToken = t; return t; } */ break; } // If not, traverse down the context chain ctxt = _headContext.findChildOf(ctxt); _exposedContext = ctxt; if (ctxt == null) { // should never occur throw _constructError("Unexpected problem: chain of filtered context broken"); } } } // If not, need to read more. 
If we got any: JsonToken t = delegate.nextToken(); if (t == null) { // no strict need to close, since we have no state here _currToken = t; return t; } // otherwise... to include or not? TokenFilter f; switch (t.id()) { case ID_START_ARRAY: f = _itemFilter; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildArrayContext(f, true); return (_currToken = t); } if (f == null) { // does this occur? delegate.skipChildren(); break; } // Otherwise still iffy, need to check f = _headContext.checkValue(f); if (f == null) { delegate.skipChildren(); break; } if (f != TokenFilter.INCLUDE_ALL) { f = f.filterStartArray(); } _itemFilter = f; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildArrayContext(f, true); return (_currToken = t); } _headContext = _headContext.createChildArrayContext(f, false); // Also: only need buffering if parent path to be included if (_includePath) { t = _nextTokenWithBuffering(_headContext); if (t != null) { _currToken = t; return t; } } break; case ID_START_OBJECT: f = _itemFilter; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildObjectContext(f, true); return (_currToken = t); } if (f == null) { // does this occur? 
delegate.skipChildren(); break; } // Otherwise still iffy, need to check f = _headContext.checkValue(f); if (f == null) { delegate.skipChildren(); break; } if (f != TokenFilter.INCLUDE_ALL) { f = f.filterStartObject(); } _itemFilter = f; if (f == TokenFilter.INCLUDE_ALL) { _headContext = _headContext.createChildObjectContext(f, true); return (_currToken = t); } _headContext = _headContext.createChildObjectContext(f, false); // Also: only need buffering if parent path to be included if (_includePath) { t = _nextTokenWithBuffering(_headContext); if (t != null) { _currToken = t; return t; } } // note: inclusion of surrounding Object handled separately via // FIELD_NAME break; case ID_END_ARRAY: case ID_END_OBJECT: { boolean returnEnd = _headContext.isStartHandled(); f = _headContext.getFilter(); if ((f != null) && (f != TokenFilter.INCLUDE_ALL)) { f.filterFinishArray(); } _headContext = _headContext.getParent(); _itemFilter = _headContext.getFilter(); if (returnEnd) { return (_currToken = t); } } break; case ID_FIELD_NAME: { final String name = delegate.getCurrentName(); // note: this will also set 'needToHandleName' f = _headContext.setFieldName(name); if (f == TokenFilter.INCLUDE_ALL) { _itemFilter = f; if (!_includePath) { // Minor twist here: if parent NOT included, may need to induce output of // surrounding START_OBJECT/END_OBJECT if (_includeImmediateParent && !_headContext.isStartHandled()) { t = _headContext.nextTokenToRead(); // returns START_OBJECT but also marks it handled _exposedContext = _headContext; } } return (_currToken = t); } if (f == null) { delegate.nextToken(); delegate.skipChildren(); break; } f = f.includeProperty(name); if (f == null) { delegate.nextToken(); delegate.skipChildren(); break; } _itemFilter = f; if (f == TokenFilter.INCLUDE_ALL) { if (_includePath) { return (_currToken = t); } } if (_includePath) { t = _nextTokenWithBuffering(_headContext); if (t != null) { _currToken = t; return t; } } break; } default: // scalar value f = 
_itemFilter; if (f == TokenFilter.INCLUDE_ALL) { return (_currToken = t); } if (f != null) { f = _headContext.checkValue(f); if ((f == TokenFilter.INCLUDE_ALL) || ((f != null) && f.includeValue(delegate))) { return (_currToken = t); } } // Otherwise not included (leaves must be explicitly included) break; } // We get here if token was not yet found; offlined handling return _nextToken2(); }
@ Override public JsonToken nextToken ( ) throws IOException { if ( ! _allowMultipleMatches && ( _currToken != null ) && ( _exposedContext == null ) ) { if ( ! _includePath ) { if ( _currToken . isStructEnd ( ) ) { if ( _headContext . isStartHandled ( ) ) { return ( _currToken = null ) ; } } else if ( _currToken . isScalarValue ( ) ) { if ( ! _headContext . isStartHandled ( ) && ( _itemFilter == TokenFilter . INCLUDE_ALL ) ) { return ( _currToken = null ) ; } } } } TokenFilterContext ctxt = _exposedContext ; if ( ctxt != null ) { while ( true ) { JsonToken t = ctxt . nextTokenToRead ( ) ; if ( t != null ) { _currToken = t ; return t ; } if ( ctxt == _headContext ) { _exposedContext = null ; if ( ctxt . inArray ( ) ) { t = delegate . getCurrentToken ( ) ; _currToken = t ; return t ; } break ; } ctxt = _headContext . findChildOf ( ctxt ) ; _exposedContext = ctxt ; if ( ctxt == null ) { throw _constructError ( "Unexpected problem: chain of filtered context broken" ) ; } } } JsonToken t = delegate . nextToken ( ) ; if ( t == null ) { _currToken = t ; return t ; } TokenFilter f ; switch ( t . id ( ) ) { case ID_START_ARRAY : f = _itemFilter ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . createChildArrayContext ( f , true ) ; return ( _currToken = t ) ; } if ( f == null ) { delegate . skipChildren ( ) ; break ; } f = _headContext . checkValue ( f ) ; if ( f == null ) { delegate . skipChildren ( ) ; break ; } if ( f != TokenFilter . INCLUDE_ALL ) { f = f . filterStartArray ( ) ; } _itemFilter = f ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . createChildArrayContext ( f , true ) ; return ( _currToken = t ) ; } _headContext = _headContext . createChildArrayContext ( f , false ) ; if ( _includePath ) { t = _nextTokenWithBuffering ( _headContext ) ; if ( t != null ) { _currToken = t ; return t ; } } break ; case ID_START_OBJECT : f = _itemFilter ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . 
createChildObjectContext ( f , true ) ; return ( _currToken = t ) ; } if ( f == null ) { delegate . skipChildren ( ) ; break ; } f = _headContext . checkValue ( f ) ; if ( f == null ) { delegate . skipChildren ( ) ; break ; } if ( f != TokenFilter . INCLUDE_ALL ) { f = f . filterStartObject ( ) ; } _itemFilter = f ; if ( f == TokenFilter . INCLUDE_ALL ) { _headContext = _headContext . createChildObjectContext ( f , true ) ; return ( _currToken = t ) ; } _headContext = _headContext . createChildObjectContext ( f , false ) ; if ( _includePath ) { t = _nextTokenWithBuffering ( _headContext ) ; if ( t != null ) { _currToken = t ; return t ; } } break ; case ID_END_ARRAY : case ID_END_OBJECT : { boolean returnEnd = _headContext . isStartHandled ( ) ; f = _headContext . getFilter ( ) ; if ( ( f != null ) && ( f != TokenFilter . INCLUDE_ALL ) ) { f . filterFinishArray ( ) ; } _headContext = _headContext . getParent ( ) ; _itemFilter = _headContext . getFilter ( ) ; if ( returnEnd ) { return ( _currToken = t ) ; } } break ; case ID_FIELD_NAME : { final String name = delegate . getCurrentName ( ) ; f = _headContext . setFieldName ( name ) ; if ( f == TokenFilter . INCLUDE_ALL ) { _itemFilter = f ; if ( ! _includePath ) { if ( _includeImmediateParent && ! _headContext . isStartHandled ( ) ) { t = _headContext . nextTokenToRead ( ) ; _exposedContext = _headContext ; } } return ( _currToken = t ) ; } if ( f == null ) { delegate . nextToken ( ) ; delegate . skipChildren ( ) ; break ; } f = f . includeProperty ( name ) ; if ( f == null ) { delegate . nextToken ( ) ; delegate . skipChildren ( ) ; break ; } _itemFilter = f ; if ( f == TokenFilter . INCLUDE_ALL ) { if ( _includePath ) { return ( _currToken = t ) ; } } if ( _includePath ) { t = _nextTokenWithBuffering ( _headContext ) ; if ( t != null ) { _currToken = t ; return t ; } } break ; } default : f = _itemFilter ; if ( f == TokenFilter . INCLUDE_ALL ) { return ( _currToken = t ) ; } if ( f != null ) { f = _headContext . 
checkValue ( f ) ; if ( ( f == TokenFilter . INCLUDE_ALL ) || ( ( f != null ) && f . includeValue ( delegate ) ) ) { return ( _currToken = t ) ; } } break ; } return _nextToken2 ( ) ; }
Math
32
src/main/java/org/apache/commons/math3/geometry/euclidean/twod/PolygonsSet.java
129
181
BSPTree class and recovery of a Euclidean 3D BRep
New to the work here. Thanks for your efforts on this code. I create a BSPTree from a BoundaryRep (Brep) my test Brep is a cube as represented by a float array containing 8 3D points in(x,y,z) order and an array of indices (12 triplets for the 12 faces of the cube). I construct a BSPMesh() as shown in the code below. I can construct the PolyhedronsSet() but have problems extracting the faces from the BSPTree to reconstruct the BRep. The attached code (BSPMesh2.java) shows that a small change to 1 of the vertex positions causes/corrects the problem. Any ideas?
@Override protected void computeGeometricalProperties() { final Vector2D[][] v = getVertices(); if (v.length == 0) { final BSPTree<Euclidean2D> tree = getTree(false); if ((Boolean) tree.getAttribute()) { // the instance covers the whole space setSize(Double.POSITIVE_INFINITY); setBarycenter(Vector2D.NaN); } else { setSize(0); setBarycenter(new Vector2D(0, 0)); } } else if (v[0][0] == null) { // there is at least one open-loop: the polygon is infinite setSize(Double.POSITIVE_INFINITY); setBarycenter(Vector2D.NaN); } else { // all loops are closed, we compute some integrals around the shape double sum = 0; double sumX = 0; double sumY = 0; for (Vector2D[] loop : v) { double x1 = loop[loop.length - 1].getX(); double y1 = loop[loop.length - 1].getY(); for (final Vector2D point : loop) { final double x0 = x1; final double y0 = y1; x1 = point.getX(); y1 = point.getY(); final double factor = x0 * y1 - y0 * x1; sum += factor; sumX += factor * (x0 + x1); sumY += factor * (y0 + y1); } } if (sum < 0) { // the polygon as a finite outside surrounded by an infinite inside setSize(Double.POSITIVE_INFINITY); setBarycenter(Vector2D.NaN); } else { setSize(sum / 2); setBarycenter(new Vector2D(sumX / (3 * sum), sumY / (3 * sum))); } } }
@ Override protected void computeGeometricalProperties ( ) { final Vector2D [ ] [ ] v = getVertices ( ) ; if ( v . length == 0 ) { final BSPTree < Euclidean2D > tree = getTree ( false ) ; if ( ( Boolean ) tree . getAttribute ( ) ) { setSize ( Double . POSITIVE_INFINITY ) ; setBarycenter ( Vector2D . NaN ) ; } else { setSize ( 0 ) ; setBarycenter ( new Vector2D ( 0 , 0 ) ) ; } } else if ( v [ 0 ] [ 0 ] == null ) { setSize ( Double . POSITIVE_INFINITY ) ; setBarycenter ( Vector2D . NaN ) ; } else { double sum = 0 ; double sumX = 0 ; double sumY = 0 ; for ( Vector2D [ ] loop : v ) { double x1 = loop [ loop . length - 1 ] . getX ( ) ; double y1 = loop [ loop . length - 1 ] . getY ( ) ; for ( final Vector2D point : loop ) { final double x0 = x1 ; final double y0 = y1 ; x1 = point . getX ( ) ; y1 = point . getY ( ) ; final double factor = x0 * y1 - y0 * x1 ; sum += factor ; sumX += factor * ( x0 + x1 ) ; sumY += factor * ( y0 + y1 ) ; } } if ( sum < 0 ) { setSize ( Double . POSITIVE_INFINITY ) ; setBarycenter ( Vector2D . NaN ) ; } else { setSize ( sum / 2 ) ; setBarycenter ( new Vector2D ( sumX / ( 3 * sum ) , sumY / ( 3 * sum ) ) ) ; } } }
@Override protected void computeGeometricalProperties() { final Vector2D[][] v = getVertices(); if (v.length == 0) { final BSPTree<Euclidean2D> tree = getTree(false); if (tree.getCut() == null && (Boolean) tree.getAttribute()) { // the instance covers the whole space setSize(Double.POSITIVE_INFINITY); setBarycenter(Vector2D.NaN); } else { setSize(0); setBarycenter(new Vector2D(0, 0)); } } else if (v[0][0] == null) { // there is at least one open-loop: the polygon is infinite setSize(Double.POSITIVE_INFINITY); setBarycenter(Vector2D.NaN); } else { // all loops are closed, we compute some integrals around the shape double sum = 0; double sumX = 0; double sumY = 0; for (Vector2D[] loop : v) { double x1 = loop[loop.length - 1].getX(); double y1 = loop[loop.length - 1].getY(); for (final Vector2D point : loop) { final double x0 = x1; final double y0 = y1; x1 = point.getX(); y1 = point.getY(); final double factor = x0 * y1 - y0 * x1; sum += factor; sumX += factor * (x0 + x1); sumY += factor * (y0 + y1); } } if (sum < 0) { // the polygon as a finite outside surrounded by an infinite inside setSize(Double.POSITIVE_INFINITY); setBarycenter(Vector2D.NaN); } else { setSize(sum / 2); setBarycenter(new Vector2D(sumX / (3 * sum), sumY / (3 * sum))); } } }
@ Override protected void computeGeometricalProperties ( ) { final Vector2D [ ] [ ] v = getVertices ( ) ; if ( v . length == 0 ) { final BSPTree < Euclidean2D > tree = getTree ( false ) ; if ( tree . getCut ( ) == null && ( Boolean ) tree . getAttribute ( ) ) { setSize ( Double . POSITIVE_INFINITY ) ; setBarycenter ( Vector2D . NaN ) ; } else { setSize ( 0 ) ; setBarycenter ( new Vector2D ( 0 , 0 ) ) ; } } else if ( v [ 0 ] [ 0 ] == null ) { setSize ( Double . POSITIVE_INFINITY ) ; setBarycenter ( Vector2D . NaN ) ; } else { double sum = 0 ; double sumX = 0 ; double sumY = 0 ; for ( Vector2D [ ] loop : v ) { double x1 = loop [ loop . length - 1 ] . getX ( ) ; double y1 = loop [ loop . length - 1 ] . getY ( ) ; for ( final Vector2D point : loop ) { final double x0 = x1 ; final double y0 = y1 ; x1 = point . getX ( ) ; y1 = point . getY ( ) ; final double factor = x0 * y1 - y0 * x1 ; sum += factor ; sumX += factor * ( x0 + x1 ) ; sumY += factor * ( y0 + y1 ) ; } } if ( sum < 0 ) { setSize ( Double . POSITIVE_INFINITY ) ; setBarycenter ( Vector2D . NaN ) ; } else { setSize ( sum / 2 ) ; setBarycenter ( new Vector2D ( sumX / ( 3 * sum ) , sumY / ( 3 * sum ) ) ) ; } } }
Cli
25
src/java/org/apache/commons/cli/HelpFormatter.java
809
851
infinite loop in the wrapping code of HelpFormatter
If there is not enough space to display a word on a single line, HelpFormatter goes into an infinite loop until the JVM crashes with an OutOfMemoryError. Test case: {code} Options options = new Options(); options.addOption("h", "help", false, "This is a looooong description"); HelpFormatter formatter = new HelpFormatter(); formatter.setWidth(20); formatter.printHelp("app", options); // hang & crash {code} A helpful exception indicating the insufficient width would be more appropriate than an OutOfMemoryError.
protected StringBuffer renderWrappedText(StringBuffer sb, int width, int nextLineTabStop, String text) { int pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(rtrim(text)); return sb; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); if (nextLineTabStop >= width) { // stops infinite loop happening nextLineTabStop = width - 1; } // all following lines must be padded with nextLineTabStop space // characters final String padding = createPadding(nextLineTabStop); while (true) { text = padding + text.substring(pos).trim(); pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(text); return sb; } if ( (text.length() > width) && (pos == nextLineTabStop - 1) ) { pos = width; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); } }
protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; if ( nextLineTabStop >= width ) { nextLineTabStop = width - 1 ; } final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } if ( ( text . length ( ) > width ) && ( pos == nextLineTabStop - 1 ) ) { pos = width ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } }
protected StringBuffer renderWrappedText(StringBuffer sb, int width, int nextLineTabStop, String text) { int pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(rtrim(text)); return sb; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); if (nextLineTabStop >= width) { // stops infinite loop happening nextLineTabStop = 1; } // all following lines must be padded with nextLineTabStop space // characters final String padding = createPadding(nextLineTabStop); while (true) { text = padding + text.substring(pos).trim(); pos = findWrapPos(text, width, 0); if (pos == -1) { sb.append(text); return sb; } if ( (text.length() > width) && (pos == nextLineTabStop - 1) ) { pos = width; } sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine); } }
protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; if ( nextLineTabStop >= width ) { nextLineTabStop = 1 ; } final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } if ( ( text . length ( ) > width ) && ( pos == nextLineTabStop - 1 ) ) { pos = width ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } }
Cli
3
src/java/org/apache/commons/cli/TypeHandler.java
158
170
PosixParser interprets "-target opt" as "-t arget opt"
This was posted on the Commons-Developer list and confirmed as a bug. > Is this a bug? Or am I using this incorrectly? > I have an option with short and long values. Given code that is > essentially what is below, with a PosixParser I see results as > follows: > > A command line with just "-t" prints out the results of the catch > block > (OK) > A command line with just "-target" prints out the results of the catch > block (OK) > A command line with just "-t foobar.com" prints out "processing selected > target: foobar.com" (OK) > A command line with just "-target foobar.com" prints out "processing > selected target: arget" (ERROR?) > > ====================================================================== > == > ======================= > private static final String OPTION_TARGET = "t"; > private static final String OPTION_TARGET_LONG = "target"; > // ... > Option generateTarget = new Option(OPTION_TARGET, > OPTION_TARGET_LONG, > true, > "Generate files for the specified > target machine"); > // ... > try { > parsedLine = parser.parse(cmdLineOpts, args); > } catch (ParseException pe) { > System.out.println("Invalid command: " + pe.getMessage() + > "\n"); > HelpFormatter hf = new HelpFormatter(); > hf.printHelp(USAGE, cmdLineOpts); > System.exit(-1); > } > > if (parsedLine.hasOption(OPTION_TARGET)) { > System.out.println("processing selected target: " + > parsedLine.getOptionValue(OPTION_TARGET)); > } It is a bug but it is due to well defined behaviour (so that makes me feel a little better about myself ;). To support *special* (well I call them special anyway) like -Dsystem.property=value we need to be able to examine the first character of an option. If the first character is itself defined as an Option then the remainder of the token is used as the value, e.g. 'D' is the token, it is an option so 'system.property=value' is the argument value for that option. This is the behaviour that we are seeing for your example. 
't' is the token, it is an options so 'arget' is the argument value. I suppose a solution to this could be to have a way to specify properties for parsers. In this case 'posix.special.option == true' for turning on *special* options. I'll have a look into this and let you know. Just to keep track of this and to get you used to how we operate, can you log a bug in bugzilla for this. Thanks, -John K
/**
 * Create a number from a String. Decimal values (containing a '.') are
 * returned as a {@link Double}, all other values as a {@link Long}, keeping
 * the returned subtype predictable for callers. (Delegating to
 * NumberUtils.createNumber returned varying subtypes -- Integer, Float,
 * etc. -- depending on the literal, which callers did not expect.)
 *
 * @param str the value expected to be a number; may be null
 * @return the created Number, or null if {@code str} is null or not parsable
 */
public static Number createNumber(String str)
{
    try
    {
        if (str != null)
        {
            if (str.indexOf('.') != -1)
            {
                return Double.valueOf(str);
            }
            return Long.valueOf(str);
        }
    }
    catch (NumberFormatException nfe)
    {
        // not a parsable number: report and fall through to return null
        System.err.println(nfe.getMessage());
    }

    return null;
}
public static Number createNumber ( String str ) { try { return NumberUtils . createNumber ( str ) ; } catch ( NumberFormatException nfe ) { System . err . println ( nfe . getMessage ( ) ) ; } return null ; }
/**
 * Create a number from a String: values containing a '.' become a
 * {@link Double}, all other values a {@link Long}.
 *
 * @param str the value expected to be a number; may be null
 * @return the created Number, or null if {@code str} is null or not parsable
 */
public static Number createNumber(String str)
{
    if (str == null)
    {
        return null;
    }
    try
    {
        return (str.indexOf('.') == -1) ? (Number) Long.valueOf(str)
                                        : (Number) Double.valueOf(str);
    }
    catch (NumberFormatException nfe)
    {
        // unparsable input: report to stderr and signal failure with null
        System.err.println(nfe.getMessage());
        return null;
    }
}
public static Number createNumber ( String str ) { try { if ( str != null ) { if ( str . indexOf ( '.' ) != - 1 ) { return Double . valueOf ( str ) ; } else { return Long . valueOf ( str ) ; } } } catch ( NumberFormatException nfe ) { System . err . println ( nfe . getMessage ( ) ) ; } return null ; }
JacksonDatabind
16
src/main/java/com/fasterxml/jackson/databind/introspect/AnnotationMap.java
107
113
Annotation bundles ignored when added to Mixin
When updating from v 2.4.4 to 2.5.\* it appears as though annotation bundles created with `@JacksonAnnotationsInside` are ignored when placed on a mixin. Moving the annotation bundel to the actual class seems to resolve the issue. Below is a simple test that attempts to rename a property. I have more complicated test cases that are also failing but this should provide some context. ``` java public class Fun { @Test public void test() throws JsonProcessingException { ObjectMapper mapper = new ObjectMapper().addMixIn(Foo.class, FooMixin.class); String result = mapper.writeValueAsString(new Foo("result")); Assert.assertEquals("{\"bar\":\"result\"}", result); } @Target(value={ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD }) @Retention(value=RetentionPolicy.RUNTIME) @JacksonAnnotationsInside @JsonProperty("bar") public @interface ExposeStuff { } public abstract class FooMixin { @ExposeStuff public abstract String getStuff(); } public class Foo { private String stuff; Foo(String stuff) { this.stuff = stuff; } public String getStuff() { return stuff; } } } ``` I'm expecting the "stuff" property to be serialized as "bar". I apologize I haven't been able to identify the culprit (and perhaps it's in my usage). Let me know your thoughts. I'm always happy to provide more details!
protected final boolean _add(Annotation ann) { if (_annotations == null) { _annotations = new HashMap<Class<? extends Annotation>,Annotation>(); } Annotation previous = _annotations.put(ann.annotationType(), ann); return (previous != null) && previous.equals(ann); }
protected final boolean _add ( Annotation ann ) { if ( _annotations == null ) { _annotations = new HashMap < Class < ? extends Annotation > , Annotation > ( ) ; } Annotation previous = _annotations . put ( ann . annotationType ( ) , ann ) ; return ( previous != null ) && previous . equals ( ann ) ; }
protected final boolean _add(Annotation ann) { if (_annotations == null) { _annotations = new HashMap<Class<? extends Annotation>,Annotation>(); } Annotation previous = _annotations.put(ann.annotationType(), ann); return (previous == null) || !previous.equals(ann); }
protected final boolean _add ( Annotation ann ) { if ( _annotations == null ) { _annotations = new HashMap < Class < ? extends Annotation > , Annotation > ( ) ; } Annotation previous = _annotations . put ( ann . annotationType ( ) , ann ) ; return ( previous == null ) || ! previous . equals ( ann ) ; }
Math
24
src/main/java/org/apache/commons/math3/optimization/univariate/BrentOptimizer.java
108
271
"BrentOptimizer" not always reporting the best point
{{BrentOptimizer}} (package "o.a.c.m.optimization.univariate") does not check that the point it is going to return is indeed the best one it has encountered. Indeed, the last evaluated point might be slightly worse than the one before last.
@Override protected UnivariatePointValuePair doOptimize() { final boolean isMinim = getGoalType() == GoalType.MINIMIZE; final double lo = getMin(); final double mid = getStartValue(); final double hi = getMax(); // Optional additional convergence criteria. final ConvergenceChecker<UnivariatePointValuePair> checker = getConvergenceChecker(); double a; double b; if (lo < hi) { a = lo; b = hi; } else { a = hi; b = lo; } double x = mid; double v = x; double w = x; double d = 0; double e = 0; double fx = computeObjectiveValue(x); if (!isMinim) { fx = -fx; } double fv = fx; double fw = fx; UnivariatePointValuePair previous = null; UnivariatePointValuePair current = new UnivariatePointValuePair(x, isMinim ? fx : -fx); int iter = 0; while (true) { final double m = 0.5 * (a + b); final double tol1 = relativeThreshold * FastMath.abs(x) + absoluteThreshold; final double tol2 = 2 * tol1; // Default stopping criterion. final boolean stop = FastMath.abs(x - m) <= tol2 - 0.5 * (b - a); if (!stop) { double p = 0; double q = 0; double r = 0; double u = 0; if (FastMath.abs(e) > tol1) { // Fit parabola. r = (x - w) * (fx - fv); q = (x - v) * (fx - fw); p = (x - v) * q - (x - w) * r; q = 2 * (q - r); if (q > 0) { p = -p; } else { q = -q; } r = e; e = d; if (p > q * (a - x) && p < q * (b - x) && FastMath.abs(p) < FastMath.abs(0.5 * q * r)) { // Parabolic interpolation step. d = p / q; u = x + d; // f must not be evaluated too close to a or b. if (u - a < tol2 || b - u < tol2) { if (x <= m) { d = tol1; } else { d = -tol1; } } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } // Update by at least "tol1". if (FastMath.abs(d) < tol1) { if (d >= 0) { u = x + tol1; } else { u = x - tol1; } } else { u = x + d; } double fu = computeObjectiveValue(u); if (!isMinim) { fu = -fu; } // User-defined convergence checker. 
previous = current; current = new UnivariatePointValuePair(u, isMinim ? fu : -fu); if (checker != null) { if (checker.converged(iter, previous, current)) { return current; } } // Update a, b, v, w and x. if (fu <= fx) { if (u < x) { b = x; } else { a = x; } v = w; fv = fw; w = x; fw = fx; x = u; fx = fu; } else { if (u < x) { a = u; } else { b = u; } if (fu <= fw || Precision.equals(w, x)) { v = w; fv = fw; w = u; fw = fu; } else if (fu <= fv || Precision.equals(v, x) || Precision.equals(v, w)) { v = u; fv = fu; } } } else { // Default termination (Brent's criterion). return current; } ++iter; } }
@ Override protected UnivariatePointValuePair doOptimize ( ) { final boolean isMinim = getGoalType ( ) == GoalType . MINIMIZE ; final double lo = getMin ( ) ; final double mid = getStartValue ( ) ; final double hi = getMax ( ) ; final ConvergenceChecker < UnivariatePointValuePair > checker = getConvergenceChecker ( ) ; double a ; double b ; if ( lo < hi ) { a = lo ; b = hi ; } else { a = hi ; b = lo ; } double x = mid ; double v = x ; double w = x ; double d = 0 ; double e = 0 ; double fx = computeObjectiveValue ( x ) ; if ( ! isMinim ) { fx = - fx ; } double fv = fx ; double fw = fx ; UnivariatePointValuePair previous = null ; UnivariatePointValuePair current = new UnivariatePointValuePair ( x , isMinim ? fx : - fx ) ; int iter = 0 ; while ( true ) { final double m = 0.5 * ( a + b ) ; final double tol1 = relativeThreshold * FastMath . abs ( x ) + absoluteThreshold ; final double tol2 = 2 * tol1 ; final boolean stop = FastMath . abs ( x - m ) <= tol2 - 0.5 * ( b - a ) ; if ( ! stop ) { double p = 0 ; double q = 0 ; double r = 0 ; double u = 0 ; if ( FastMath . abs ( e ) > tol1 ) { r = ( x - w ) * ( fx - fv ) ; q = ( x - v ) * ( fx - fw ) ; p = ( x - v ) * q - ( x - w ) * r ; q = 2 * ( q - r ) ; if ( q > 0 ) { p = - p ; } else { q = - q ; } r = e ; e = d ; if ( p > q * ( a - x ) && p < q * ( b - x ) && FastMath . abs ( p ) < FastMath . abs ( 0.5 * q * r ) ) { d = p / q ; u = x + d ; if ( u - a < tol2 || b - u < tol2 ) { if ( x <= m ) { d = tol1 ; } else { d = - tol1 ; } } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } if ( FastMath . abs ( d ) < tol1 ) { if ( d >= 0 ) { u = x + tol1 ; } else { u = x - tol1 ; } } else { u = x + d ; } double fu = computeObjectiveValue ( u ) ; if ( ! isMinim ) { fu = - fu ; } previous = current ; current = new UnivariatePointValuePair ( u , isMinim ? fu : - fu ) ; if ( checker != null ) { if ( checker . 
converged ( iter , previous , current ) ) { return current ; } } if ( fu <= fx ) { if ( u < x ) { b = x ; } else { a = x ; } v = w ; fv = fw ; w = x ; fw = fx ; x = u ; fx = fu ; } else { if ( u < x ) { a = u ; } else { b = u ; } if ( fu <= fw || Precision . equals ( w , x ) ) { v = w ; fv = fw ; w = u ; fw = fu ; } else if ( fu <= fv || Precision . equals ( v , x ) || Precision . equals ( v , w ) ) { v = u ; fv = fu ; } } } else { return current ; } ++ iter ; } }
@Override protected UnivariatePointValuePair doOptimize() { final boolean isMinim = getGoalType() == GoalType.MINIMIZE; final double lo = getMin(); final double mid = getStartValue(); final double hi = getMax(); // Optional additional convergence criteria. final ConvergenceChecker<UnivariatePointValuePair> checker = getConvergenceChecker(); double a; double b; if (lo < hi) { a = lo; b = hi; } else { a = hi; b = lo; } double x = mid; double v = x; double w = x; double d = 0; double e = 0; double fx = computeObjectiveValue(x); if (!isMinim) { fx = -fx; } double fv = fx; double fw = fx; UnivariatePointValuePair previous = null; UnivariatePointValuePair current = new UnivariatePointValuePair(x, isMinim ? fx : -fx); int iter = 0; while (true) { final double m = 0.5 * (a + b); final double tol1 = relativeThreshold * FastMath.abs(x) + absoluteThreshold; final double tol2 = 2 * tol1; // Default stopping criterion. final boolean stop = FastMath.abs(x - m) <= tol2 - 0.5 * (b - a); if (!stop) { double p = 0; double q = 0; double r = 0; double u = 0; if (FastMath.abs(e) > tol1) { // Fit parabola. r = (x - w) * (fx - fv); q = (x - v) * (fx - fw); p = (x - v) * q - (x - w) * r; q = 2 * (q - r); if (q > 0) { p = -p; } else { q = -q; } r = e; e = d; if (p > q * (a - x) && p < q * (b - x) && FastMath.abs(p) < FastMath.abs(0.5 * q * r)) { // Parabolic interpolation step. d = p / q; u = x + d; // f must not be evaluated too close to a or b. if (u - a < tol2 || b - u < tol2) { if (x <= m) { d = tol1; } else { d = -tol1; } } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } // Update by at least "tol1". if (FastMath.abs(d) < tol1) { if (d >= 0) { u = x + tol1; } else { u = x - tol1; } } else { u = x + d; } double fu = computeObjectiveValue(u); if (!isMinim) { fu = -fu; } // User-defined convergence checker. 
previous = current; current = new UnivariatePointValuePair(u, isMinim ? fu : -fu); if (checker != null) { if (checker.converged(iter, previous, current)) { return best(current, previous, isMinim); } } // Update a, b, v, w and x. if (fu <= fx) { if (u < x) { b = x; } else { a = x; } v = w; fv = fw; w = x; fw = fx; x = u; fx = fu; } else { if (u < x) { a = u; } else { b = u; } if (fu <= fw || Precision.equals(w, x)) { v = w; fv = fw; w = u; fw = fu; } else if (fu <= fv || Precision.equals(v, x) || Precision.equals(v, w)) { v = u; fv = fu; } } } else { // Default termination (Brent's criterion). return best(current, previous, isMinim); } ++iter; } }
@ Override protected UnivariatePointValuePair doOptimize ( ) { final boolean isMinim = getGoalType ( ) == GoalType . MINIMIZE ; final double lo = getMin ( ) ; final double mid = getStartValue ( ) ; final double hi = getMax ( ) ; final ConvergenceChecker < UnivariatePointValuePair > checker = getConvergenceChecker ( ) ; double a ; double b ; if ( lo < hi ) { a = lo ; b = hi ; } else { a = hi ; b = lo ; } double x = mid ; double v = x ; double w = x ; double d = 0 ; double e = 0 ; double fx = computeObjectiveValue ( x ) ; if ( ! isMinim ) { fx = - fx ; } double fv = fx ; double fw = fx ; UnivariatePointValuePair previous = null ; UnivariatePointValuePair current = new UnivariatePointValuePair ( x , isMinim ? fx : - fx ) ; int iter = 0 ; while ( true ) { final double m = 0.5 * ( a + b ) ; final double tol1 = relativeThreshold * FastMath . abs ( x ) + absoluteThreshold ; final double tol2 = 2 * tol1 ; final boolean stop = FastMath . abs ( x - m ) <= tol2 - 0.5 * ( b - a ) ; if ( ! stop ) { double p = 0 ; double q = 0 ; double r = 0 ; double u = 0 ; if ( FastMath . abs ( e ) > tol1 ) { r = ( x - w ) * ( fx - fv ) ; q = ( x - v ) * ( fx - fw ) ; p = ( x - v ) * q - ( x - w ) * r ; q = 2 * ( q - r ) ; if ( q > 0 ) { p = - p ; } else { q = - q ; } r = e ; e = d ; if ( p > q * ( a - x ) && p < q * ( b - x ) && FastMath . abs ( p ) < FastMath . abs ( 0.5 * q * r ) ) { d = p / q ; u = x + d ; if ( u - a < tol2 || b - u < tol2 ) { if ( x <= m ) { d = tol1 ; } else { d = - tol1 ; } } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } if ( FastMath . abs ( d ) < tol1 ) { if ( d >= 0 ) { u = x + tol1 ; } else { u = x - tol1 ; } } else { u = x + d ; } double fu = computeObjectiveValue ( u ) ; if ( ! isMinim ) { fu = - fu ; } previous = current ; current = new UnivariatePointValuePair ( u , isMinim ? fu : - fu ) ; if ( checker != null ) { if ( checker . 
converged ( iter , previous , current ) ) { return best ( current , previous , isMinim ) ; } } if ( fu <= fx ) { if ( u < x ) { b = x ; } else { a = x ; } v = w ; fv = fw ; w = x ; fw = fx ; x = u ; fx = fu ; } else { if ( u < x ) { a = u ; } else { b = u ; } if ( fu <= fw || Precision . equals ( w , x ) ) { v = w ; fv = fw ; w = u ; fw = fu ; } else if ( fu <= fv || Precision . equals ( v , x ) || Precision . equals ( v , w ) ) { v = u ; fv = fu ; } } } else { return best ( current , previous , isMinim ) ; } ++ iter ; } }
JacksonDatabind
105
src/main/java/com/fasterxml/jackson/databind/deser/std/JdkDeserializers.java
28
50
Illegal reflective access operation warning when using `java.lang.Void` as value type
I'm using Jackson (**2.9.7**) through Spring's RestTemplate: ```java ResponseEntity<Void> response = getRestTemplate().exchange( requestUrl, HttpMethod.PATCH, new HttpEntity<>(dto, authHeaders), Void.class ); ``` When [`Void`](https://docs.oracle.com/javase/7/docs/api/java/lang/Void.html) is used to indicate that the ResponseEntity has no body, the following warning appears in the console: ``` WARNING: An illegal reflective access operation has occurred WARNING: Illegal reflective access by com.fasterxml.jackson.databind.util.ClassUtil (file:/<snip>repository/com/fasterxml/jackson/core/jackson-databind/2.9.7/jackson-databind-2.9.7.jar) to constructor java.lang.Void() WARNING: Please consider reporting this to the maintainers of com.fasterxml.jackson.databind.util.ClassUtil WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations WARNING: All illegal access operations will be denied in a future release ``` The problem disappears if `String` is used as generic type.
public static JsonDeserializer<?> find(Class<?> rawType, String clsName) { if (_classNames.contains(clsName)) { JsonDeserializer<?> d = FromStringDeserializer.findDeserializer(rawType); if (d != null) { return d; } if (rawType == UUID.class) { return new UUIDDeserializer(); } if (rawType == StackTraceElement.class) { return new StackTraceElementDeserializer(); } if (rawType == AtomicBoolean.class) { // (note: AtomicInteger/Long work due to single-arg constructor. For now? return new AtomicBooleanDeserializer(); } if (rawType == ByteBuffer.class) { return new ByteBufferDeserializer(); } } return null; }
public static JsonDeserializer < ? > find ( Class < ? > rawType , String clsName ) { if ( _classNames . contains ( clsName ) ) { JsonDeserializer < ? > d = FromStringDeserializer . findDeserializer ( rawType ) ; if ( d != null ) { return d ; } if ( rawType == UUID . class ) { return new UUIDDeserializer ( ) ; } if ( rawType == StackTraceElement . class ) { return new StackTraceElementDeserializer ( ) ; } if ( rawType == AtomicBoolean . class ) { return new AtomicBooleanDeserializer ( ) ; } if ( rawType == ByteBuffer . class ) { return new ByteBufferDeserializer ( ) ; } } return null ; }
public static JsonDeserializer<?> find(Class<?> rawType, String clsName) { if (_classNames.contains(clsName)) { JsonDeserializer<?> d = FromStringDeserializer.findDeserializer(rawType); if (d != null) { return d; } if (rawType == UUID.class) { return new UUIDDeserializer(); } if (rawType == StackTraceElement.class) { return new StackTraceElementDeserializer(); } if (rawType == AtomicBoolean.class) { // (note: AtomicInteger/Long work due to single-arg constructor. For now? return new AtomicBooleanDeserializer(); } if (rawType == ByteBuffer.class) { return new ByteBufferDeserializer(); } if (rawType == Void.class) { return NullifyingDeserializer.instance; } } return null; }
public static JsonDeserializer < ? > find ( Class < ? > rawType , String clsName ) { if ( _classNames . contains ( clsName ) ) { JsonDeserializer < ? > d = FromStringDeserializer . findDeserializer ( rawType ) ; if ( d != null ) { return d ; } if ( rawType == UUID . class ) { return new UUIDDeserializer ( ) ; } if ( rawType == StackTraceElement . class ) { return new StackTraceElementDeserializer ( ) ; } if ( rawType == AtomicBoolean . class ) { return new AtomicBooleanDeserializer ( ) ; } if ( rawType == ByteBuffer . class ) { return new ByteBufferDeserializer ( ) ; } if ( rawType == Void . class ) { return NullifyingDeserializer . instance ; } } return null ; }
Collections
26
src/main/java/org/apache/commons/collections4/keyvalue/MultiKey.java
277
280
MultiKey subclassing has deserialization problem since COLLECTIONS-266: either declare protected readResolve() or MultiKey must be final
MultiKey from collections 4 provides a transient hashCode and a *private* readResolve to resolve COLLECTIONS-266: Issue with MultiKey when serialized/deserialized via RMI. Unfortunately the solution does not work in case of *subclassing*: readResolve in MultiKey should be declared *protected* readResolve() to be called during deserialization of the subclass. Otherwise MultiKey must be final to avoid such subclassing. *Testcase*: {code:java|title=MultiKeySerializationTest.java} package de.ivu.test.common.collections4; import static org.junit.Assert.assertEquals; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import org.apache.commons.collections4.keyvalue.MultiKey; import org.junit.Test; public class MultiKeySerializationTest { @Test @SuppressWarnings("unchecked") public void testReadResolveEqualHashCode() throws IOException, ClassNotFoundException { class MultiKey2<A, B> extends MultiKey { private static final long serialVersionUID = 1928896152249821416L; public MultiKey2(A key1, B key2) { super(key1, key2); } public A getFirst() { return (A) getKey(0); } public B getSecond() { return (B) getKey(1); } // FIXME: MultiKey should either declare protected readResolve() or must be final. 
} MultiKey2<String, String> one = new MultiKey2<>("bla", "blub"); System.out.println(one.hashCode()); ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream(byteOut); out.writeObject(one); out.close(); byte[] serialized = byteOut.toByteArray(); ByteArrayInputStream byteIn = new ByteArrayInputStream(serialized); ObjectInputStream in = new ObjectInputStream(byteIn); MultiKey2<String, String> two = (MultiKey2<String, String>) in.readObject(); System.out.println(two.hashCode()); assertEquals("hashCode must be equal - please check for protected readResolve in MultiKey*", one.hashCode(), two.hashCode()); } } {code} *Fix:* {code:java|title=MultiKey.java} @@ -274,7 +274,7 @@ * only stable for the same process). * @return the instance with recalculated hash code */ - private Object readResolve() { + protected Object readResolve() { calculateHashCode(keys); return this; } {code}
private Object readResolve() { calculateHashCode(keys); return this; }
private Object readResolve ( ) { calculateHashCode ( keys ) ; return this ; }
protected Object readResolve() { calculateHashCode(keys); return this; }
protected Object readResolve ( ) { calculateHashCode ( keys ) ; return this ; }
Chart
5
source/org/jfree/data/xy/XYSeries.java
540
576
XYSeries.addOrUpdate() should add if duplicates are allowed
Copied from this post (by Ted Schwartz) in the forum: http://www.jfree.org/phpBB2/viewtopic.php?t=24523 I've found a bug in jfreechart-1.0.9 code for org.jfree.data.xy.XYSeries. There was a change some time ago which introduced the notion of allowing duplicate X values in XYSeries data. The method addOrUpdate(Number x, Number y) was never modified to support this, and therefore duplicate data were overwriting existing data. This is the fix I've made, but I don't know how to submit a patch... $ diff original/jfreechart-1.0.9/source/org/jfree/data/xy/XYSeries.java fixed/org/jfree/data/xy/XYSeries.java 537c537 < if (index >= 0) { --- > if (index >= 0 && !allowDuplicateXValues) { 545a546,559 > } else if (index >= 0){ > XYDataItem item = new XYDataItem(x, y); > // need to make sure we are adding *after* any duplicates > int size = this.data.size(); > while (index < size > && item.compareTo(this.data.get(index)) == 0) { > index++; > } > if (index < this.data.size()) { > this.data.add(index, item); > } > else { > this.data.add(item); > } 558,561d571 < // check if this addition will exceed the maximum item count... < if (getItemCount() > this.maximumItemCount) { < this.data.remove(0); < } 562a573,576 > // check if this addition will exceed the maximum item count... > if (getItemCount() > this.maximumItemCount) { > this.data.remove(0); > }
public XYDataItem addOrUpdate(Number x, Number y) { if (x == null) { throw new IllegalArgumentException("Null 'x' argument."); } // if we get to here, we know that duplicate X values are not permitted XYDataItem overwritten = null; int index = indexOf(x); if (index >= 0 && !this.allowDuplicateXValues) { XYDataItem existing = (XYDataItem) this.data.get(index); try { overwritten = (XYDataItem) existing.clone(); } catch (CloneNotSupportedException e) { throw new SeriesException("Couldn't clone XYDataItem!"); } existing.setY(y); } else { // if the series is sorted, the negative index is a result from // Collections.binarySearch() and tells us where to insert the // new item...otherwise it will be just -1 and we should just // append the value to the list... if (this.autoSort) { this.data.add(-index - 1, new XYDataItem(x, y)); } else { this.data.add(new XYDataItem(x, y)); } // check if this addition will exceed the maximum item count... if (getItemCount() > this.maximumItemCount) { this.data.remove(0); } } fireSeriesChanged(); return overwritten; }
public XYDataItem addOrUpdate ( Number x , Number y ) { if ( x == null ) { throw new IllegalArgumentException ( "Null 'x' argument." ) ; } XYDataItem overwritten = null ; int index = indexOf ( x ) ; if ( index >= 0 && ! this . allowDuplicateXValues ) { XYDataItem existing = ( XYDataItem ) this . data . get ( index ) ; try { overwritten = ( XYDataItem ) existing . clone ( ) ; } catch ( CloneNotSupportedException e ) { throw new SeriesException ( "Couldn't clone XYDataItem!" ) ; } existing . setY ( y ) ; } else { if ( this . autoSort ) { this . data . add ( - index - 1 , new XYDataItem ( x , y ) ) ; } else { this . data . add ( new XYDataItem ( x , y ) ) ; } if ( getItemCount ( ) > this . maximumItemCount ) { this . data . remove ( 0 ) ; } } fireSeriesChanged ( ) ; return overwritten ; }
public XYDataItem addOrUpdate(Number x, Number y) { if (x == null) { throw new IllegalArgumentException("Null 'x' argument."); } if (this.allowDuplicateXValues) { add(x, y); return null; } // if we get to here, we know that duplicate X values are not permitted XYDataItem overwritten = null; int index = indexOf(x); if (index >= 0) { XYDataItem existing = (XYDataItem) this.data.get(index); try { overwritten = (XYDataItem) existing.clone(); } catch (CloneNotSupportedException e) { throw new SeriesException("Couldn't clone XYDataItem!"); } existing.setY(y); } else { // if the series is sorted, the negative index is a result from // Collections.binarySearch() and tells us where to insert the // new item...otherwise it will be just -1 and we should just // append the value to the list... if (this.autoSort) { this.data.add(-index - 1, new XYDataItem(x, y)); } else { this.data.add(new XYDataItem(x, y)); } // check if this addition will exceed the maximum item count... if (getItemCount() > this.maximumItemCount) { this.data.remove(0); } } fireSeriesChanged(); return overwritten; }
public XYDataItem addOrUpdate ( Number x , Number y ) { if ( x == null ) { throw new IllegalArgumentException ( "Null 'x' argument." ) ; } if ( this . allowDuplicateXValues ) { add ( x , y ) ; return null ; } XYDataItem overwritten = null ; int index = indexOf ( x ) ; if ( index >= 0 ) { XYDataItem existing = ( XYDataItem ) this . data . get ( index ) ; try { overwritten = ( XYDataItem ) existing . clone ( ) ; } catch ( CloneNotSupportedException e ) { throw new SeriesException ( "Couldn't clone XYDataItem!" ) ; } existing . setY ( y ) ; } else { if ( this . autoSort ) { this . data . add ( - index - 1 , new XYDataItem ( x , y ) ) ; } else { this . data . add ( new XYDataItem ( x , y ) ) ; } if ( getItemCount ( ) > this . maximumItemCount ) { this . data . remove ( 0 ) ; } } fireSeriesChanged ( ) ; return overwritten ; }
Math
73
src/main/java/org/apache/commons/math/analysis/solvers/BrentSolver.java
98
140
Brent solver doesn't throw IllegalArgumentException when initial guess has the wrong sign
Javadoc for "public double solve(final UnivariateRealFunction f, final double min, final double max, final double initial)" claims that "if the values of the function at the three points have the same sign" an IllegalArgumentException is thrown. This case isn't even checked.
public double solve(final UnivariateRealFunction f, final double min, final double max, final double initial) throws MaxIterationsExceededException, FunctionEvaluationException { clearResult(); verifySequence(min, initial, max); // return the initial guess if it is good enough double yInitial = f.value(initial); if (Math.abs(yInitial) <= functionValueAccuracy) { setResult(initial, 0); return result; } // return the first endpoint if it is good enough double yMin = f.value(min); if (Math.abs(yMin) <= functionValueAccuracy) { setResult(yMin, 0); return result; } // reduce interval if min and initial bracket the root if (yInitial * yMin < 0) { return solve(f, min, yMin, initial, yInitial, min, yMin); } // return the second endpoint if it is good enough double yMax = f.value(max); if (Math.abs(yMax) <= functionValueAccuracy) { setResult(yMax, 0); return result; } // reduce interval if initial and max bracket the root if (yInitial * yMax < 0) { return solve(f, initial, yInitial, max, yMax, initial, yInitial); } // full Brent algorithm starting with provided initial guess return solve(f, min, yMin, max, yMax, initial, yInitial); }
public double solve ( final UnivariateRealFunction f , final double min , final double max , final double initial ) throws MaxIterationsExceededException , FunctionEvaluationException { clearResult ( ) ; verifySequence ( min , initial , max ) ; double yInitial = f . value ( initial ) ; if ( Math . abs ( yInitial ) <= functionValueAccuracy ) { setResult ( initial , 0 ) ; return result ; } double yMin = f . value ( min ) ; if ( Math . abs ( yMin ) <= functionValueAccuracy ) { setResult ( yMin , 0 ) ; return result ; } if ( yInitial * yMin < 0 ) { return solve ( f , min , yMin , initial , yInitial , min , yMin ) ; } double yMax = f . value ( max ) ; if ( Math . abs ( yMax ) <= functionValueAccuracy ) { setResult ( yMax , 0 ) ; return result ; } if ( yInitial * yMax < 0 ) { return solve ( f , initial , yInitial , max , yMax , initial , yInitial ) ; } return solve ( f , min , yMin , max , yMax , initial , yInitial ) ; }
public double solve(final UnivariateRealFunction f, final double min, final double max, final double initial) throws MaxIterationsExceededException, FunctionEvaluationException { clearResult(); verifySequence(min, initial, max); // return the initial guess if it is good enough double yInitial = f.value(initial); if (Math.abs(yInitial) <= functionValueAccuracy) { setResult(initial, 0); return result; } // return the first endpoint if it is good enough double yMin = f.value(min); if (Math.abs(yMin) <= functionValueAccuracy) { setResult(yMin, 0); return result; } // reduce interval if min and initial bracket the root if (yInitial * yMin < 0) { return solve(f, min, yMin, initial, yInitial, min, yMin); } // return the second endpoint if it is good enough double yMax = f.value(max); if (Math.abs(yMax) <= functionValueAccuracy) { setResult(yMax, 0); return result; } // reduce interval if initial and max bracket the root if (yInitial * yMax < 0) { return solve(f, initial, yInitial, max, yMax, initial, yInitial); } if (yMin * yMax > 0) { throw MathRuntimeException.createIllegalArgumentException( NON_BRACKETING_MESSAGE, min, max, yMin, yMax); } // full Brent algorithm starting with provided initial guess return solve(f, min, yMin, max, yMax, initial, yInitial); }
public double solve ( final UnivariateRealFunction f , final double min , final double max , final double initial ) throws MaxIterationsExceededException , FunctionEvaluationException { clearResult ( ) ; verifySequence ( min , initial , max ) ; double yInitial = f . value ( initial ) ; if ( Math . abs ( yInitial ) <= functionValueAccuracy ) { setResult ( initial , 0 ) ; return result ; } double yMin = f . value ( min ) ; if ( Math . abs ( yMin ) <= functionValueAccuracy ) { setResult ( yMin , 0 ) ; return result ; } if ( yInitial * yMin < 0 ) { return solve ( f , min , yMin , initial , yInitial , min , yMin ) ; } double yMax = f . value ( max ) ; if ( Math . abs ( yMax ) <= functionValueAccuracy ) { setResult ( yMax , 0 ) ; return result ; } if ( yInitial * yMax < 0 ) { return solve ( f , initial , yInitial , max , yMax , initial , yInitial ) ; } if ( yMin * yMax > 0 ) { throw MathRuntimeException . createIllegalArgumentException ( NON_BRACKETING_MESSAGE , min , max , yMin , yMax ) ; } return solve ( f , min , yMin , max , yMax , initial , yInitial ) ; }
JacksonDatabind
6
src/main/java/com/fasterxml/jackson/databind/util/StdDateFormat.java
359
434
Add Support for Parsing All Compliant ISO-8601 Date Formats
Some providers create JSON date stamps in ISO-8601 formats that cannot be parsed by the jackson-databind library. Here is a sampling of some valid formats that do not parse correctly: 2014-10-03T18:00:00.6-05:00 2014-10-03T18:00:00.61-05:00 1997-07-16T19:20+01:00 1997-07-16T19:20:30.45+01:00 The last two actually come from the ISO-8601 notes on http://www.w3.org/TR/NOTE-datetime.
protected Date parseAsISO8601(String dateStr, ParsePosition pos) { /* 21-May-2009, tatu: DateFormat has very strict handling of * timezone modifiers for ISO-8601. So we need to do some scrubbing. */ /* First: do we have "zulu" format ('Z' == "GMT")? If yes, that's * quite simple because we already set date format timezone to be * GMT, and hence can just strip out 'Z' altogether */ int len = dateStr.length(); char c = dateStr.charAt(len-1); DateFormat df; // [JACKSON-200]: need to support "plain" date... if (len <= 10 && Character.isDigit(c)) { df = _formatPlain; if (df == null) { df = _formatPlain = _cloneFormat(DATE_FORMAT_PLAIN, DATE_FORMAT_STR_PLAIN, _timezone, _locale); } } else if (c == 'Z') { df = _formatISO8601_z; if (df == null) { df = _formatISO8601_z = _cloneFormat(DATE_FORMAT_ISO8601_Z, DATE_FORMAT_STR_ISO8601_Z, _timezone, _locale); } // [JACKSON-334]: may be missing milliseconds... if so, add if (dateStr.charAt(len-4) == ':') { StringBuilder sb = new StringBuilder(dateStr); sb.insert(len-1, ".000"); dateStr = sb.toString(); } } else { // Let's see if we have timezone indicator or not... if (hasTimeZone(dateStr)) { c = dateStr.charAt(len-3); if (c == ':') { // remove optional colon // remove colon StringBuilder sb = new StringBuilder(dateStr); sb.delete(len-3, len-2); dateStr = sb.toString(); } else if (c == '+' || c == '-') { // missing minutes // let's just append '00' dateStr += "00"; } // Milliseconds partial or missing; and even seconds are optional len = dateStr.length(); // remove 'T', '+'/'-' and 4-digit timezone-offset c = dateStr.charAt(len-9); if (Character.isDigit(c)) { StringBuilder sb = new StringBuilder(dateStr); sb.insert(len-5, ".000"); dateStr = sb.toString(); } df = _formatISO8601; if (_formatISO8601 == null) { df = _formatISO8601 = _cloneFormat(DATE_FORMAT_ISO8601, DATE_FORMAT_STR_ISO8601, _timezone, _locale); } } else { // If not, plain date. Easiest to just patch 'Z' in the end? 
StringBuilder sb = new StringBuilder(dateStr); // And possible also millisecond part if missing int timeLen = len - dateStr.lastIndexOf('T') - 1; if (timeLen <= 8) { sb.append(".000"); } sb.append('Z'); dateStr = sb.toString(); df = _formatISO8601_z; if (df == null) { df = _formatISO8601_z = _cloneFormat(DATE_FORMAT_ISO8601_Z, DATE_FORMAT_STR_ISO8601_Z, _timezone, _locale); } } } return df.parse(dateStr, pos); }
protected Date parseAsISO8601 ( String dateStr , ParsePosition pos ) { int len = dateStr . length ( ) ; char c = dateStr . charAt ( len - 1 ) ; DateFormat df ; if ( len <= 10 && Character . isDigit ( c ) ) { df = _formatPlain ; if ( df == null ) { df = _formatPlain = _cloneFormat ( DATE_FORMAT_PLAIN , DATE_FORMAT_STR_PLAIN , _timezone , _locale ) ; } } else if ( c == 'Z' ) { df = _formatISO8601_z ; if ( df == null ) { df = _formatISO8601_z = _cloneFormat ( DATE_FORMAT_ISO8601_Z , DATE_FORMAT_STR_ISO8601_Z , _timezone , _locale ) ; } if ( dateStr . charAt ( len - 4 ) == ':' ) { StringBuilder sb = new StringBuilder ( dateStr ) ; sb . insert ( len - 1 , ".000" ) ; dateStr = sb . toString ( ) ; } } else { if ( hasTimeZone ( dateStr ) ) { c = dateStr . charAt ( len - 3 ) ; if ( c == ':' ) { StringBuilder sb = new StringBuilder ( dateStr ) ; sb . delete ( len - 3 , len - 2 ) ; dateStr = sb . toString ( ) ; } else if ( c == '+' || c == '-' ) { dateStr += "00" ; } len = dateStr . length ( ) ; c = dateStr . charAt ( len - 9 ) ; if ( Character . isDigit ( c ) ) { StringBuilder sb = new StringBuilder ( dateStr ) ; sb . insert ( len - 5 , ".000" ) ; dateStr = sb . toString ( ) ; } df = _formatISO8601 ; if ( _formatISO8601 == null ) { df = _formatISO8601 = _cloneFormat ( DATE_FORMAT_ISO8601 , DATE_FORMAT_STR_ISO8601 , _timezone , _locale ) ; } } else { StringBuilder sb = new StringBuilder ( dateStr ) ; int timeLen = len - dateStr . lastIndexOf ( 'T' ) - 1 ; if ( timeLen <= 8 ) { sb . append ( ".000" ) ; } sb . append ( 'Z' ) ; dateStr = sb . toString ( ) ; df = _formatISO8601_z ; if ( df == null ) { df = _formatISO8601_z = _cloneFormat ( DATE_FORMAT_ISO8601_Z , DATE_FORMAT_STR_ISO8601_Z , _timezone , _locale ) ; } } } return df . parse ( dateStr , pos ) ; }
protected Date parseAsISO8601(String dateStr, ParsePosition pos) { /* 21-May-2009, tatu: DateFormat has very strict handling of * timezone modifiers for ISO-8601. So we need to do some scrubbing. */ /* First: do we have "zulu" format ('Z' == "GMT")? If yes, that's * quite simple because we already set date format timezone to be * GMT, and hence can just strip out 'Z' altogether */ int len = dateStr.length(); char c = dateStr.charAt(len-1); DateFormat df; // [JACKSON-200]: need to support "plain" date... if (len <= 10 && Character.isDigit(c)) { df = _formatPlain; if (df == null) { df = _formatPlain = _cloneFormat(DATE_FORMAT_PLAIN, DATE_FORMAT_STR_PLAIN, _timezone, _locale); } } else if (c == 'Z') { df = _formatISO8601_z; if (df == null) { df = _formatISO8601_z = _cloneFormat(DATE_FORMAT_ISO8601_Z, DATE_FORMAT_STR_ISO8601_Z, _timezone, _locale); } // [JACKSON-334]: may be missing milliseconds... if so, add if (dateStr.charAt(len-4) == ':') { StringBuilder sb = new StringBuilder(dateStr); sb.insert(len-1, ".000"); dateStr = sb.toString(); } } else { // Let's see if we have timezone indicator or not... if (hasTimeZone(dateStr)) { c = dateStr.charAt(len-3); if (c == ':') { // remove optional colon // remove colon StringBuilder sb = new StringBuilder(dateStr); sb.delete(len-3, len-2); dateStr = sb.toString(); } else if (c == '+' || c == '-') { // missing minutes // let's just append '00' dateStr += "00"; } // Milliseconds partial or missing; and even seconds are optional len = dateStr.length(); // remove 'T', '+'/'-' and 4-digit timezone-offset int timeLen = len - dateStr.lastIndexOf('T') - 6; if (timeLen < 12) { // 8 for hh:mm:ss, 4 for .sss int offset = len - 5; // insertion offset, before tz-offset StringBuilder sb = new StringBuilder(dateStr); switch (timeLen) { case 11: sb.insert(offset, '0'); break; case 10: sb.insert(offset, "00"); break; case 9: // is this legal? 
(just second fraction marker) sb.insert(offset, "000"); break; case 8: sb.insert(offset, ".000"); break; case 7: // not legal to have single-digit second break; case 6: // probably not legal, but let's allow sb.insert(offset, "00.000"); case 5: // is legal to omit seconds sb.insert(offset, ":00.000"); } dateStr = sb.toString(); } df = _formatISO8601; if (_formatISO8601 == null) { df = _formatISO8601 = _cloneFormat(DATE_FORMAT_ISO8601, DATE_FORMAT_STR_ISO8601, _timezone, _locale); } } else { // If not, plain date. Easiest to just patch 'Z' in the end? StringBuilder sb = new StringBuilder(dateStr); // And possible also millisecond part if missing int timeLen = len - dateStr.lastIndexOf('T') - 1; if (timeLen < 12) { // missing, or partial switch (timeLen) { case 11: sb.append('0'); case 10: sb.append('0'); case 9: sb.append('0'); break; default: sb.append(".000"); } } sb.append('Z'); dateStr = sb.toString(); df = _formatISO8601_z; if (df == null) { df = _formatISO8601_z = _cloneFormat(DATE_FORMAT_ISO8601_Z, DATE_FORMAT_STR_ISO8601_Z, _timezone, _locale); } } } return df.parse(dateStr, pos); }
protected Date parseAsISO8601 ( String dateStr , ParsePosition pos ) { int len = dateStr . length ( ) ; char c = dateStr . charAt ( len - 1 ) ; DateFormat df ; if ( len <= 10 && Character . isDigit ( c ) ) { df = _formatPlain ; if ( df == null ) { df = _formatPlain = _cloneFormat ( DATE_FORMAT_PLAIN , DATE_FORMAT_STR_PLAIN , _timezone , _locale ) ; } } else if ( c == 'Z' ) { df = _formatISO8601_z ; if ( df == null ) { df = _formatISO8601_z = _cloneFormat ( DATE_FORMAT_ISO8601_Z , DATE_FORMAT_STR_ISO8601_Z , _timezone , _locale ) ; } if ( dateStr . charAt ( len - 4 ) == ':' ) { StringBuilder sb = new StringBuilder ( dateStr ) ; sb . insert ( len - 1 , ".000" ) ; dateStr = sb . toString ( ) ; } } else { if ( hasTimeZone ( dateStr ) ) { c = dateStr . charAt ( len - 3 ) ; if ( c == ':' ) { StringBuilder sb = new StringBuilder ( dateStr ) ; sb . delete ( len - 3 , len - 2 ) ; dateStr = sb . toString ( ) ; } else if ( c == '+' || c == '-' ) { dateStr += "00" ; } len = dateStr . length ( ) ; int timeLen = len - dateStr . lastIndexOf ( 'T' ) - 6 ; if ( timeLen < 12 ) { int offset = len - 5 ; StringBuilder sb = new StringBuilder ( dateStr ) ; switch ( timeLen ) { case 11 : sb . insert ( offset , '0' ) ; break ; case 10 : sb . insert ( offset , "00" ) ; break ; case 9 : sb . insert ( offset , "000" ) ; break ; case 8 : sb . insert ( offset , ".000" ) ; break ; case 7 : break ; case 6 : sb . insert ( offset , "00.000" ) ; case 5 : sb . insert ( offset , ":00.000" ) ; } dateStr = sb . toString ( ) ; } df = _formatISO8601 ; if ( _formatISO8601 == null ) { df = _formatISO8601 = _cloneFormat ( DATE_FORMAT_ISO8601 , DATE_FORMAT_STR_ISO8601 , _timezone , _locale ) ; } } else { StringBuilder sb = new StringBuilder ( dateStr ) ; int timeLen = len - dateStr . lastIndexOf ( 'T' ) - 1 ; if ( timeLen < 12 ) { switch ( timeLen ) { case 11 : sb . append ( '0' ) ; case 10 : sb . append ( '0' ) ; case 9 : sb . append ( '0' ) ; break ; default : sb . append ( ".000" ) ; } } sb . 
append ( 'Z' ) ; dateStr = sb . toString ( ) ; df = _formatISO8601_z ; if ( df == null ) { df = _formatISO8601_z = _cloneFormat ( DATE_FORMAT_ISO8601_Z , DATE_FORMAT_STR_ISO8601_Z , _timezone , _locale ) ; } } } return df . parse ( dateStr , pos ) ; }
JacksonDatabind
82
src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
472
615
`JsonIgnoreProperties.allowSetters` is not working in Jackson 2.8
``` @JsonIgnoreProperties(value = { "password" }, ignoreUnknown = true, allowSetters = true) public class JsonTest { private String username; private String password; public JsonTest() { super(); // TODO Auto-generated constructor stub } public JsonTest(String username, String password) { super(); this.username = username; this.password = password; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public static void main(String[] args) { ObjectMapper mapper = new ObjectMapper(); JsonTest json = new JsonTest("user", "password"); try { System.out.println(mapper.writeValueAsString(json)); } catch (JsonProcessingException e) { // TODO Auto-generated catch block e.printStackTrace(); } String jsonString = "{ \"username\":\"username\",\"password\":\"password\" }"; try { json = mapper.readValue(jsonString, JsonTest.class); System.out.println(json.getPassword()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } ``` the version is 2.8.7. the password cannot deserialize. the output is: {"username":"user"} null
protected void addBeanProps(DeserializationContext ctxt, BeanDescription beanDesc, BeanDeserializerBuilder builder) throws JsonMappingException { final boolean isConcrete = !beanDesc.getType().isAbstract(); final SettableBeanProperty[] creatorProps = isConcrete ? builder.getValueInstantiator().getFromObjectArguments(ctxt.getConfig()) : null; final boolean hasCreatorProps = (creatorProps != null); // 01-May-2016, tatu: Which base type to use here gets tricky, since // it may often make most sense to use general type for overrides, // but what we have here may be more specific impl type. But for now // just use it as is. JsonIgnoreProperties.Value ignorals = ctxt.getConfig() .getDefaultPropertyIgnorals(beanDesc.getBeanClass(), beanDesc.getClassInfo()); Set<String> ignored; if (ignorals != null) { boolean ignoreAny = ignorals.getIgnoreUnknown(); builder.setIgnoreUnknownProperties(ignoreAny); // Or explicit/implicit definitions? ignored = ignorals.getIgnored(); for (String propName : ignored) { builder.addIgnorable(propName); } } else { ignored = Collections.emptySet(); } // Also, do we have a fallback "any" setter? AnnotatedMethod anySetterMethod = beanDesc.findAnySetter(); AnnotatedMember anySetterField = null; if (anySetterMethod != null) { builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetterMethod)); } else { anySetterField = beanDesc.findAnySetterField(); if(anySetterField != null) { builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetterField)); } } // NOTE: we do NOT add @JsonIgnore'd properties into blocked ones if there's any-setter // Implicit ones via @JsonIgnore and equivalent? 
if (anySetterMethod == null && anySetterField == null) { Collection<String> ignored2 = beanDesc.getIgnoredPropertyNames(); if (ignored2 != null) { for (String propName : ignored2) { // allow ignoral of similarly named JSON property, but do not force; // latter means NOT adding this to 'ignored': builder.addIgnorable(propName); } } } final boolean useGettersAsSetters = ctxt.isEnabled(MapperFeature.USE_GETTERS_AS_SETTERS) && ctxt.isEnabled(MapperFeature.AUTO_DETECT_GETTERS); // Ok: let's then filter out property definitions List<BeanPropertyDefinition> propDefs = filterBeanProps(ctxt, beanDesc, builder, beanDesc.findProperties(), ignored); // After which we can let custom code change the set if (_factoryConfig.hasDeserializerModifiers()) { for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) { propDefs = mod.updateProperties(ctxt.getConfig(), beanDesc, propDefs); } } // At which point we still have all kinds of properties; not all with mutators: for (BeanPropertyDefinition propDef : propDefs) { SettableBeanProperty prop = null; /* 18-Oct-2013, tatu: Although constructor parameters have highest precedence, * we need to do linkage (as per [databind#318]), and so need to start with * other types, and only then create constructor parameter, if any. */ if (propDef.hasSetter()) { JavaType propertyType = propDef.getSetter().getParameterType(0); prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType); } else if (propDef.hasField()) { JavaType propertyType = propDef.getField().getType(); prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType); } else if (useGettersAsSetters && propDef.hasGetter()) { /* May also need to consider getters * for Map/Collection properties; but with lowest precedence */ AnnotatedMethod getter = propDef.getGetter(); // should only consider Collections and Maps, for now? 
Class<?> rawPropertyType = getter.getRawType(); if (Collection.class.isAssignableFrom(rawPropertyType) || Map.class.isAssignableFrom(rawPropertyType)) { prop = constructSetterlessProperty(ctxt, beanDesc, propDef); } } // 25-Sep-2014, tatu: No point in finding constructor parameters for abstract types // (since they are never used anyway) if (hasCreatorProps && propDef.hasConstructorParameter()) { /* If property is passed via constructor parameter, we must * handle things in special way. Not sure what is the most optimal way... * for now, let's just call a (new) method in builder, which does nothing. */ // but let's call a method just to allow custom builders to be aware... final String name = propDef.getName(); CreatorProperty cprop = null; if (creatorProps != null) { for (SettableBeanProperty cp : creatorProps) { if (name.equals(cp.getName()) && (cp instanceof CreatorProperty)) { cprop = (CreatorProperty) cp; break; } } } if (cprop == null) { List<String> n = new ArrayList<>(); for (SettableBeanProperty cp : creatorProps) { n.add(cp.getName()); } ctxt.reportBadPropertyDefinition(beanDesc, propDef, "Could not find creator property with name '%s' (known Creator properties: %s)", name, n); continue; } if (prop != null) { cprop.setFallbackSetter(prop); } prop = cprop; builder.addCreatorProperty(cprop); continue; } if (prop != null) { Class<?>[] views = propDef.findViews(); if (views == null) { // one more twist: if default inclusion disabled, need to force empty set of views if (!ctxt.isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)) { views = NO_VIEWS; } } // one more thing before adding to builder: copy any metadata prop.setViews(views); builder.addProperty(prop); } } }
protected void addBeanProps ( DeserializationContext ctxt , BeanDescription beanDesc , BeanDeserializerBuilder builder ) throws JsonMappingException { final boolean isConcrete = ! beanDesc . getType ( ) . isAbstract ( ) ; final SettableBeanProperty [ ] creatorProps = isConcrete ? builder . getValueInstantiator ( ) . getFromObjectArguments ( ctxt . getConfig ( ) ) : null ; final boolean hasCreatorProps = ( creatorProps != null ) ; JsonIgnoreProperties . Value ignorals = ctxt . getConfig ( ) . getDefaultPropertyIgnorals ( beanDesc . getBeanClass ( ) , beanDesc . getClassInfo ( ) ) ; Set < String > ignored ; if ( ignorals != null ) { boolean ignoreAny = ignorals . getIgnoreUnknown ( ) ; builder . setIgnoreUnknownProperties ( ignoreAny ) ; ignored = ignorals . getIgnored ( ) ; for ( String propName : ignored ) { builder . addIgnorable ( propName ) ; } } else { ignored = Collections . emptySet ( ) ; } AnnotatedMethod anySetterMethod = beanDesc . findAnySetter ( ) ; AnnotatedMember anySetterField = null ; if ( anySetterMethod != null ) { builder . setAnySetter ( constructAnySetter ( ctxt , beanDesc , anySetterMethod ) ) ; } else { anySetterField = beanDesc . findAnySetterField ( ) ; if ( anySetterField != null ) { builder . setAnySetter ( constructAnySetter ( ctxt , beanDesc , anySetterField ) ) ; } } if ( anySetterMethod == null && anySetterField == null ) { Collection < String > ignored2 = beanDesc . getIgnoredPropertyNames ( ) ; if ( ignored2 != null ) { for ( String propName : ignored2 ) { builder . addIgnorable ( propName ) ; } } } final boolean useGettersAsSetters = ctxt . isEnabled ( MapperFeature . USE_GETTERS_AS_SETTERS ) && ctxt . isEnabled ( MapperFeature . AUTO_DETECT_GETTERS ) ; List < BeanPropertyDefinition > propDefs = filterBeanProps ( ctxt , beanDesc , builder , beanDesc . findProperties ( ) , ignored ) ; if ( _factoryConfig . hasDeserializerModifiers ( ) ) { for ( BeanDeserializerModifier mod : _factoryConfig . 
deserializerModifiers ( ) ) { propDefs = mod . updateProperties ( ctxt . getConfig ( ) , beanDesc , propDefs ) ; } } for ( BeanPropertyDefinition propDef : propDefs ) { SettableBeanProperty prop = null ; if ( propDef . hasSetter ( ) ) { JavaType propertyType = propDef . getSetter ( ) . getParameterType ( 0 ) ; prop = constructSettableProperty ( ctxt , beanDesc , propDef , propertyType ) ; } else if ( propDef . hasField ( ) ) { JavaType propertyType = propDef . getField ( ) . getType ( ) ; prop = constructSettableProperty ( ctxt , beanDesc , propDef , propertyType ) ; } else if ( useGettersAsSetters && propDef . hasGetter ( ) ) { AnnotatedMethod getter = propDef . getGetter ( ) ; Class < ? > rawPropertyType = getter . getRawType ( ) ; if ( Collection . class . isAssignableFrom ( rawPropertyType ) || Map . class . isAssignableFrom ( rawPropertyType ) ) { prop = constructSetterlessProperty ( ctxt , beanDesc , propDef ) ; } } if ( hasCreatorProps && propDef . hasConstructorParameter ( ) ) { final String name = propDef . getName ( ) ; CreatorProperty cprop = null ; if ( creatorProps != null ) { for ( SettableBeanProperty cp : creatorProps ) { if ( name . equals ( cp . getName ( ) ) && ( cp instanceof CreatorProperty ) ) { cprop = ( CreatorProperty ) cp ; break ; } } } if ( cprop == null ) { List < String > n = new ArrayList < > ( ) ; for ( SettableBeanProperty cp : creatorProps ) { n . add ( cp . getName ( ) ) ; } ctxt . reportBadPropertyDefinition ( beanDesc , propDef , "Could not find creator property with name '%s' (known Creator properties: %s)" , name , n ) ; continue ; } if ( prop != null ) { cprop . setFallbackSetter ( prop ) ; } prop = cprop ; builder . addCreatorProperty ( cprop ) ; continue ; } if ( prop != null ) { Class < ? > [ ] views = propDef . findViews ( ) ; if ( views == null ) { if ( ! ctxt . isEnabled ( MapperFeature . DEFAULT_VIEW_INCLUSION ) ) { views = NO_VIEWS ; } } prop . setViews ( views ) ; builder . addProperty ( prop ) ; } } }
protected void addBeanProps(DeserializationContext ctxt, BeanDescription beanDesc, BeanDeserializerBuilder builder) throws JsonMappingException { final boolean isConcrete = !beanDesc.getType().isAbstract(); final SettableBeanProperty[] creatorProps = isConcrete ? builder.getValueInstantiator().getFromObjectArguments(ctxt.getConfig()) : null; final boolean hasCreatorProps = (creatorProps != null); // 01-May-2016, tatu: Which base type to use here gets tricky, since // it may often make most sense to use general type for overrides, // but what we have here may be more specific impl type. But for now // just use it as is. JsonIgnoreProperties.Value ignorals = ctxt.getConfig() .getDefaultPropertyIgnorals(beanDesc.getBeanClass(), beanDesc.getClassInfo()); Set<String> ignored; if (ignorals != null) { boolean ignoreAny = ignorals.getIgnoreUnknown(); builder.setIgnoreUnknownProperties(ignoreAny); // Or explicit/implicit definitions? ignored = ignorals.findIgnoredForDeserialization(); for (String propName : ignored) { builder.addIgnorable(propName); } } else { ignored = Collections.emptySet(); } // Also, do we have a fallback "any" setter? AnnotatedMethod anySetterMethod = beanDesc.findAnySetter(); AnnotatedMember anySetterField = null; if (anySetterMethod != null) { builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetterMethod)); } else { anySetterField = beanDesc.findAnySetterField(); if(anySetterField != null) { builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetterField)); } } // NOTE: we do NOT add @JsonIgnore'd properties into blocked ones if there's any-setter // Implicit ones via @JsonIgnore and equivalent? 
if (anySetterMethod == null && anySetterField == null) { Collection<String> ignored2 = beanDesc.getIgnoredPropertyNames(); if (ignored2 != null) { for (String propName : ignored2) { // allow ignoral of similarly named JSON property, but do not force; // latter means NOT adding this to 'ignored': builder.addIgnorable(propName); } } } final boolean useGettersAsSetters = ctxt.isEnabled(MapperFeature.USE_GETTERS_AS_SETTERS) && ctxt.isEnabled(MapperFeature.AUTO_DETECT_GETTERS); // Ok: let's then filter out property definitions List<BeanPropertyDefinition> propDefs = filterBeanProps(ctxt, beanDesc, builder, beanDesc.findProperties(), ignored); // After which we can let custom code change the set if (_factoryConfig.hasDeserializerModifiers()) { for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) { propDefs = mod.updateProperties(ctxt.getConfig(), beanDesc, propDefs); } } // At which point we still have all kinds of properties; not all with mutators: for (BeanPropertyDefinition propDef : propDefs) { SettableBeanProperty prop = null; /* 18-Oct-2013, tatu: Although constructor parameters have highest precedence, * we need to do linkage (as per [databind#318]), and so need to start with * other types, and only then create constructor parameter, if any. */ if (propDef.hasSetter()) { JavaType propertyType = propDef.getSetter().getParameterType(0); prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType); } else if (propDef.hasField()) { JavaType propertyType = propDef.getField().getType(); prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType); } else if (useGettersAsSetters && propDef.hasGetter()) { /* May also need to consider getters * for Map/Collection properties; but with lowest precedence */ AnnotatedMethod getter = propDef.getGetter(); // should only consider Collections and Maps, for now? 
Class<?> rawPropertyType = getter.getRawType(); if (Collection.class.isAssignableFrom(rawPropertyType) || Map.class.isAssignableFrom(rawPropertyType)) { prop = constructSetterlessProperty(ctxt, beanDesc, propDef); } } // 25-Sep-2014, tatu: No point in finding constructor parameters for abstract types // (since they are never used anyway) if (hasCreatorProps && propDef.hasConstructorParameter()) { /* If property is passed via constructor parameter, we must * handle things in special way. Not sure what is the most optimal way... * for now, let's just call a (new) method in builder, which does nothing. */ // but let's call a method just to allow custom builders to be aware... final String name = propDef.getName(); CreatorProperty cprop = null; if (creatorProps != null) { for (SettableBeanProperty cp : creatorProps) { if (name.equals(cp.getName()) && (cp instanceof CreatorProperty)) { cprop = (CreatorProperty) cp; break; } } } if (cprop == null) { List<String> n = new ArrayList<>(); for (SettableBeanProperty cp : creatorProps) { n.add(cp.getName()); } ctxt.reportBadPropertyDefinition(beanDesc, propDef, "Could not find creator property with name '%s' (known Creator properties: %s)", name, n); continue; } if (prop != null) { cprop.setFallbackSetter(prop); } prop = cprop; builder.addCreatorProperty(cprop); continue; } if (prop != null) { Class<?>[] views = propDef.findViews(); if (views == null) { // one more twist: if default inclusion disabled, need to force empty set of views if (!ctxt.isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)) { views = NO_VIEWS; } } // one more thing before adding to builder: copy any metadata prop.setViews(views); builder.addProperty(prop); } } }
protected void addBeanProps ( DeserializationContext ctxt , BeanDescription beanDesc , BeanDeserializerBuilder builder ) throws JsonMappingException { final boolean isConcrete = ! beanDesc . getType ( ) . isAbstract ( ) ; final SettableBeanProperty [ ] creatorProps = isConcrete ? builder . getValueInstantiator ( ) . getFromObjectArguments ( ctxt . getConfig ( ) ) : null ; final boolean hasCreatorProps = ( creatorProps != null ) ; JsonIgnoreProperties . Value ignorals = ctxt . getConfig ( ) . getDefaultPropertyIgnorals ( beanDesc . getBeanClass ( ) , beanDesc . getClassInfo ( ) ) ; Set < String > ignored ; if ( ignorals != null ) { boolean ignoreAny = ignorals . getIgnoreUnknown ( ) ; builder . setIgnoreUnknownProperties ( ignoreAny ) ; ignored = ignorals . findIgnoredForDeserialization ( ) ; for ( String propName : ignored ) { builder . addIgnorable ( propName ) ; } } else { ignored = Collections . emptySet ( ) ; } AnnotatedMethod anySetterMethod = beanDesc . findAnySetter ( ) ; AnnotatedMember anySetterField = null ; if ( anySetterMethod != null ) { builder . setAnySetter ( constructAnySetter ( ctxt , beanDesc , anySetterMethod ) ) ; } else { anySetterField = beanDesc . findAnySetterField ( ) ; if ( anySetterField != null ) { builder . setAnySetter ( constructAnySetter ( ctxt , beanDesc , anySetterField ) ) ; } } if ( anySetterMethod == null && anySetterField == null ) { Collection < String > ignored2 = beanDesc . getIgnoredPropertyNames ( ) ; if ( ignored2 != null ) { for ( String propName : ignored2 ) { builder . addIgnorable ( propName ) ; } } } final boolean useGettersAsSetters = ctxt . isEnabled ( MapperFeature . USE_GETTERS_AS_SETTERS ) && ctxt . isEnabled ( MapperFeature . AUTO_DETECT_GETTERS ) ; List < BeanPropertyDefinition > propDefs = filterBeanProps ( ctxt , beanDesc , builder , beanDesc . findProperties ( ) , ignored ) ; if ( _factoryConfig . hasDeserializerModifiers ( ) ) { for ( BeanDeserializerModifier mod : _factoryConfig . 
deserializerModifiers ( ) ) { propDefs = mod . updateProperties ( ctxt . getConfig ( ) , beanDesc , propDefs ) ; } } for ( BeanPropertyDefinition propDef : propDefs ) { SettableBeanProperty prop = null ; if ( propDef . hasSetter ( ) ) { JavaType propertyType = propDef . getSetter ( ) . getParameterType ( 0 ) ; prop = constructSettableProperty ( ctxt , beanDesc , propDef , propertyType ) ; } else if ( propDef . hasField ( ) ) { JavaType propertyType = propDef . getField ( ) . getType ( ) ; prop = constructSettableProperty ( ctxt , beanDesc , propDef , propertyType ) ; } else if ( useGettersAsSetters && propDef . hasGetter ( ) ) { AnnotatedMethod getter = propDef . getGetter ( ) ; Class < ? > rawPropertyType = getter . getRawType ( ) ; if ( Collection . class . isAssignableFrom ( rawPropertyType ) || Map . class . isAssignableFrom ( rawPropertyType ) ) { prop = constructSetterlessProperty ( ctxt , beanDesc , propDef ) ; } } if ( hasCreatorProps && propDef . hasConstructorParameter ( ) ) { final String name = propDef . getName ( ) ; CreatorProperty cprop = null ; if ( creatorProps != null ) { for ( SettableBeanProperty cp : creatorProps ) { if ( name . equals ( cp . getName ( ) ) && ( cp instanceof CreatorProperty ) ) { cprop = ( CreatorProperty ) cp ; break ; } } } if ( cprop == null ) { List < String > n = new ArrayList < > ( ) ; for ( SettableBeanProperty cp : creatorProps ) { n . add ( cp . getName ( ) ) ; } ctxt . reportBadPropertyDefinition ( beanDesc , propDef , "Could not find creator property with name '%s' (known Creator properties: %s)" , name , n ) ; continue ; } if ( prop != null ) { cprop . setFallbackSetter ( prop ) ; } prop = cprop ; builder . addCreatorProperty ( cprop ) ; continue ; } if ( prop != null ) { Class < ? > [ ] views = propDef . findViews ( ) ; if ( views == null ) { if ( ! ctxt . isEnabled ( MapperFeature . DEFAULT_VIEW_INCLUSION ) ) { views = NO_VIEWS ; } } prop . setViews ( views ) ; builder . addProperty ( prop ) ; } } }
Math
5
src/main/java/org/apache/commons/math3/complex/Complex.java
299
321
Complex.ZERO.reciprocal() returns NaN but should return INF.
Complex.ZERO.reciprocal() returns NaN but should return INF. Class: org.apache.commons.math3.complex.Complex; Method: reciprocal() @version $Id: Complex.java 1416643 2012-12-03 19:37:14Z tn $
public Complex reciprocal() { if (isNaN) { return NaN; } if (real == 0.0 && imaginary == 0.0) { return NaN; } if (isInfinite) { return ZERO; } if (FastMath.abs(real) < FastMath.abs(imaginary)) { double q = real / imaginary; double scale = 1. / (real * q + imaginary); return createComplex(scale * q, -scale); } else { double q = imaginary / real; double scale = 1. / (imaginary * q + real); return createComplex(scale, -scale * q); } }
public Complex reciprocal ( ) { if ( isNaN ) { return NaN ; } if ( real == 0.0 && imaginary == 0.0 ) { return NaN ; } if ( isInfinite ) { return ZERO ; } if ( FastMath . abs ( real ) < FastMath . abs ( imaginary ) ) { double q = real / imaginary ; double scale = 1. / ( real * q + imaginary ) ; return createComplex ( scale * q , - scale ) ; } else { double q = imaginary / real ; double scale = 1. / ( imaginary * q + real ) ; return createComplex ( scale , - scale * q ) ; } }
public Complex reciprocal() { if (isNaN) { return NaN; } if (real == 0.0 && imaginary == 0.0) { return INF; } if (isInfinite) { return ZERO; } if (FastMath.abs(real) < FastMath.abs(imaginary)) { double q = real / imaginary; double scale = 1. / (real * q + imaginary); return createComplex(scale * q, -scale); } else { double q = imaginary / real; double scale = 1. / (imaginary * q + real); return createComplex(scale, -scale * q); } }
public Complex reciprocal ( ) { if ( isNaN ) { return NaN ; } if ( real == 0.0 && imaginary == 0.0 ) { return INF ; } if ( isInfinite ) { return ZERO ; } if ( FastMath . abs ( real ) < FastMath . abs ( imaginary ) ) { double q = real / imaginary ; double scale = 1. / ( real * q + imaginary ) ; return createComplex ( scale * q , - scale ) ; } else { double q = imaginary / real ; double scale = 1. / ( imaginary * q + real ) ; return createComplex ( scale , - scale * q ) ; } }
Compress
46
src/main/java/org/apache/commons/compress/archivers/zip/X5455
528
534
Tests failing under jdk 9 : one reflection issue, one change to ZipEntry related issue
X5455_ExtendedTimestampTest is failing under JDK 9 , due to what appears to be a bogus value returned from getTime(). It seems like the test failure might be due to the changes introduced for this: https://bugs.openjdk.java.net/browse/JDK-8073497 Tests were run using intelliJ TestRunner, using the openjdk9 build from the tip of the jdk9 tree (not dev). I believe that this is at most one commit away from what will be the RC (which was delayed at the last minute due to two issues, one of which was javadoc related, and the other hotspot.
private static ZipLong unixTimeToZipLong(long l) { final long TWO_TO_32 = 0x100000000L; if (l >= TWO_TO_32) { throw new IllegalArgumentException("X5455 timestamps must fit in a signed 32 bit integer: " + l); } return new ZipLong(l); }
private static ZipLong unixTimeToZipLong ( long l ) { final long TWO_TO_32 = 0x100000000L ; if ( l >= TWO_TO_32 ) { throw new IllegalArgumentException ( "X5455 timestamps must fit in a signed 32 bit integer: " + l ) ; } return new ZipLong ( l ) ; }
private static ZipLong unixTimeToZipLong(long l) { if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) { throw new IllegalArgumentException("X5455 timestamps must fit in a signed 32 bit integer: " + l); } return new ZipLong(l); }
private static ZipLong unixTimeToZipLong ( long l ) { if ( l < Integer . MIN_VALUE || l > Integer . MAX_VALUE ) { throw new IllegalArgumentException ( "X5455 timestamps must fit in a signed 32 bit integer: " + l ) ; } return new ZipLong ( l ) ; }
JacksonDatabind
94
src/main/java/com/fasterxml/jackson/databind/jsontype/impl/SubTypeValidator.java
71
111
Block two more gadgets to exploit default typing issue (c3p0, CVE-2018-7489)
From an email report there are 2 other c3p0 classes (above and beyond ones listed in #1737) need to be blocked. EDIT 21-Jun-2021: Fix included in: * `2.9.5` * `2.8.11.1` * `2.7.9.3` * `2.6.7.5`
public void validateSubType(DeserializationContext ctxt, JavaType type) throws JsonMappingException { // There are certain nasty classes that could cause problems, mostly // via default typing -- catch them here. final Class<?> raw = type.getRawClass(); String full = raw.getName(); main_check: do { if (_cfgIllegalClassNames.contains(full)) { break; } // 18-Dec-2017, tatu: As per [databind#1855], need bit more sophisticated handling // for some Spring framework types // 05-Jan-2017, tatu: ... also, only applies to classes, not interfaces if (raw.isInterface()) { ; } else if (full.startsWith(PREFIX_SPRING)) { for (Class<?> cls = raw; (cls != null) && (cls != Object.class); cls = cls.getSuperclass()){ String name = cls.getSimpleName(); // looking for "AbstractBeanFactoryPointcutAdvisor" but no point to allow any is there? if ("AbstractPointcutAdvisor".equals(name) // ditto for "FileSystemXmlApplicationContext": block all ApplicationContexts || "AbstractApplicationContext".equals(name)) { break main_check; } // [databind#1737]; more 3rd party // s.add("com.mchange.v2.c3p0.JndiRefForwardingDataSource"); // s.add("com.mchange.v2.c3p0.WrapperConnectionPoolDataSource"); // [databind#1931]; more 3rd party // com.mchange.v2.c3p0.ComboPooledDataSource // com.mchange.v2.c3p0.debug.AfterCloseLoggingComboPooledDataSource } } return; } while (false); throw JsonMappingException.from(ctxt, String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); }
public void validateSubType ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final Class < ? > raw = type . getRawClass ( ) ; String full = raw . getName ( ) ; main_check : do { if ( _cfgIllegalClassNames . contains ( full ) ) { break ; } if ( raw . isInterface ( ) ) { ; } else if ( full . startsWith ( PREFIX_SPRING ) ) { for ( Class < ? > cls = raw ; ( cls != null ) && ( cls != Object . class ) ; cls = cls . getSuperclass ( ) ) { String name = cls . getSimpleName ( ) ; if ( "AbstractPointcutAdvisor" . equals ( name ) || "AbstractApplicationContext" . equals ( name ) ) { break main_check ; } } } return ; } while ( false ) ; throw JsonMappingException . from ( ctxt , String . format ( "Illegal type (%s) to deserialize: prevented for security reasons" , full ) ) ; }
public void validateSubType(DeserializationContext ctxt, JavaType type) throws JsonMappingException { // There are certain nasty classes that could cause problems, mostly // via default typing -- catch them here. final Class<?> raw = type.getRawClass(); String full = raw.getName(); main_check: do { if (_cfgIllegalClassNames.contains(full)) { break; } // 18-Dec-2017, tatu: As per [databind#1855], need bit more sophisticated handling // for some Spring framework types // 05-Jan-2017, tatu: ... also, only applies to classes, not interfaces if (raw.isInterface()) { ; } else if (full.startsWith(PREFIX_SPRING)) { for (Class<?> cls = raw; (cls != null) && (cls != Object.class); cls = cls.getSuperclass()){ String name = cls.getSimpleName(); // looking for "AbstractBeanFactoryPointcutAdvisor" but no point to allow any is there? if ("AbstractPointcutAdvisor".equals(name) // ditto for "FileSystemXmlApplicationContext": block all ApplicationContexts || "AbstractApplicationContext".equals(name)) { break main_check; } } } else if (full.startsWith(PREFIX_C3P0)) { // [databind#1737]; more 3rd party // s.add("com.mchange.v2.c3p0.JndiRefForwardingDataSource"); // s.add("com.mchange.v2.c3p0.WrapperConnectionPoolDataSource"); // [databind#1931]; more 3rd party // com.mchange.v2.c3p0.ComboPooledDataSource // com.mchange.v2.c3p0.debug.AfterCloseLoggingComboPooledDataSource if (full.endsWith("DataSource")) { break main_check; } } return; } while (false); throw JsonMappingException.from(ctxt, String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); }
public void validateSubType ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final Class < ? > raw = type . getRawClass ( ) ; String full = raw . getName ( ) ; main_check : do { if ( _cfgIllegalClassNames . contains ( full ) ) { break ; } if ( raw . isInterface ( ) ) { ; } else if ( full . startsWith ( PREFIX_SPRING ) ) { for ( Class < ? > cls = raw ; ( cls != null ) && ( cls != Object . class ) ; cls = cls . getSuperclass ( ) ) { String name = cls . getSimpleName ( ) ; if ( "AbstractPointcutAdvisor" . equals ( name ) || "AbstractApplicationContext" . equals ( name ) ) { break main_check ; } } } else if ( full . startsWith ( PREFIX_C3P0 ) ) { if ( full . endsWith ( "DataSource" ) ) { break main_check ; } } return ; } while ( false ) ; throw JsonMappingException . from ( ctxt , String . format ( "Illegal type (%s) to deserialize: prevented for security reasons" , full ) ) ; }
Compress
11
src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java
197
254
createArchiveInputStream detects text files less than 100 bytes as tar archives
The fix for COMPRESS-117 which modified ArchiveStreamFactory().createArchiveInputStream(inputstream) results in short text files (empirically seems to be those <= 100 bytes) being detected as tar archives which obviously is not desirable if one wants to know whether or not the files are archives. I'm not an expert on compressed archives but perhaps the heuristic that if a stream is interpretable as a tar file without an exception being thrown should only be applied on archives greater than 100 bytes?
public ArchiveInputStream createArchiveInputStream(final InputStream in) throws ArchiveException { if (in == null) { throw new IllegalArgumentException("Stream must not be null."); } if (!in.markSupported()) { throw new IllegalArgumentException("Mark is not supported."); } final byte[] signature = new byte[12]; in.mark(signature.length); try { int signatureLength = in.read(signature); in.reset(); if (ZipArchiveInputStream.matches(signature, signatureLength)) { return new ZipArchiveInputStream(in); } else if (JarArchiveInputStream.matches(signature, signatureLength)) { return new JarArchiveInputStream(in); } else if (ArArchiveInputStream.matches(signature, signatureLength)) { return new ArArchiveInputStream(in); } else if (CpioArchiveInputStream.matches(signature, signatureLength)) { return new CpioArchiveInputStream(in); } // Dump needs a bigger buffer to check the signature; final byte[] dumpsig = new byte[32]; in.mark(dumpsig.length); signatureLength = in.read(dumpsig); in.reset(); if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) { return new DumpArchiveInputStream(in); } // Tar needs an even bigger buffer to check the signature; read the first block final byte[] tarheader = new byte[512]; in.mark(tarheader.length); signatureLength = in.read(tarheader); in.reset(); if (TarArchiveInputStream.matches(tarheader, signatureLength)) { return new TarArchiveInputStream(in); } // COMPRESS-117 - improve auto-recognition try { TarArchiveInputStream tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader)); tais.getNextEntry(); return new TarArchiveInputStream(in); } catch (Exception e) { // NOPMD // can generate IllegalArgumentException as well as IOException // autodetection, simply not a TAR // ignored } } catch (IOException e) { throw new ArchiveException("Could not use reset and mark operations.", e); } throw new ArchiveException("No Archiver found for the stream signature"); }
public ArchiveInputStream createArchiveInputStream ( final InputStream in ) throws ArchiveException { if ( in == null ) { throw new IllegalArgumentException ( "Stream must not be null." ) ; } if ( ! in . markSupported ( ) ) { throw new IllegalArgumentException ( "Mark is not supported." ) ; } final byte [ ] signature = new byte [ 12 ] ; in . mark ( signature . length ) ; try { int signatureLength = in . read ( signature ) ; in . reset ( ) ; if ( ZipArchiveInputStream . matches ( signature , signatureLength ) ) { return new ZipArchiveInputStream ( in ) ; } else if ( JarArchiveInputStream . matches ( signature , signatureLength ) ) { return new JarArchiveInputStream ( in ) ; } else if ( ArArchiveInputStream . matches ( signature , signatureLength ) ) { return new ArArchiveInputStream ( in ) ; } else if ( CpioArchiveInputStream . matches ( signature , signatureLength ) ) { return new CpioArchiveInputStream ( in ) ; } final byte [ ] dumpsig = new byte [ 32 ] ; in . mark ( dumpsig . length ) ; signatureLength = in . read ( dumpsig ) ; in . reset ( ) ; if ( DumpArchiveInputStream . matches ( dumpsig , signatureLength ) ) { return new DumpArchiveInputStream ( in ) ; } final byte [ ] tarheader = new byte [ 512 ] ; in . mark ( tarheader . length ) ; signatureLength = in . read ( tarheader ) ; in . reset ( ) ; if ( TarArchiveInputStream . matches ( tarheader , signatureLength ) ) { return new TarArchiveInputStream ( in ) ; } try { TarArchiveInputStream tais = new TarArchiveInputStream ( new ByteArrayInputStream ( tarheader ) ) ; tais . getNextEntry ( ) ; return new TarArchiveInputStream ( in ) ; } catch ( Exception e ) { } } catch ( IOException e ) { throw new ArchiveException ( "Could not use reset and mark operations." , e ) ; } throw new ArchiveException ( "No Archiver found for the stream signature" ) ; }
public ArchiveInputStream createArchiveInputStream(final InputStream in) throws ArchiveException { if (in == null) { throw new IllegalArgumentException("Stream must not be null."); } if (!in.markSupported()) { throw new IllegalArgumentException("Mark is not supported."); } final byte[] signature = new byte[12]; in.mark(signature.length); try { int signatureLength = in.read(signature); in.reset(); if (ZipArchiveInputStream.matches(signature, signatureLength)) { return new ZipArchiveInputStream(in); } else if (JarArchiveInputStream.matches(signature, signatureLength)) { return new JarArchiveInputStream(in); } else if (ArArchiveInputStream.matches(signature, signatureLength)) { return new ArArchiveInputStream(in); } else if (CpioArchiveInputStream.matches(signature, signatureLength)) { return new CpioArchiveInputStream(in); } // Dump needs a bigger buffer to check the signature; final byte[] dumpsig = new byte[32]; in.mark(dumpsig.length); signatureLength = in.read(dumpsig); in.reset(); if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) { return new DumpArchiveInputStream(in); } // Tar needs an even bigger buffer to check the signature; read the first block final byte[] tarheader = new byte[512]; in.mark(tarheader.length); signatureLength = in.read(tarheader); in.reset(); if (TarArchiveInputStream.matches(tarheader, signatureLength)) { return new TarArchiveInputStream(in); } // COMPRESS-117 - improve auto-recognition if (signatureLength >= 512) { try { TarArchiveInputStream tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader)); tais.getNextEntry(); return new TarArchiveInputStream(in); } catch (Exception e) { // NOPMD // can generate IllegalArgumentException as well as IOException // autodetection, simply not a TAR // ignored } } } catch (IOException e) { throw new ArchiveException("Could not use reset and mark operations.", e); } throw new ArchiveException("No Archiver found for the stream signature"); }
public ArchiveInputStream createArchiveInputStream ( final InputStream in ) throws ArchiveException { if ( in == null ) { throw new IllegalArgumentException ( "Stream must not be null." ) ; } if ( ! in . markSupported ( ) ) { throw new IllegalArgumentException ( "Mark is not supported." ) ; } final byte [ ] signature = new byte [ 12 ] ; in . mark ( signature . length ) ; try { int signatureLength = in . read ( signature ) ; in . reset ( ) ; if ( ZipArchiveInputStream . matches ( signature , signatureLength ) ) { return new ZipArchiveInputStream ( in ) ; } else if ( JarArchiveInputStream . matches ( signature , signatureLength ) ) { return new JarArchiveInputStream ( in ) ; } else if ( ArArchiveInputStream . matches ( signature , signatureLength ) ) { return new ArArchiveInputStream ( in ) ; } else if ( CpioArchiveInputStream . matches ( signature , signatureLength ) ) { return new CpioArchiveInputStream ( in ) ; } final byte [ ] dumpsig = new byte [ 32 ] ; in . mark ( dumpsig . length ) ; signatureLength = in . read ( dumpsig ) ; in . reset ( ) ; if ( DumpArchiveInputStream . matches ( dumpsig , signatureLength ) ) { return new DumpArchiveInputStream ( in ) ; } final byte [ ] tarheader = new byte [ 512 ] ; in . mark ( tarheader . length ) ; signatureLength = in . read ( tarheader ) ; in . reset ( ) ; if ( TarArchiveInputStream . matches ( tarheader , signatureLength ) ) { return new TarArchiveInputStream ( in ) ; } if ( signatureLength >= 512 ) { try { TarArchiveInputStream tais = new TarArchiveInputStream ( new ByteArrayInputStream ( tarheader ) ) ; tais . getNextEntry ( ) ; return new TarArchiveInputStream ( in ) ; } catch ( Exception e ) { } } } catch ( IOException e ) { throw new ArchiveException ( "Could not use reset and mark operations." , e ) ; } throw new ArchiveException ( "No Archiver found for the stream signature" ) ; }
Csv
14
src/main/java/org/apache/commons/csv/CSVFormat.java
1001
1106
Negative numeric values in the first column are always quoted in minimal mode
Negative Numeric values are always quoted in minimal mode if (and only if) they are in the first column. i.e. long,lat,data "-92.222",43.333,3 Looking at the code, this is by design but seem to be for an unknown reason. From v1.2 CSVPrinter line 230: // TODO where did this rule come from? if (newRecord && (c < '0' || (c > '9' && c < 'A') || (c > 'Z' && c < 'a') || (c > 'z'))) { quote = true; } else ... I propose this rule to either be remove or at a minimum be changed to: // TODO where did this rule come from? if (newRecord && (c !='-' && c < '0' || (c > '9' && c < 'A') || (c > 'Z' && c < 'a') || (c > 'z'))) { quote = true; } else ...
private void printAndQuote(final Object object, final CharSequence value, final int offset, final int len, final Appendable out, final boolean newRecord) throws IOException { boolean quote = false; int start = offset; int pos = offset; final int end = offset + len; final char delimChar = getDelimiter(); final char quoteChar = getQuoteCharacter().charValue(); QuoteMode quoteModePolicy = getQuoteMode(); if (quoteModePolicy == null) { quoteModePolicy = QuoteMode.MINIMAL; } switch (quoteModePolicy) { case ALL: quote = true; break; case NON_NUMERIC: quote = !(object instanceof Number); break; case NONE: // Use the existing escaping code printAndEscape(value, offset, len, out); return; case MINIMAL: if (len <= 0) { // always quote an empty token that is the first // on the line, as it may be the only thing on the // line. If it were not quoted in that case, // an empty line has no tokens. if (newRecord) { quote = true; } } else { char c = value.charAt(pos); // RFC4180 (https://tools.ietf.org/html/rfc4180) TEXTDATA = %x20-21 / %x23-2B / %x2D-7E if (newRecord && (c < '0' || c > '9' && c < 'A' || c > 'Z' && c < 'a' || c > 'z')) { quote = true; } else if (c <= COMMENT) { // Some other chars at the start of a value caused the parser to fail, so for now // encapsulate if we start in anything less than '#'. We are being conservative // by including the default comment char too. 
quote = true; } else { while (pos < end) { c = value.charAt(pos); if (c == LF || c == CR || c == quoteChar || c == delimChar) { quote = true; break; } pos++; } if (!quote) { pos = end - 1; c = value.charAt(pos); // Some other chars at the end caused the parser to fail, so for now // encapsulate if we end in anything less than ' ' if (c <= SP) { quote = true; } } } } if (!quote) { // no encapsulation needed - write out the original value out.append(value, start, end); return; } break; default: throw new IllegalStateException("Unexpected Quote value: " + quoteModePolicy); } if (!quote) { // no encapsulation needed - write out the original value out.append(value, start, end); return; } // we hit something that needed encapsulation out.append(quoteChar); // Pick up where we left off: pos should be positioned on the first character that caused // the need for encapsulation. while (pos < end) { final char c = value.charAt(pos); if (c == quoteChar) { // write out the chunk up until this point // add 1 to the length to write out the encapsulator also out.append(value, start, pos + 1); // put the next starting position on the encapsulator so we will // write it out again with the next string (effectively doubling it) start = pos; } pos++; } // write the last segment out.append(value, start, pos); out.append(quoteChar); }
private void printAndQuote ( final Object object , final CharSequence value , final int offset , final int len , final Appendable out , final boolean newRecord ) throws IOException { boolean quote = false ; int start = offset ; int pos = offset ; final int end = offset + len ; final char delimChar = getDelimiter ( ) ; final char quoteChar = getQuoteCharacter ( ) . charValue ( ) ; QuoteMode quoteModePolicy = getQuoteMode ( ) ; if ( quoteModePolicy == null ) { quoteModePolicy = QuoteMode . MINIMAL ; } switch ( quoteModePolicy ) { case ALL : quote = true ; break ; case NON_NUMERIC : quote = ! ( object instanceof Number ) ; break ; case NONE : printAndEscape ( value , offset , len , out ) ; return ; case MINIMAL : if ( len <= 0 ) { if ( newRecord ) { quote = true ; } } else { char c = value . charAt ( pos ) ; if ( newRecord && ( c < '0' || c > '9' && c < 'A' || c > 'Z' && c < 'a' || c > 'z' ) ) { quote = true ; } else if ( c <= COMMENT ) { quote = true ; } else { while ( pos < end ) { c = value . charAt ( pos ) ; if ( c == LF || c == CR || c == quoteChar || c == delimChar ) { quote = true ; break ; } pos ++ ; } if ( ! quote ) { pos = end - 1 ; c = value . charAt ( pos ) ; if ( c <= SP ) { quote = true ; } } } } if ( ! quote ) { out . append ( value , start , end ) ; return ; } break ; default : throw new IllegalStateException ( "Unexpected Quote value: " + quoteModePolicy ) ; } if ( ! quote ) { out . append ( value , start , end ) ; return ; } out . append ( quoteChar ) ; while ( pos < end ) { final char c = value . charAt ( pos ) ; if ( c == quoteChar ) { out . append ( value , start , pos + 1 ) ; start = pos ; } pos ++ ; } out . append ( value , start , pos ) ; out . append ( quoteChar ) ; }
private void printAndQuote(final Object object, final CharSequence value, final int offset, final int len, final Appendable out, final boolean newRecord) throws IOException { boolean quote = false; int start = offset; int pos = offset; final int end = offset + len; final char delimChar = getDelimiter(); final char quoteChar = getQuoteCharacter().charValue(); QuoteMode quoteModePolicy = getQuoteMode(); if (quoteModePolicy == null) { quoteModePolicy = QuoteMode.MINIMAL; } switch (quoteModePolicy) { case ALL: quote = true; break; case NON_NUMERIC: quote = !(object instanceof Number); break; case NONE: // Use the existing escaping code printAndEscape(value, offset, len, out); return; case MINIMAL: if (len <= 0) { // always quote an empty token that is the first // on the line, as it may be the only thing on the // line. If it were not quoted in that case, // an empty line has no tokens. if (newRecord) { quote = true; } } else { char c = value.charAt(pos); // RFC4180 (https://tools.ietf.org/html/rfc4180) TEXTDATA = %x20-21 / %x23-2B / %x2D-7E if (newRecord && (c < 0x20 || c > 0x21 && c < 0x23 || c > 0x2B && c < 0x2D || c > 0x7E)) { quote = true; } else if (c <= COMMENT) { // Some other chars at the start of a value caused the parser to fail, so for now // encapsulate if we start in anything less than '#'. We are being conservative // by including the default comment char too. 
quote = true; } else { while (pos < end) { c = value.charAt(pos); if (c == LF || c == CR || c == quoteChar || c == delimChar) { quote = true; break; } pos++; } if (!quote) { pos = end - 1; c = value.charAt(pos); // Some other chars at the end caused the parser to fail, so for now // encapsulate if we end in anything less than ' ' if (c <= SP) { quote = true; } } } } if (!quote) { // no encapsulation needed - write out the original value out.append(value, start, end); return; } break; default: throw new IllegalStateException("Unexpected Quote value: " + quoteModePolicy); } if (!quote) { // no encapsulation needed - write out the original value out.append(value, start, end); return; } // we hit something that needed encapsulation out.append(quoteChar); // Pick up where we left off: pos should be positioned on the first character that caused // the need for encapsulation. while (pos < end) { final char c = value.charAt(pos); if (c == quoteChar) { // write out the chunk up until this point // add 1 to the length to write out the encapsulator also out.append(value, start, pos + 1); // put the next starting position on the encapsulator so we will // write it out again with the next string (effectively doubling it) start = pos; } pos++; } // write the last segment out.append(value, start, pos); out.append(quoteChar); }
private void printAndQuote ( final Object object , final CharSequence value , final int offset , final int len , final Appendable out , final boolean newRecord ) throws IOException { boolean quote = false ; int start = offset ; int pos = offset ; final int end = offset + len ; final char delimChar = getDelimiter ( ) ; final char quoteChar = getQuoteCharacter ( ) . charValue ( ) ; QuoteMode quoteModePolicy = getQuoteMode ( ) ; if ( quoteModePolicy == null ) { quoteModePolicy = QuoteMode . MINIMAL ; } switch ( quoteModePolicy ) { case ALL : quote = true ; break ; case NON_NUMERIC : quote = ! ( object instanceof Number ) ; break ; case NONE : printAndEscape ( value , offset , len , out ) ; return ; case MINIMAL : if ( len <= 0 ) { if ( newRecord ) { quote = true ; } } else { char c = value . charAt ( pos ) ; if ( newRecord && ( c < 0x20 || c > 0x21 && c < 0x23 || c > 0x2B && c < 0x2D || c > 0x7E ) ) { quote = true ; } else if ( c <= COMMENT ) { quote = true ; } else { while ( pos < end ) { c = value . charAt ( pos ) ; if ( c == LF || c == CR || c == quoteChar || c == delimChar ) { quote = true ; break ; } pos ++ ; } if ( ! quote ) { pos = end - 1 ; c = value . charAt ( pos ) ; if ( c <= SP ) { quote = true ; } } } } if ( ! quote ) { out . append ( value , start , end ) ; return ; } break ; default : throw new IllegalStateException ( "Unexpected Quote value: " + quoteModePolicy ) ; } if ( ! quote ) { out . append ( value , start , end ) ; return ; } out . append ( quoteChar ) ; while ( pos < end ) { final char c = value . charAt ( pos ) ; if ( c == quoteChar ) { out . append ( value , start , pos + 1 ) ; start = pos ; } pos ++ ; } out . append ( value , start , pos ) ; out . append ( quoteChar ) ; }
Math
69
src/main/java/org/apache/commons/math/stat/correlation/PearsonsCorrelation.java
160
176
PearsonsCorrelation.getCorrelationPValues() precision limited by machine epsilon
Similar to the issue described in MATH-201, using PearsonsCorrelation.getCorrelationPValues() with many treatments results in p-values that are continuous down to 2.2e-16 but that drop to 0 after that. In MATH-201, the problem was described as such: > So in essence, the p-value returned by TTestImpl.tTest() is: > > 1.0 - (cumulativeProbability(t) - cumulativeProbabily(-t)) > > For large-ish t-statistics, cumulativeProbabilty(-t) can get quite small, and cumulativeProbabilty(t) can get very close to 1.0. When > cumulativeProbability(-t) is less than the machine epsilon, we get p-values equal to zero because: > > 1.0 - 1.0 + 0.0 = 0.0 The solution in MATH-201 was to modify the p-value calculation to this: > p = 2.0 * cumulativeProbability(-t) Here, the problem is similar. From PearsonsCorrelation.getCorrelationPValues(): p = 2 * (1 - tDistribution.cumulativeProbability(t)); Directly calculating the p-value using identical code as PearsonsCorrelation.getCorrelationPValues(), but with the following change seems to solve the problem: p = 2 * (tDistribution.cumulativeProbability(-t));
public RealMatrix getCorrelationPValues() throws MathException { TDistribution tDistribution = new TDistributionImpl(nObs - 2); int nVars = correlationMatrix.getColumnDimension(); double[][] out = new double[nVars][nVars]; for (int i = 0; i < nVars; i++) { for (int j = 0; j < nVars; j++) { if (i == j) { out[i][j] = 0d; } else { double r = correlationMatrix.getEntry(i, j); double t = Math.abs(r * Math.sqrt((nObs - 2)/(1 - r * r))); out[i][j] = 2 * (1 - tDistribution.cumulativeProbability(t)); } } } return new BlockRealMatrix(out); }
public RealMatrix getCorrelationPValues ( ) throws MathException { TDistribution tDistribution = new TDistributionImpl ( nObs - 2 ) ; int nVars = correlationMatrix . getColumnDimension ( ) ; double [ ] [ ] out = new double [ nVars ] [ nVars ] ; for ( int i = 0 ; i < nVars ; i ++ ) { for ( int j = 0 ; j < nVars ; j ++ ) { if ( i == j ) { out [ i ] [ j ] = 0d ; } else { double r = correlationMatrix . getEntry ( i , j ) ; double t = Math . abs ( r * Math . sqrt ( ( nObs - 2 ) / ( 1 - r * r ) ) ) ; out [ i ] [ j ] = 2 * ( 1 - tDistribution . cumulativeProbability ( t ) ) ; } } } return new BlockRealMatrix ( out ) ; }
public RealMatrix getCorrelationPValues() throws MathException { TDistribution tDistribution = new TDistributionImpl(nObs - 2); int nVars = correlationMatrix.getColumnDimension(); double[][] out = new double[nVars][nVars]; for (int i = 0; i < nVars; i++) { for (int j = 0; j < nVars; j++) { if (i == j) { out[i][j] = 0d; } else { double r = correlationMatrix.getEntry(i, j); double t = Math.abs(r * Math.sqrt((nObs - 2)/(1 - r * r))); out[i][j] = 2 * tDistribution.cumulativeProbability(-t); } } } return new BlockRealMatrix(out); }
public RealMatrix getCorrelationPValues ( ) throws MathException { TDistribution tDistribution = new TDistributionImpl ( nObs - 2 ) ; int nVars = correlationMatrix . getColumnDimension ( ) ; double [ ] [ ] out = new double [ nVars ] [ nVars ] ; for ( int i = 0 ; i < nVars ; i ++ ) { for ( int j = 0 ; j < nVars ; j ++ ) { if ( i == j ) { out [ i ] [ j ] = 0d ; } else { double r = correlationMatrix . getEntry ( i , j ) ; double t = Math . abs ( r * Math . sqrt ( ( nObs - 2 ) / ( 1 - r * r ) ) ) ; out [ i ] [ j ] = 2 * tDistribution . cumulativeProbability ( - t ) ; } } } return new BlockRealMatrix ( out ) ; }
Math
86
src/java/org/apache/commons/math/linear/CholeskyDecompositionImpl.java
93
151
testing for symmetric positive definite matrix in CholeskyDecomposition
I used this matrix: double[][] cv = { {0.40434286, 0.09376327, 0.30328980, 0.04909388}, {0.09376327, 0.10400408, 0.07137959, 0.04762857}, {0.30328980, 0.07137959, 0.30458776, 0.04882449}, {0.04909388, 0.04762857, 0.04882449, 0.07543265} }; And it works fine, because it is symmetric positive definite I tried this matrix: double[][] cv = { {0.40434286, -0.09376327, 0.30328980, 0.04909388}, {-0.09376327, 0.10400408, 0.07137959, 0.04762857}, {0.30328980, 0.07137959, 0.30458776, 0.04882449}, {0.04909388, 0.04762857, 0.04882449, 0.07543265} }; And it should throw an exception but it does not. I tested the matrix in R and R's cholesky decomposition method returns that the matrix is not symmetric positive definite. Obviously your code is not catching this appropriately. By the way (in my opinion) the use of exceptions to check these conditions is not the best design or use for exceptions. If you are going to force the use to try and catch these exceptions at least provide methods to test the conditions prior to the possibility of the exception.
public CholeskyDecompositionImpl(final RealMatrix matrix, final double relativeSymmetryThreshold, final double absolutePositivityThreshold) throws NonSquareMatrixException, NotSymmetricMatrixException, NotPositiveDefiniteMatrixException { if (!matrix.isSquare()) { throw new NonSquareMatrixException(matrix.getRowDimension(), matrix.getColumnDimension()); } final int order = matrix.getRowDimension(); lTData = matrix.getData(); cachedL = null; cachedLT = null; // check the matrix before transformation for (int i = 0; i < order; ++i) { final double[] lI = lTData[i]; if (lTData[i][i] < absolutePositivityThreshold) { throw new NotPositiveDefiniteMatrixException(); } // check off-diagonal elements (and reset them to 0) for (int j = i + 1; j < order; ++j) { final double[] lJ = lTData[j]; final double lIJ = lI[j]; final double lJI = lJ[i]; final double maxDelta = relativeSymmetryThreshold * Math.max(Math.abs(lIJ), Math.abs(lJI)); if (Math.abs(lIJ - lJI) > maxDelta) { throw new NotSymmetricMatrixException(); } lJ[i] = 0; } } // transform the matrix for (int i = 0; i < order; ++i) { final double[] ltI = lTData[i]; // check diagonal element ltI[i] = Math.sqrt(ltI[i]); final double inverse = 1.0 / ltI[i]; for (int q = order - 1; q > i; --q) { ltI[q] *= inverse; final double[] ltQ = lTData[q]; for (int p = q; p < order; ++p) { ltQ[p] -= ltI[q] * ltI[p]; } } } }
public CholeskyDecompositionImpl ( final RealMatrix matrix , final double relativeSymmetryThreshold , final double absolutePositivityThreshold ) throws NonSquareMatrixException , NotSymmetricMatrixException , NotPositiveDefiniteMatrixException { if ( ! matrix . isSquare ( ) ) { throw new NonSquareMatrixException ( matrix . getRowDimension ( ) , matrix . getColumnDimension ( ) ) ; } final int order = matrix . getRowDimension ( ) ; lTData = matrix . getData ( ) ; cachedL = null ; cachedLT = null ; for ( int i = 0 ; i < order ; ++ i ) { final double [ ] lI = lTData [ i ] ; if ( lTData [ i ] [ i ] < absolutePositivityThreshold ) { throw new NotPositiveDefiniteMatrixException ( ) ; } for ( int j = i + 1 ; j < order ; ++ j ) { final double [ ] lJ = lTData [ j ] ; final double lIJ = lI [ j ] ; final double lJI = lJ [ i ] ; final double maxDelta = relativeSymmetryThreshold * Math . max ( Math . abs ( lIJ ) , Math . abs ( lJI ) ) ; if ( Math . abs ( lIJ - lJI ) > maxDelta ) { throw new NotSymmetricMatrixException ( ) ; } lJ [ i ] = 0 ; } } for ( int i = 0 ; i < order ; ++ i ) { final double [ ] ltI = lTData [ i ] ; ltI [ i ] = Math . sqrt ( ltI [ i ] ) ; final double inverse = 1.0 / ltI [ i ] ; for ( int q = order - 1 ; q > i ; -- q ) { ltI [ q ] *= inverse ; final double [ ] ltQ = lTData [ q ] ; for ( int p = q ; p < order ; ++ p ) { ltQ [ p ] -= ltI [ q ] * ltI [ p ] ; } } } }
public CholeskyDecompositionImpl(final RealMatrix matrix, final double relativeSymmetryThreshold, final double absolutePositivityThreshold) throws NonSquareMatrixException, NotSymmetricMatrixException, NotPositiveDefiniteMatrixException { if (!matrix.isSquare()) { throw new NonSquareMatrixException(matrix.getRowDimension(), matrix.getColumnDimension()); } final int order = matrix.getRowDimension(); lTData = matrix.getData(); cachedL = null; cachedLT = null; // check the matrix before transformation for (int i = 0; i < order; ++i) { final double[] lI = lTData[i]; // check off-diagonal elements (and reset them to 0) for (int j = i + 1; j < order; ++j) { final double[] lJ = lTData[j]; final double lIJ = lI[j]; final double lJI = lJ[i]; final double maxDelta = relativeSymmetryThreshold * Math.max(Math.abs(lIJ), Math.abs(lJI)); if (Math.abs(lIJ - lJI) > maxDelta) { throw new NotSymmetricMatrixException(); } lJ[i] = 0; } } // transform the matrix for (int i = 0; i < order; ++i) { final double[] ltI = lTData[i]; // check diagonal element if (ltI[i] < absolutePositivityThreshold) { throw new NotPositiveDefiniteMatrixException(); } ltI[i] = Math.sqrt(ltI[i]); final double inverse = 1.0 / ltI[i]; for (int q = order - 1; q > i; --q) { ltI[q] *= inverse; final double[] ltQ = lTData[q]; for (int p = q; p < order; ++p) { ltQ[p] -= ltI[q] * ltI[p]; } } } }
public CholeskyDecompositionImpl ( final RealMatrix matrix , final double relativeSymmetryThreshold , final double absolutePositivityThreshold ) throws NonSquareMatrixException , NotSymmetricMatrixException , NotPositiveDefiniteMatrixException { if ( ! matrix . isSquare ( ) ) { throw new NonSquareMatrixException ( matrix . getRowDimension ( ) , matrix . getColumnDimension ( ) ) ; } final int order = matrix . getRowDimension ( ) ; lTData = matrix . getData ( ) ; cachedL = null ; cachedLT = null ; for ( int i = 0 ; i < order ; ++ i ) { final double [ ] lI = lTData [ i ] ; for ( int j = i + 1 ; j < order ; ++ j ) { final double [ ] lJ = lTData [ j ] ; final double lIJ = lI [ j ] ; final double lJI = lJ [ i ] ; final double maxDelta = relativeSymmetryThreshold * Math . max ( Math . abs ( lIJ ) , Math . abs ( lJI ) ) ; if ( Math . abs ( lIJ - lJI ) > maxDelta ) { throw new NotSymmetricMatrixException ( ) ; } lJ [ i ] = 0 ; } } for ( int i = 0 ; i < order ; ++ i ) { final double [ ] ltI = lTData [ i ] ; if ( ltI [ i ] < absolutePositivityThreshold ) { throw new NotPositiveDefiniteMatrixException ( ) ; } ltI [ i ] = Math . sqrt ( ltI [ i ] ) ; final double inverse = 1.0 / ltI [ i ] ; for ( int q = order - 1 ; q > i ; -- q ) { ltI [ q ] *= inverse ; final double [ ] ltQ = lTData [ q ] ; for ( int p = q ; p < order ; ++ p ) { ltQ [ p ] -= ltI [ q ] * ltI [ p ] ; } } } }
Csv
7
src/main/java/org/apache/commons/csv/CSVParser.java
348
376
HeaderMap is inconsistent when it is parsed from an input with duplicate columns names
Given a parser format for csv files with a header line: {code} CSVFormat myFormat = CSVFormat.RFC4180.withDelimiter(",").withQuoteChar('"').withQuotePolicy(Quote.MINIMAL) .withIgnoreSurroundingSpaces(true).withHeader().withSkipHeaderRecord(true); {code} And given a file with duplicate header names: Col1,Col2,Col2,Col3,Col4 1,2,3,4,5 4,5,6,7,8 The HeaderMap returned by the parser misses an entry because of the Column name being used as a key, leading to wrong behavior when we rely on it. If this is not supposed to happen in the file regarding the CSV format, at least this should raise an error. If not we should come up with a more clever way to store and access the headers.
private Map<String, Integer> initializeHeader() throws IOException { Map<String, Integer> hdrMap = null; final String[] formatHeader = this.format.getHeader(); if (formatHeader != null) { hdrMap = new LinkedHashMap<String, Integer>(); String[] header = null; if (formatHeader.length == 0) { // read the header from the first line of the file final CSVRecord nextRecord = this.nextRecord(); if (nextRecord != null) { header = nextRecord.values(); } } else { if (this.format.getSkipHeaderRecord()) { this.nextRecord(); } header = formatHeader; } // build the name to index mappings if (header != null) { for (int i = 0; i < header.length; i++) { hdrMap.put(header[i], Integer.valueOf(i)); } } } return hdrMap; }
private Map < String , Integer > initializeHeader ( ) throws IOException { Map < String , Integer > hdrMap = null ; final String [ ] formatHeader = this . format . getHeader ( ) ; if ( formatHeader != null ) { hdrMap = new LinkedHashMap < String , Integer > ( ) ; String [ ] header = null ; if ( formatHeader . length == 0 ) { final CSVRecord nextRecord = this . nextRecord ( ) ; if ( nextRecord != null ) { header = nextRecord . values ( ) ; } } else { if ( this . format . getSkipHeaderRecord ( ) ) { this . nextRecord ( ) ; } header = formatHeader ; } if ( header != null ) { for ( int i = 0 ; i < header . length ; i ++ ) { hdrMap . put ( header [ i ] , Integer . valueOf ( i ) ) ; } } } return hdrMap ; }
private Map<String, Integer> initializeHeader() throws IOException { Map<String, Integer> hdrMap = null; final String[] formatHeader = this.format.getHeader(); if (formatHeader != null) { hdrMap = new LinkedHashMap<String, Integer>(); String[] header = null; if (formatHeader.length == 0) { // read the header from the first line of the file final CSVRecord nextRecord = this.nextRecord(); if (nextRecord != null) { header = nextRecord.values(); } } else { if (this.format.getSkipHeaderRecord()) { this.nextRecord(); } header = formatHeader; } // build the name to index mappings if (header != null) { for (int i = 0; i < header.length; i++) { if (hdrMap.containsKey(header[i])) { throw new IllegalStateException("The header contains duplicate names: " + Arrays.toString(header)); } hdrMap.put(header[i], Integer.valueOf(i)); } } } return hdrMap; }
private Map < String , Integer > initializeHeader ( ) throws IOException { Map < String , Integer > hdrMap = null ; final String [ ] formatHeader = this . format . getHeader ( ) ; if ( formatHeader != null ) { hdrMap = new LinkedHashMap < String , Integer > ( ) ; String [ ] header = null ; if ( formatHeader . length == 0 ) { final CSVRecord nextRecord = this . nextRecord ( ) ; if ( nextRecord != null ) { header = nextRecord . values ( ) ; } } else { if ( this . format . getSkipHeaderRecord ( ) ) { this . nextRecord ( ) ; } header = formatHeader ; } if ( header != null ) { for ( int i = 0 ; i < header . length ; i ++ ) { if ( hdrMap . containsKey ( header [ i ] ) ) { throw new IllegalStateException ( "The header contains duplicate names: " + Arrays . toString ( header ) ) ; } hdrMap . put ( header [ i ] , Integer . valueOf ( i ) ) ; } } } return hdrMap ; }
JacksonXml
1
src/main/java/com/fasterxml/jackson/dataformat/xml/deser/FromXmlParser.java
444
578
Problem with deserialization of nested non-wrapped lists, with empty inner list
Looks like there is a problem, wherein nested structures like say: - Definition POJO, with `records`, unwrapped List with `Record` - `Record` POJO having property `fields`, another unwrapped list of `Field` POJOs and case where inner `List` happens to be empty/missing, cause incorrectly "split" parts of outermost `List`s (here for property `records`). I will come up with a full reproduction later on, but observed this in the wild, and I think it occurs with latest 2.7.0-rc code, as well as `2.6.4-1`, so is not just something that has been fixed with a later version.
@Override public JsonToken nextToken() throws IOException { _binaryValue = null; if (_nextToken != null) { JsonToken t = _nextToken; _currToken = t; _nextToken = null; switch (t) { case START_OBJECT: _parsingContext = _parsingContext.createChildObjectContext(-1, -1); break; case START_ARRAY: _parsingContext = _parsingContext.createChildArrayContext(-1, -1); break; case END_OBJECT: case END_ARRAY: _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); break; case FIELD_NAME: _parsingContext.setCurrentName(_xmlTokens.getLocalName()); break; default: // VALUE_STRING, VALUE_NULL // should be fine as is? } return t; } int token = _xmlTokens.next(); // Need to have a loop just because we may have to eat/convert // a start-element that indicates an array element. while (token == XmlTokenStream.XML_START_ELEMENT) { // If we thought we might get leaf, no such luck if (_mayBeLeaf) { // leave _mayBeLeaf set, as we start a new context _nextToken = JsonToken.FIELD_NAME; _parsingContext = _parsingContext.createChildObjectContext(-1, -1); return (_currToken = JsonToken.START_OBJECT); } if (_parsingContext.inArray()) { // Yup: in array, so this element could be verified; but it won't be // reported anyway, and we need to process following event. token = _xmlTokens.next(); _mayBeLeaf = true; continue; } String name = _xmlTokens.getLocalName(); _parsingContext.setCurrentName(name); // Ok: virtual wrapping can be done by simply repeating current START_ELEMENT. // Couple of ways to do it; but start by making _xmlTokens replay the thing... if (_namesToWrap != null && _namesToWrap.contains(name)) { _xmlTokens.repeatStartElement(); } _mayBeLeaf = true; // Ok: in array context we need to skip reporting field names. // But what's the best way to find next token? return (_currToken = JsonToken.FIELD_NAME); } // Ok; beyond start element, what do we get? 
switch (token) { case XmlTokenStream.XML_END_ELEMENT: // Simple, except that if this is a leaf, need to suppress end: if (_mayBeLeaf) { _mayBeLeaf = false; // 06-Jan-2015, tatu: as per [dataformat-xml#180], need to // expose as empty Object, not null return (_currToken = JsonToken.VALUE_NULL); } _currToken = _parsingContext.inArray() ? JsonToken.END_ARRAY : JsonToken.END_OBJECT; _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); return _currToken; case XmlTokenStream.XML_ATTRIBUTE_NAME: // If there was a chance of leaf node, no more... if (_mayBeLeaf) { _mayBeLeaf = false; _nextToken = JsonToken.FIELD_NAME; _currText = _xmlTokens.getText(); _parsingContext = _parsingContext.createChildObjectContext(-1, -1); return (_currToken = JsonToken.START_OBJECT); } _parsingContext.setCurrentName(_xmlTokens.getLocalName()); return (_currToken = JsonToken.FIELD_NAME); case XmlTokenStream.XML_ATTRIBUTE_VALUE: _currText = _xmlTokens.getText(); return (_currToken = JsonToken.VALUE_STRING); case XmlTokenStream.XML_TEXT: _currText = _xmlTokens.getText(); if (_mayBeLeaf) { _mayBeLeaf = false; /* One more refinement (pronunced like "hack") is that if * we had an empty String (or all white space), and we are * deserializing an array, we better hide the empty text. 
*/ // Also: must skip following END_ELEMENT _xmlTokens.skipEndElement(); if (_parsingContext.inArray()) { if (_isEmpty(_currText)) { // 06-Jan-2015, tatu: as per [dataformat-xml#180], need to // expose as empty Object, not null (or, worse, as used to // be done, by swallowing the token) _currToken = JsonToken.END_ARRAY; _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); return _currToken; } } return (_currToken = JsonToken.VALUE_STRING); } else { // [dataformat-xml#177]: empty text may also need to be skipped if (_parsingContext.inObject() && (_currToken != JsonToken.FIELD_NAME) && _isEmpty(_currText)) { _currToken = JsonToken.END_OBJECT; _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); return _currToken; } } // If not a leaf (or otherwise ignorable), need to transform into property... _parsingContext.setCurrentName(_cfgNameForTextElement); _nextToken = JsonToken.VALUE_STRING; return (_currToken = JsonToken.FIELD_NAME); case XmlTokenStream.XML_END: return (_currToken = null); } // should never get here _throwInternal(); return null; }
@ Override public JsonToken nextToken ( ) throws IOException { _binaryValue = null ; if ( _nextToken != null ) { JsonToken t = _nextToken ; _currToken = t ; _nextToken = null ; switch ( t ) { case START_OBJECT : _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; break ; case START_ARRAY : _parsingContext = _parsingContext . createChildArrayContext ( - 1 , - 1 ) ; break ; case END_OBJECT : case END_ARRAY : _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; break ; case FIELD_NAME : _parsingContext . setCurrentName ( _xmlTokens . getLocalName ( ) ) ; break ; default : } return t ; } int token = _xmlTokens . next ( ) ; while ( token == XmlTokenStream . XML_START_ELEMENT ) { if ( _mayBeLeaf ) { _nextToken = JsonToken . FIELD_NAME ; _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; return ( _currToken = JsonToken . START_OBJECT ) ; } if ( _parsingContext . inArray ( ) ) { token = _xmlTokens . next ( ) ; _mayBeLeaf = true ; continue ; } String name = _xmlTokens . getLocalName ( ) ; _parsingContext . setCurrentName ( name ) ; if ( _namesToWrap != null && _namesToWrap . contains ( name ) ) { _xmlTokens . repeatStartElement ( ) ; } _mayBeLeaf = true ; return ( _currToken = JsonToken . FIELD_NAME ) ; } switch ( token ) { case XmlTokenStream . XML_END_ELEMENT : if ( _mayBeLeaf ) { _mayBeLeaf = false ; return ( _currToken = JsonToken . VALUE_NULL ) ; } _currToken = _parsingContext . inArray ( ) ? JsonToken . END_ARRAY : JsonToken . END_OBJECT ; _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; return _currToken ; case XmlTokenStream . XML_ATTRIBUTE_NAME : if ( _mayBeLeaf ) { _mayBeLeaf = false ; _nextToken = JsonToken . FIELD_NAME ; _currText = _xmlTokens . getText ( ) ; _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; return ( _currToken = JsonToken . START_OBJECT ) ; } _parsingContext . 
setCurrentName ( _xmlTokens . getLocalName ( ) ) ; return ( _currToken = JsonToken . FIELD_NAME ) ; case XmlTokenStream . XML_ATTRIBUTE_VALUE : _currText = _xmlTokens . getText ( ) ; return ( _currToken = JsonToken . VALUE_STRING ) ; case XmlTokenStream . XML_TEXT : _currText = _xmlTokens . getText ( ) ; if ( _mayBeLeaf ) { _mayBeLeaf = false ; _xmlTokens . skipEndElement ( ) ; if ( _parsingContext . inArray ( ) ) { if ( _isEmpty ( _currText ) ) { _currToken = JsonToken . END_ARRAY ; _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; return _currToken ; } } return ( _currToken = JsonToken . VALUE_STRING ) ; } else { if ( _parsingContext . inObject ( ) && ( _currToken != JsonToken . FIELD_NAME ) && _isEmpty ( _currText ) ) { _currToken = JsonToken . END_OBJECT ; _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; return _currToken ; } } _parsingContext . setCurrentName ( _cfgNameForTextElement ) ; _nextToken = JsonToken . VALUE_STRING ; return ( _currToken = JsonToken . FIELD_NAME ) ; case XmlTokenStream . XML_END : return ( _currToken = null ) ; } _throwInternal ( ) ; return null ; }
@Override public JsonToken nextToken() throws IOException { _binaryValue = null; if (_nextToken != null) { JsonToken t = _nextToken; _currToken = t; _nextToken = null; switch (t) { case START_OBJECT: _parsingContext = _parsingContext.createChildObjectContext(-1, -1); break; case START_ARRAY: _parsingContext = _parsingContext.createChildArrayContext(-1, -1); break; case END_OBJECT: case END_ARRAY: _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); break; case FIELD_NAME: _parsingContext.setCurrentName(_xmlTokens.getLocalName()); break; default: // VALUE_STRING, VALUE_NULL // should be fine as is? } return t; } int token = _xmlTokens.next(); // Need to have a loop just because we may have to eat/convert // a start-element that indicates an array element. while (token == XmlTokenStream.XML_START_ELEMENT) { // If we thought we might get leaf, no such luck if (_mayBeLeaf) { // leave _mayBeLeaf set, as we start a new context _nextToken = JsonToken.FIELD_NAME; _parsingContext = _parsingContext.createChildObjectContext(-1, -1); return (_currToken = JsonToken.START_OBJECT); } if (_parsingContext.inArray()) { // Yup: in array, so this element could be verified; but it won't be // reported anyway, and we need to process following event. token = _xmlTokens.next(); _mayBeLeaf = true; continue; } String name = _xmlTokens.getLocalName(); _parsingContext.setCurrentName(name); // Ok: virtual wrapping can be done by simply repeating current START_ELEMENT. // Couple of ways to do it; but start by making _xmlTokens replay the thing... if (_namesToWrap != null && _namesToWrap.contains(name)) { _xmlTokens.repeatStartElement(); } _mayBeLeaf = true; // Ok: in array context we need to skip reporting field names. // But what's the best way to find next token? return (_currToken = JsonToken.FIELD_NAME); } // Ok; beyond start element, what do we get? 
switch (token) { case XmlTokenStream.XML_END_ELEMENT: // Simple, except that if this is a leaf, need to suppress end: if (_mayBeLeaf) { _mayBeLeaf = false; if (_parsingContext.inArray()) { // 06-Jan-2015, tatu: as per [dataformat-xml#180], need to // expose as empty Object, not null _nextToken = JsonToken.END_OBJECT; _parsingContext = _parsingContext.createChildObjectContext(-1, -1); return (_currToken = JsonToken.START_OBJECT); } return (_currToken = JsonToken.VALUE_NULL); } _currToken = _parsingContext.inArray() ? JsonToken.END_ARRAY : JsonToken.END_OBJECT; _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); return _currToken; case XmlTokenStream.XML_ATTRIBUTE_NAME: // If there was a chance of leaf node, no more... if (_mayBeLeaf) { _mayBeLeaf = false; _nextToken = JsonToken.FIELD_NAME; _currText = _xmlTokens.getText(); _parsingContext = _parsingContext.createChildObjectContext(-1, -1); return (_currToken = JsonToken.START_OBJECT); } _parsingContext.setCurrentName(_xmlTokens.getLocalName()); return (_currToken = JsonToken.FIELD_NAME); case XmlTokenStream.XML_ATTRIBUTE_VALUE: _currText = _xmlTokens.getText(); return (_currToken = JsonToken.VALUE_STRING); case XmlTokenStream.XML_TEXT: _currText = _xmlTokens.getText(); if (_mayBeLeaf) { _mayBeLeaf = false; /* One more refinement (pronunced like "hack") is that if * we had an empty String (or all white space), and we are * deserializing an array, we better hide the empty text. 
*/ // Also: must skip following END_ELEMENT _xmlTokens.skipEndElement(); if (_parsingContext.inArray()) { if (_isEmpty(_currText)) { // 06-Jan-2015, tatu: as per [dataformat-xml#180], need to // expose as empty Object, not null (or, worse, as used to // be done, by swallowing the token) _nextToken = JsonToken.END_OBJECT; _parsingContext = _parsingContext.createChildObjectContext(-1, -1); return (_currToken = JsonToken.START_OBJECT); } } return (_currToken = JsonToken.VALUE_STRING); } else { // [dataformat-xml#177]: empty text may also need to be skipped if (_parsingContext.inObject() && (_currToken != JsonToken.FIELD_NAME) && _isEmpty(_currText)) { _currToken = JsonToken.END_OBJECT; _parsingContext = _parsingContext.getParent(); _namesToWrap = _parsingContext.getNamesToWrap(); return _currToken; } } // If not a leaf (or otherwise ignorable), need to transform into property... _parsingContext.setCurrentName(_cfgNameForTextElement); _nextToken = JsonToken.VALUE_STRING; return (_currToken = JsonToken.FIELD_NAME); case XmlTokenStream.XML_END: return (_currToken = null); } // should never get here _throwInternal(); return null; }
@ Override public JsonToken nextToken ( ) throws IOException { _binaryValue = null ; if ( _nextToken != null ) { JsonToken t = _nextToken ; _currToken = t ; _nextToken = null ; switch ( t ) { case START_OBJECT : _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; break ; case START_ARRAY : _parsingContext = _parsingContext . createChildArrayContext ( - 1 , - 1 ) ; break ; case END_OBJECT : case END_ARRAY : _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; break ; case FIELD_NAME : _parsingContext . setCurrentName ( _xmlTokens . getLocalName ( ) ) ; break ; default : } return t ; } int token = _xmlTokens . next ( ) ; while ( token == XmlTokenStream . XML_START_ELEMENT ) { if ( _mayBeLeaf ) { _nextToken = JsonToken . FIELD_NAME ; _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; return ( _currToken = JsonToken . START_OBJECT ) ; } if ( _parsingContext . inArray ( ) ) { token = _xmlTokens . next ( ) ; _mayBeLeaf = true ; continue ; } String name = _xmlTokens . getLocalName ( ) ; _parsingContext . setCurrentName ( name ) ; if ( _namesToWrap != null && _namesToWrap . contains ( name ) ) { _xmlTokens . repeatStartElement ( ) ; } _mayBeLeaf = true ; return ( _currToken = JsonToken . FIELD_NAME ) ; } switch ( token ) { case XmlTokenStream . XML_END_ELEMENT : if ( _mayBeLeaf ) { _mayBeLeaf = false ; if ( _parsingContext . inArray ( ) ) { _nextToken = JsonToken . END_OBJECT ; _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; return ( _currToken = JsonToken . START_OBJECT ) ; } return ( _currToken = JsonToken . VALUE_NULL ) ; } _currToken = _parsingContext . inArray ( ) ? JsonToken . END_ARRAY : JsonToken . END_OBJECT ; _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; return _currToken ; case XmlTokenStream . 
XML_ATTRIBUTE_NAME : if ( _mayBeLeaf ) { _mayBeLeaf = false ; _nextToken = JsonToken . FIELD_NAME ; _currText = _xmlTokens . getText ( ) ; _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; return ( _currToken = JsonToken . START_OBJECT ) ; } _parsingContext . setCurrentName ( _xmlTokens . getLocalName ( ) ) ; return ( _currToken = JsonToken . FIELD_NAME ) ; case XmlTokenStream . XML_ATTRIBUTE_VALUE : _currText = _xmlTokens . getText ( ) ; return ( _currToken = JsonToken . VALUE_STRING ) ; case XmlTokenStream . XML_TEXT : _currText = _xmlTokens . getText ( ) ; if ( _mayBeLeaf ) { _mayBeLeaf = false ; _xmlTokens . skipEndElement ( ) ; if ( _parsingContext . inArray ( ) ) { if ( _isEmpty ( _currText ) ) { _nextToken = JsonToken . END_OBJECT ; _parsingContext = _parsingContext . createChildObjectContext ( - 1 , - 1 ) ; return ( _currToken = JsonToken . START_OBJECT ) ; } } return ( _currToken = JsonToken . VALUE_STRING ) ; } else { if ( _parsingContext . inObject ( ) && ( _currToken != JsonToken . FIELD_NAME ) && _isEmpty ( _currText ) ) { _currToken = JsonToken . END_OBJECT ; _parsingContext = _parsingContext . getParent ( ) ; _namesToWrap = _parsingContext . getNamesToWrap ( ) ; return _currToken ; } } _parsingContext . setCurrentName ( _cfgNameForTextElement ) ; _nextToken = JsonToken . VALUE_STRING ; return ( _currToken = JsonToken . FIELD_NAME ) ; case XmlTokenStream . XML_END : return ( _currToken = null ) ; } _throwInternal ( ) ; return null ; }
JxPath
18
src/java/org/apache/commons/jxpath/ri/axes/AttributeContext.java
71
90
Issue with attribute::
Checking test (Issue172_CountAttributeNode) I came with the following fix for the code in AttributeContext line 72 from ----- if (!(nodeTest instanceof NodeNameTest)) { return false; } QName name = ((NodeNameTest) nodeTest).getNodeName(); ------ ' to --- (outside method) private static final QName WILDCARD = new QName("", "*"); --- (in method) final QName name ; if (nodeTest instanceof NodeTypeTest) { if (((NodeTypeTest) nodeTest).getNodeType() == Compiler.NODE_TYPE_NODE) name = WILDCARD; else return false; } else if (nodeTest instanceof NodeNameTest) { name = ((NodeNameTest) nodeTest).getNodeName(); } else { return false; }
public boolean nextNode() { super.setPosition(getCurrentPosition() + 1); if (!setStarted) { setStarted = true; if (!(nodeTest instanceof NodeNameTest)) { return false; } QName name = ((NodeNameTest) nodeTest).getNodeName(); iterator = parentContext.getCurrentNodePointer().attributeIterator(name); } if (iterator == null) { return false; } if (!iterator.setPosition(iterator.getPosition() + 1)) { return false; } currentNodePointer = iterator.getNodePointer(); return true; }
public boolean nextNode ( ) { super . setPosition ( getCurrentPosition ( ) + 1 ) ; if ( ! setStarted ) { setStarted = true ; if ( ! ( nodeTest instanceof NodeNameTest ) ) { return false ; } QName name = ( ( NodeNameTest ) nodeTest ) . getNodeName ( ) ; iterator = parentContext . getCurrentNodePointer ( ) . attributeIterator ( name ) ; } if ( iterator == null ) { return false ; } if ( ! iterator . setPosition ( iterator . getPosition ( ) + 1 ) ) { return false ; } currentNodePointer = iterator . getNodePointer ( ) ; return true ; }
public boolean nextNode() { super.setPosition(getCurrentPosition() + 1); if (!setStarted) { setStarted = true; NodeNameTest nodeNameTest = null; if (nodeTest instanceof NodeTypeTest) { if (((NodeTypeTest) nodeTest).getNodeType() == Compiler.NODE_TYPE_NODE) { nodeNameTest = WILDCARD_TEST; } } else if (nodeTest instanceof NodeNameTest) { nodeNameTest = (NodeNameTest) nodeTest; } if (nodeNameTest == null) { return false; } iterator = parentContext.getCurrentNodePointer().attributeIterator( nodeNameTest.getNodeName()); } if (iterator == null) { return false; } if (!iterator.setPosition(iterator.getPosition() + 1)) { return false; } currentNodePointer = iterator.getNodePointer(); return true; }
public boolean nextNode ( ) { super . setPosition ( getCurrentPosition ( ) + 1 ) ; if ( ! setStarted ) { setStarted = true ; NodeNameTest nodeNameTest = null ; if ( nodeTest instanceof NodeTypeTest ) { if ( ( ( NodeTypeTest ) nodeTest ) . getNodeType ( ) == Compiler . NODE_TYPE_NODE ) { nodeNameTest = WILDCARD_TEST ; } } else if ( nodeTest instanceof NodeNameTest ) { nodeNameTest = ( NodeNameTest ) nodeTest ; } if ( nodeNameTest == null ) { return false ; } iterator = parentContext . getCurrentNodePointer ( ) . attributeIterator ( nodeNameTest . getNodeName ( ) ) ; } if ( iterator == null ) { return false ; } if ( ! iterator . setPosition ( iterator . getPosition ( ) + 1 ) ) { return false ; } currentNodePointer = iterator . getNodePointer ( ) ; return true ; }
Codec
2
src/java/org/apache/commons/codec/binary/Base64.java
414
473
Base64 bug with empty input (new byte[0])
Base64.encode(new byte[0]) doesn't return an empty byte array back! It returns CRLF.
void encode(byte[] in, int inPos, int inAvail) { if (eof) { return; } // inAvail < 0 is how we're informed of EOF in the underlying data we're // encoding. if (inAvail < 0) { eof = true; if (buf == null || buf.length - pos < encodeSize) { resizeBuf(); } switch (modulus) { case 1: buf[pos++] = encodeTable[(x >> 2) & MASK_6BITS]; buf[pos++] = encodeTable[(x << 4) & MASK_6BITS]; // URL-SAFE skips the padding to further reduce size. if (encodeTable == STANDARD_ENCODE_TABLE) { buf[pos++] = PAD; buf[pos++] = PAD; } break; case 2: buf[pos++] = encodeTable[(x >> 10) & MASK_6BITS]; buf[pos++] = encodeTable[(x >> 4) & MASK_6BITS]; buf[pos++] = encodeTable[(x << 2) & MASK_6BITS]; // URL-SAFE skips the padding to further reduce size. if (encodeTable == STANDARD_ENCODE_TABLE) { buf[pos++] = PAD; } break; } if (lineLength > 0) { System.arraycopy(lineSeparator, 0, buf, pos, lineSeparator.length); pos += lineSeparator.length; } } else { for (int i = 0; i < inAvail; i++) { if (buf == null || buf.length - pos < encodeSize) { resizeBuf(); } modulus = (++modulus) % 3; int b = in[inPos++]; if (b < 0) { b += 256; } x = (x << 8) + b; if (0 == modulus) { buf[pos++] = encodeTable[(x >> 18) & MASK_6BITS]; buf[pos++] = encodeTable[(x >> 12) & MASK_6BITS]; buf[pos++] = encodeTable[(x >> 6) & MASK_6BITS]; buf[pos++] = encodeTable[x & MASK_6BITS]; currentLinePos += 4; if (lineLength > 0 && lineLength <= currentLinePos) { System.arraycopy(lineSeparator, 0, buf, pos, lineSeparator.length); pos += lineSeparator.length; currentLinePos = 0; } } } } }
void encode ( byte [ ] in , int inPos , int inAvail ) { if ( eof ) { return ; } if ( inAvail < 0 ) { eof = true ; if ( buf == null || buf . length - pos < encodeSize ) { resizeBuf ( ) ; } switch ( modulus ) { case 1 : buf [ pos ++ ] = encodeTable [ ( x >> 2 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x << 4 ) & MASK_6BITS ] ; if ( encodeTable == STANDARD_ENCODE_TABLE ) { buf [ pos ++ ] = PAD ; buf [ pos ++ ] = PAD ; } break ; case 2 : buf [ pos ++ ] = encodeTable [ ( x >> 10 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x >> 4 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x << 2 ) & MASK_6BITS ] ; if ( encodeTable == STANDARD_ENCODE_TABLE ) { buf [ pos ++ ] = PAD ; } break ; } if ( lineLength > 0 ) { System . arraycopy ( lineSeparator , 0 , buf , pos , lineSeparator . length ) ; pos += lineSeparator . length ; } } else { for ( int i = 0 ; i < inAvail ; i ++ ) { if ( buf == null || buf . length - pos < encodeSize ) { resizeBuf ( ) ; } modulus = ( ++ modulus ) % 3 ; int b = in [ inPos ++ ] ; if ( b < 0 ) { b += 256 ; } x = ( x << 8 ) + b ; if ( 0 == modulus ) { buf [ pos ++ ] = encodeTable [ ( x >> 18 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x >> 12 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x >> 6 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ x & MASK_6BITS ] ; currentLinePos += 4 ; if ( lineLength > 0 && lineLength <= currentLinePos ) { System . arraycopy ( lineSeparator , 0 , buf , pos , lineSeparator . length ) ; pos += lineSeparator . length ; currentLinePos = 0 ; } } } } }
void encode(byte[] in, int inPos, int inAvail) { if (eof) { return; } // inAvail < 0 is how we're informed of EOF in the underlying data we're // encoding. if (inAvail < 0) { eof = true; if (buf == null || buf.length - pos < encodeSize) { resizeBuf(); } switch (modulus) { case 1: buf[pos++] = encodeTable[(x >> 2) & MASK_6BITS]; buf[pos++] = encodeTable[(x << 4) & MASK_6BITS]; // URL-SAFE skips the padding to further reduce size. if (encodeTable == STANDARD_ENCODE_TABLE) { buf[pos++] = PAD; buf[pos++] = PAD; } break; case 2: buf[pos++] = encodeTable[(x >> 10) & MASK_6BITS]; buf[pos++] = encodeTable[(x >> 4) & MASK_6BITS]; buf[pos++] = encodeTable[(x << 2) & MASK_6BITS]; // URL-SAFE skips the padding to further reduce size. if (encodeTable == STANDARD_ENCODE_TABLE) { buf[pos++] = PAD; } break; } if (lineLength > 0 && pos > 0) { System.arraycopy(lineSeparator, 0, buf, pos, lineSeparator.length); pos += lineSeparator.length; } } else { for (int i = 0; i < inAvail; i++) { if (buf == null || buf.length - pos < encodeSize) { resizeBuf(); } modulus = (++modulus) % 3; int b = in[inPos++]; if (b < 0) { b += 256; } x = (x << 8) + b; if (0 == modulus) { buf[pos++] = encodeTable[(x >> 18) & MASK_6BITS]; buf[pos++] = encodeTable[(x >> 12) & MASK_6BITS]; buf[pos++] = encodeTable[(x >> 6) & MASK_6BITS]; buf[pos++] = encodeTable[x & MASK_6BITS]; currentLinePos += 4; if (lineLength > 0 && lineLength <= currentLinePos) { System.arraycopy(lineSeparator, 0, buf, pos, lineSeparator.length); pos += lineSeparator.length; currentLinePos = 0; } } } } }
void encode ( byte [ ] in , int inPos , int inAvail ) { if ( eof ) { return ; } if ( inAvail < 0 ) { eof = true ; if ( buf == null || buf . length - pos < encodeSize ) { resizeBuf ( ) ; } switch ( modulus ) { case 1 : buf [ pos ++ ] = encodeTable [ ( x >> 2 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x << 4 ) & MASK_6BITS ] ; if ( encodeTable == STANDARD_ENCODE_TABLE ) { buf [ pos ++ ] = PAD ; buf [ pos ++ ] = PAD ; } break ; case 2 : buf [ pos ++ ] = encodeTable [ ( x >> 10 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x >> 4 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x << 2 ) & MASK_6BITS ] ; if ( encodeTable == STANDARD_ENCODE_TABLE ) { buf [ pos ++ ] = PAD ; } break ; } if ( lineLength > 0 && pos > 0 ) { System . arraycopy ( lineSeparator , 0 , buf , pos , lineSeparator . length ) ; pos += lineSeparator . length ; } } else { for ( int i = 0 ; i < inAvail ; i ++ ) { if ( buf == null || buf . length - pos < encodeSize ) { resizeBuf ( ) ; } modulus = ( ++ modulus ) % 3 ; int b = in [ inPos ++ ] ; if ( b < 0 ) { b += 256 ; } x = ( x << 8 ) + b ; if ( 0 == modulus ) { buf [ pos ++ ] = encodeTable [ ( x >> 18 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x >> 12 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ ( x >> 6 ) & MASK_6BITS ] ; buf [ pos ++ ] = encodeTable [ x & MASK_6BITS ] ; currentLinePos += 4 ; if ( lineLength > 0 && lineLength <= currentLinePos ) { System . arraycopy ( lineSeparator , 0 , buf , pos , lineSeparator . length ) ; pos += lineSeparator . length ; currentLinePos = 0 ; } } } } }
Compress
31
src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
101
148
Illegal argument exception when extracting .tgz file
When attempting to unpack a .tgz file, I am receiving the illegal argument exception: java.lang.IllegalArgumentException: Invalid byte 0 at offset 5 in '05412{NUL}11' len=8. This is causing a java.io.IOException: Error detected parsing the header error. This is being thrown when the function TarArchiveInputStream.getNextTarEntry() is called. Here is the code I am using. {code:java} TarArchiveInputStream tarIn = new TarArchiveInputStream( new GZIPInputStream( new BufferedInputStream( new FileInputStream( tempDirPath + fileName)))); TarArchiveEntry entry = tarIn.getNextTarEntry(); while (entry != null) { File path = new File(tempDirPath, entry.getName()); if (entry.isDirectory()) { path.mkdirs(); } else { path.createNewFile(); byte[] read = new byte[2048]; BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(path)); int len; while ((len = tarIn.read(read)) != -1) { bout.write(read, 0, len); System.out.print(new String(read, "UTF-8")); } bout.close(); read = null; } entry = tarIn.getNextTarEntry(); } tarIn.close(); {code} Here is the full stack trace: [2015-02-12T23:17:31.944+0000] [glassfish 4.0] [SEVERE] [] [] [tid: _ThreadID=123 _ThreadName=Thread-4] [timeMillis: 1423783051944] [levelValue: 1000] [[ java.io.IOException: Error detected parsing the header at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:257) at org.unavco.ws.tilt.ExtractTiltFile.extractFile(ExtractTiltFile.java:125) at org.unavco.ws.tilt.ExtractTiltFile.run(ExtractTiltFile.java:59) at org.unavco.ws.cache.ProcessDataFile.getFileData(ProcessDataFile.java:100) at org.unavco.ws.cache.ProcessDataFile.getResultSet(ProcessDataFile.java:81) at org.unavco.ws.tilt.TiltDsClient.write(TiltDsClient.java:47) at org.glassfish.jersey.message.internal.StreamingOutputProvider.writeTo(StreamingOutputProvider.java:76) at org.glassfish.jersey.message.internal.StreamingOutputProvider.writeTo(StreamingOutputProvider.java:58) at 
org.glassfish.jersey.message.internal.WriterInterceptorExecutor$TerminalWriterInterceptor.aroundWriteTo(WriterInterceptorExecutor.java:194) at org.glassfish.jersey.message.internal.WriterInterceptorExecutor.proceed(WriterInterceptorExecutor.java:139) at org.glassfish.jersey.server.internal.JsonWithPaddingInterceptor.aroundWriteTo(JsonWithPaddingInterceptor.java:103) at org.glassfish.jersey.message.internal.WriterInterceptorExecutor.proceed(WriterInterceptorExecutor.java:139) at org.glassfish.jersey.server.internal.MappableExceptionWrapperInterceptor.aroundWriteTo(MappableExceptionWrapperInterceptor.java:88) at org.glassfish.jersey.message.internal.WriterInterceptorExecutor.proceed(WriterInterceptorExecutor.java:139) at org.glassfish.jersey.message.internal.MessageBodyFactory.writeTo(MessageBodyFactory.java:1005) at org.glassfish.jersey.server.ServerRuntime$Responder.writeResponse(ServerRuntime.java:471) at org.glassfish.jersey.server.ServerRuntime$Responder.processResponse(ServerRuntime.java:333) at org.glassfish.jersey.server.ServerRuntime$Responder.process(ServerRuntime.java:323) at org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:227) at org.glassfish.jersey.internal.Errors$1.call(Errors.java:271) at org.glassfish.jersey.internal.Errors$1.call(Errors.java:267) at org.glassfish.jersey.internal.Errors.process(Errors.java:315) at org.glassfish.jersey.internal.Errors.process(Errors.java:297) at org.glassfish.jersey.internal.Errors.process(Errors.java:267) at org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:317) at org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:198) at org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:946) at org.glassfish.jersey.servlet.WebComponent.service(WebComponent.java:323) at org.glassfish.jersey.servlet.ServletContainer.service(ServletContainer.java:372) at org.glassfish.jersey.servlet.ServletContainer.service(ServletContainer.java:335) at 
org.glassfish.jersey.servlet.ServletContainer.service(ServletContainer.java:218) at org.apache.catalina.core.StandardWrapper.service(StandardWrapper.java:1682) at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:344) at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:214) at com.thetransactioncompany.cors.CORSFilter.doFilter(Unknown Source) at com.thetransactioncompany.cors.CORSFilter.doFilter(Unknown Source) at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:256) at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:214) at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:316) at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:160) at org.apache.catalina.core.StandardPipeline.doInvoke(StandardPipeline.java:734) at org.apache.catalina.core.StandardPipeline.invoke(StandardPipeline.java:673) at com.sun.enterprise.web.WebPipeline.invoke(WebPipeline.java:99) at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:174) at org.apache.catalina.connector.CoyoteAdapter.doService(CoyoteAdapter.java:357) at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:260) at com.sun.enterprise.v3.services.impl.ContainerMapper.service(ContainerMapper.java:188) at org.glassfish.grizzly.http.server.HttpHandler.runService(HttpHandler.java:191) at org.glassfish.grizzly.http.server.HttpHandler.doHandle(HttpHandler.java:168) at org.glassfish.grizzly.http.server.HttpServerFilter.handleRead(HttpServerFilter.java:189) at org.glassfish.grizzly.filterchain.ExecutorResolver$9.execute(ExecutorResolver.java:119) at org.glassfish.grizzly.filterchain.DefaultFilterChain.executeFilter(DefaultFilterChain.java:288) at org.glassfish.grizzly.filterchain.DefaultFilterChain.executeChainPart(DefaultFilterChain.java:206) at 
org.glassfish.grizzly.filterchain.DefaultFilterChain.execute(DefaultFilterChain.java:136) at org.glassfish.grizzly.filterchain.DefaultFilterChain.process(DefaultFilterChain.java:114) at org.glassfish.grizzly.ProcessorExecutor.execute(ProcessorExecutor.java:77) at org.glassfish.grizzly.nio.transport.TCPNIOTransport.fireIOEvent(TCPNIOTransport.java:838) at org.glassfish.grizzly.strategies.AbstractIOStrategy.fireIOEvent(AbstractIOStrategy.java:113) at org.glassfish.grizzly.strategies.WorkerThreadIOStrategy.run0(WorkerThreadIOStrategy.java:115) at org.glassfish.grizzly.strategies.WorkerThreadIOStrategy.access$100(WorkerThreadIOStrategy.java:55) at org.glassfish.grizzly.strategies.WorkerThreadIOStrategy$WorkerThreadRunnable.run(WorkerThreadIOStrategy.java:135) at org.glassfish.grizzly.threadpool.AbstractThreadPool$Worker.doWork(AbstractThreadPool.java:564) at org.glassfish.grizzly.threadpool.AbstractThreadPool$Worker.run(AbstractThreadPool.java:544) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.IllegalArgumentException: Invalid byte 0 at offset 5 in '05412{NUL}11' len=8 at org.apache.commons.compress.archivers.tar.TarUtils.parseOctal(TarUtils.java:138) at org.apache.commons.compress.archivers.tar.TarUtils.parseOctalOrBinary(TarUtils.java:169) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:951) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:940) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.<init>(TarArchiveEntry.java:324) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:255) ... 63 more]]
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } if (buffer[start] == 0) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Trim all trailing NULs and spaces. // The ustar and POSIX tar specs require a trailing NUL or // space but some implementations use the extra digit for big // sizes/uids/gids ... byte trailer = buffer[end - 1]; while (start < end && (trailer == 0 || trailer == ' ')) { end--; trailer = buffer[end - 1]; } for ( ;start < end; start++) { final byte currentByte = buffer[start]; if (currentByte == 0) { break; } // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer = buffer [ end - 1 ] ; while ( start < end && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte == 0 ) { break ; } if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } if (buffer[start] == 0) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Trim all trailing NULs and spaces. // The ustar and POSIX tar specs require a trailing NUL or // space but some implementations use the extra digit for big // sizes/uids/gids ... byte trailer = buffer[end - 1]; while (start < end && (trailer == 0 || trailer == ' ')) { end--; trailer = buffer[end - 1]; } for ( ;start < end; start++) { final byte currentByte = buffer[start]; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer = buffer [ end - 1 ] ; while ( start < end && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
Math
28
src/main/java/org/apache/commons/math3/optimization/linear/SimplexSolver.java
90
154
Not expected UnboundedSolutionException
SimplexSolver throws UnboundedSolutionException when trying to solve minimization linear programming problem. The number of exception thrown depends on the number of variables. In order to see that behavior of SimplexSolver first try to run JUnit test setting a final variable ENTITIES_COUNT = 2 and that will give almost good result and then set it to 15 and you'll get a massive of unbounded exceptions. First iteration is runned with predefined set of input data with which the Solver gives back an appropriate result. The problem itself is well tested by it's authors (mathematicians who I believe know what they developed) using Matlab 10 with no unbounded solutions on the same rules of creatnig random variables values. What is strange to me is the dependence of the number of UnboundedSolutionException exceptions on the number of variables in the problem. The problem is formulated as min(1*t + 0*L) (for every r-th subject) s.t. -q(r) + QL >= 0 x(r)t - XL >= 0 L >= 0 where r = 1..R, L = {l(1), l(2), ..., l(R)} (vector of R rows and 1 column), Q - coefficients matrix MxR X - coefficients matrix NxR
private Integer getPivotRow(SimplexTableau tableau, final int col) { // create a list of all the rows that tie for the lowest score in the minimum ratio test List<Integer> minRatioPositions = new ArrayList<Integer>(); double minRatio = Double.MAX_VALUE; for (int i = tableau.getNumObjectiveFunctions(); i < tableau.getHeight(); i++) { final double rhs = tableau.getEntry(i, tableau.getWidth() - 1); final double entry = tableau.getEntry(i, col); if (Precision.compareTo(entry, 0d, maxUlps) > 0) { final double ratio = rhs / entry; // check if the entry is strictly equal to the current min ratio // do not use a ulp/epsilon check final int cmp = Double.compare(ratio, minRatio); if (cmp == 0) { minRatioPositions.add(i); } else if (cmp < 0) { minRatio = ratio; minRatioPositions = new ArrayList<Integer>(); minRatioPositions.add(i); } } } if (minRatioPositions.size() == 0) { return null; } else if (minRatioPositions.size() > 1) { // there's a degeneracy as indicated by a tie in the minimum ratio test // 1. check if there's an artificial variable that can be forced out of the basis for (Integer row : minRatioPositions) { for (int i = 0; i < tableau.getNumArtificialVariables(); i++) { int column = i + tableau.getArtificialVariableOffset(); final double entry = tableau.getEntry(row, column); if (Precision.equals(entry, 1d, maxUlps) && row.equals(tableau.getBasicRow(column))) { return row; } } } // 2. apply Bland's rule to prevent cycling: // take the row for which the corresponding basic variable has the smallest index // // see http://www.stanford.edu/class/msande310/blandrule.pdf // see http://en.wikipedia.org/wiki/Bland%27s_rule (not equivalent to the above paper) // // Additional heuristic: if we did not get a solution after half of maxIterations // revert to the simple case of just returning the top-most row // This heuristic is based on empirical data gathered while investigating MATH-828. 
Integer minRow = null; int minIndex = tableau.getWidth(); for (Integer row : minRatioPositions) { int i = tableau.getNumObjectiveFunctions(); for (; i < tableau.getWidth() - 1 && minRow != row; i++) { if (row == tableau.getBasicRow(i)) { if (i < minIndex) { minIndex = i; minRow = row; } } } } return minRow; } return minRatioPositions.get(0); }
private Integer getPivotRow ( SimplexTableau tableau , final int col ) { List < Integer > minRatioPositions = new ArrayList < Integer > ( ) ; double minRatio = Double . MAX_VALUE ; for ( int i = tableau . getNumObjectiveFunctions ( ) ; i < tableau . getHeight ( ) ; i ++ ) { final double rhs = tableau . getEntry ( i , tableau . getWidth ( ) - 1 ) ; final double entry = tableau . getEntry ( i , col ) ; if ( Precision . compareTo ( entry , 0d , maxUlps ) > 0 ) { final double ratio = rhs / entry ; final int cmp = Double . compare ( ratio , minRatio ) ; if ( cmp == 0 ) { minRatioPositions . add ( i ) ; } else if ( cmp < 0 ) { minRatio = ratio ; minRatioPositions = new ArrayList < Integer > ( ) ; minRatioPositions . add ( i ) ; } } } if ( minRatioPositions . size ( ) == 0 ) { return null ; } else if ( minRatioPositions . size ( ) > 1 ) { for ( Integer row : minRatioPositions ) { for ( int i = 0 ; i < tableau . getNumArtificialVariables ( ) ; i ++ ) { int column = i + tableau . getArtificialVariableOffset ( ) ; final double entry = tableau . getEntry ( row , column ) ; if ( Precision . equals ( entry , 1d , maxUlps ) && row . equals ( tableau . getBasicRow ( column ) ) ) { return row ; } } } Integer minRow = null ; int minIndex = tableau . getWidth ( ) ; for ( Integer row : minRatioPositions ) { int i = tableau . getNumObjectiveFunctions ( ) ; for ( ; i < tableau . getWidth ( ) - 1 && minRow != row ; i ++ ) { if ( row == tableau . getBasicRow ( i ) ) { if ( i < minIndex ) { minIndex = i ; minRow = row ; } } } } return minRow ; } return minRatioPositions . get ( 0 ) ; }
private Integer getPivotRow(SimplexTableau tableau, final int col) { // create a list of all the rows that tie for the lowest score in the minimum ratio test List<Integer> minRatioPositions = new ArrayList<Integer>(); double minRatio = Double.MAX_VALUE; for (int i = tableau.getNumObjectiveFunctions(); i < tableau.getHeight(); i++) { final double rhs = tableau.getEntry(i, tableau.getWidth() - 1); final double entry = tableau.getEntry(i, col); if (Precision.compareTo(entry, 0d, maxUlps) > 0) { final double ratio = rhs / entry; // check if the entry is strictly equal to the current min ratio // do not use a ulp/epsilon check final int cmp = Double.compare(ratio, minRatio); if (cmp == 0) { minRatioPositions.add(i); } else if (cmp < 0) { minRatio = ratio; minRatioPositions = new ArrayList<Integer>(); minRatioPositions.add(i); } } } if (minRatioPositions.size() == 0) { return null; } else if (minRatioPositions.size() > 1) { // there's a degeneracy as indicated by a tie in the minimum ratio test // 1. check if there's an artificial variable that can be forced out of the basis if (tableau.getNumArtificialVariables() > 0) { for (Integer row : minRatioPositions) { for (int i = 0; i < tableau.getNumArtificialVariables(); i++) { int column = i + tableau.getArtificialVariableOffset(); final double entry = tableau.getEntry(row, column); if (Precision.equals(entry, 1d, maxUlps) && row.equals(tableau.getBasicRow(column))) { return row; } } } } // 2. apply Bland's rule to prevent cycling: // take the row for which the corresponding basic variable has the smallest index // // see http://www.stanford.edu/class/msande310/blandrule.pdf // see http://en.wikipedia.org/wiki/Bland%27s_rule (not equivalent to the above paper) // // Additional heuristic: if we did not get a solution after half of maxIterations // revert to the simple case of just returning the top-most row // This heuristic is based on empirical data gathered while investigating MATH-828. 
if (getIterations() < getMaxIterations() / 2) { Integer minRow = null; int minIndex = tableau.getWidth(); for (Integer row : minRatioPositions) { int i = tableau.getNumObjectiveFunctions(); for (; i < tableau.getWidth() - 1 && minRow != row; i++) { if (row == tableau.getBasicRow(i)) { if (i < minIndex) { minIndex = i; minRow = row; } } } } return minRow; } } return minRatioPositions.get(0); }
private Integer getPivotRow ( SimplexTableau tableau , final int col ) { List < Integer > minRatioPositions = new ArrayList < Integer > ( ) ; double minRatio = Double . MAX_VALUE ; for ( int i = tableau . getNumObjectiveFunctions ( ) ; i < tableau . getHeight ( ) ; i ++ ) { final double rhs = tableau . getEntry ( i , tableau . getWidth ( ) - 1 ) ; final double entry = tableau . getEntry ( i , col ) ; if ( Precision . compareTo ( entry , 0d , maxUlps ) > 0 ) { final double ratio = rhs / entry ; final int cmp = Double . compare ( ratio , minRatio ) ; if ( cmp == 0 ) { minRatioPositions . add ( i ) ; } else if ( cmp < 0 ) { minRatio = ratio ; minRatioPositions = new ArrayList < Integer > ( ) ; minRatioPositions . add ( i ) ; } } } if ( minRatioPositions . size ( ) == 0 ) { return null ; } else if ( minRatioPositions . size ( ) > 1 ) { if ( tableau . getNumArtificialVariables ( ) > 0 ) { for ( Integer row : minRatioPositions ) { for ( int i = 0 ; i < tableau . getNumArtificialVariables ( ) ; i ++ ) { int column = i + tableau . getArtificialVariableOffset ( ) ; final double entry = tableau . getEntry ( row , column ) ; if ( Precision . equals ( entry , 1d , maxUlps ) && row . equals ( tableau . getBasicRow ( column ) ) ) { return row ; } } } } if ( getIterations ( ) < getMaxIterations ( ) / 2 ) { Integer minRow = null ; int minIndex = tableau . getWidth ( ) ; for ( Integer row : minRatioPositions ) { int i = tableau . getNumObjectiveFunctions ( ) ; for ( ; i < tableau . getWidth ( ) - 1 && minRow != row ; i ++ ) { if ( row == tableau . getBasicRow ( i ) ) { if ( i < minIndex ) { minIndex = i ; minRow = row ; } } } } return minRow ; } } return minRatioPositions . get ( 0 ) ; }
Compress
8
src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
51
87
TarArchiveEntry.parseTarHeader() includes the trailing space/NUL when parsing the octal size
TarArchiveEntry.parseTarHeader() includes the trailing space/NUL when parsing the octal size. Although the size field in the header is 12 bytes, the last byte is supposed to be space or NUL - i.e. only 11 octal digits are allowed for the size.
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; boolean stillPadding = true; int end = offset + length; int start = offset; for (int i = start; i < end; i++){ final byte currentByte = buffer[i]; if (currentByte == 0) { break; } // Skip leading spaces if (currentByte == (byte) ' ' || currentByte == '0') { if (stillPadding) { continue; } if (currentByte == (byte) ' ') { break; } } // Must have trailing NUL or space // May have additional NUL or space stillPadding = false; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; boolean stillPadding = true ; int end = offset + length ; int start = offset ; for ( int i = start ; i < end ; i ++ ) { final byte currentByte = buffer [ i ] ; if ( currentByte == 0 ) { break ; } if ( currentByte == ( byte ) ' ' || currentByte == '0' ) { if ( stillPadding ) { continue ; } if ( currentByte == ( byte ) ' ' ) { break ; } } stillPadding = false ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } boolean allNUL = true; for (int i = start; i < end; i++){ if (buffer[i] != 0){ allNUL = false; break; } } if (allNUL) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Must have trailing NUL or space byte trailer; trailer = buffer[end-1]; if (trailer == 0 || trailer == ' '){ end--; } else { throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, end-1, trailer)); } // May have additional NUL or space trailer = buffer[end-1]; if (trailer == 0 || trailer == ' '){ end--; } for ( ;start < end; start++) { final byte currentByte = buffer[start]; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } boolean allNUL = true ; for ( int i = start ; i < end ; i ++ ) { if ( buffer [ i ] != 0 ) { allNUL = false ; break ; } } if ( allNUL ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer ; trailer = buffer [ end - 1 ] ; if ( trailer == 0 || trailer == ' ' ) { end -- ; } else { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , end - 1 , trailer ) ) ; } trailer = buffer [ end - 1 ] ; if ( trailer == 0 || trailer == ' ' ) { end -- ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
Cli
29
src/java/org/apache/commons/cli/Util.java
63
76
Commons CLI incorrectly stripping leading and trailing quotes
org.apache.commons.cli.Parser.processArgs() calls Util.stripLeadingAndTrailingQuotes() for all argument values. IMHO this is incorrect and totally broken. It is trivial to create a simple test for this. Output: $ java -cp target/clitest.jar Clitest --balloo "this is a \"test\"" Value of argument balloo is 'this is a "test'. The argument 'balloo' should indeed keep its trailing double quote. It is what the shell gives it, so don't try to do something clever to it. The offending code was committed here: http://svn.apache.org/viewvc?view=rev&revision=129874 and has been there for more than 6 years (!). Why was this committed in the first place? The fix is trivial, just get rid of Util.stripLeadingAndTrailingQuotes(), and consequently avoid calling it from Parser.processArgs().
static String stripLeadingAndTrailingQuotes(String str) { if (str.startsWith("\"")) { str = str.substring(1, str.length()); } int length = str.length(); if (str.endsWith("\"")) { str = str.substring(0, length - 1); } return str; }
static String stripLeadingAndTrailingQuotes ( String str ) { if ( str . startsWith ( "\"" ) ) { str = str . substring ( 1 , str . length ( ) ) ; } int length = str . length ( ) ; if ( str . endsWith ( "\"" ) ) { str = str . substring ( 0 , length - 1 ) ; } return str ; }
static String stripLeadingAndTrailingQuotes(String str) { int length = str.length(); if (length > 1 && str.startsWith("\"") && str.endsWith("\"") && str.substring(1, length - 1).indexOf('"') == -1) { str = str.substring(1, length - 1); } return str; }
static String stripLeadingAndTrailingQuotes ( String str ) { int length = str . length ( ) ; if ( length > 1 && str . startsWith ( "\"" ) && str . endsWith ( "\"" ) && str . substring ( 1 , length - 1 ) . indexOf ( '"' ) == - 1 ) { str = str . substring ( 1 , length - 1 ) ; } return str ; }
Compress
27
src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
102
150
Incorrect handling of NUL username and group Tar.gz entries
With version 1.8 of commons-compress it's no longer possible to decompress files from an archive if the archive contains entries having null (or being empty?) set as username and/or usergroup. With version 1.7 this still worked now I get this exception: {code} java.io.IOException: Error detected parsing the header at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:249) at TestBed.AppTest.extractNoFileOwner(AppTest.java:30) Caused by: java.lang.IllegalArgumentException: Invalid byte 32 at offset 7 in ' {NUL}' len=8 at org.apache.commons.compress.archivers.tar.TarUtils.parseOctal(TarUtils.java:134) at org.apache.commons.compress.archivers.tar.TarUtils.parseOctalOrBinary(TarUtils.java:173) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:953) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:940) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.<init>(TarArchiveEntry.java:324) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:247) ... 27 more {code} This exception leads to my suspision that the regression was introduced with the fix for this ticket COMPRESS-262, which has a nearly identical exception provided. Some test code you can run to verify it: {code} package TestBed; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.junit.Test; /** * Unit test for simple App. 
*/ public class AppTest { @Test public void extractNoFileOwner() { TarArchiveInputStream tarInputStream = null; try { tarInputStream = new TarArchiveInputStream( new GzipCompressorInputStream( new FileInputStream( new File( "/home/pknobel/redis-dist-2.8.3_1-linux.tar.gz" ) ) ) ); TarArchiveEntry entry; while ( ( entry = tarInputStream.getNextTarEntry() ) != null ) { System.out.println( entry.getName() ); System.out.println(entry.getUserName()+"/"+entry.getGroupName()); } } catch ( FileNotFoundException e ) { e.printStackTrace(); } catch ( IOException e ) { e.printStackTrace(); } } } {code} With 1.7 the TestCase outputed this: {code} redis-dist-2.8.3_1/bin/ / redis-dist-2.8.3_1/bin/redis-server jenkins/jenkins redis-dist-2.8.3_1/bin/redis-cli jenkins/jenkins {code} With 1.8 it's failing once it reaches the null valued entry, which is the first. The archive is created using maven assembly plugin, and I tried the same with maven ant task. Both generating an archive with not set username and groups for at least some entries. You can download the archive from http://heli0s.darktech.org/redis/2.8.3_1/redis-dist-2.8.3_1-linux.tar.gz If you run a tar -tvzf on the file you see this report: {code} drwxr-xr-x 0/0 0 2014-04-18 09:43 redis-dist-2.8.3_1-SNAPSHOT/bin/ -rwxr-xr-x pknobel/pknobel 3824588 2014-01-02 14:58 redis-dist-2.8.3_1-SNAPSHOT/bin/redis-cli -rwxr-xr-x pknobel/pknobel 5217234 2014-01-02 14:58 redis-dist-2.8.3_1-SNAPSHOT/bin/redis-server {code} The user 0/0 probably indicates that it's not set although it's the root user id. A correctly root user file would show up as root/root
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } if (buffer[start] == 0) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Trim all trailing NULs and spaces. // The ustar and POSIX tar specs require a trailing NUL or // space but some implementations use the extra digit for big // sizes/uids/gids ... byte trailer = buffer[end - 1]; while (start < end && (trailer == 0 || trailer == ' ')) { end--; trailer = buffer[end - 1]; } if (start == end) { throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, trailer)); } for ( ;start < end; start++) { final byte currentByte = buffer[start]; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer = buffer [ end - 1 ] ; while ( start < end && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } if ( start == end ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , trailer ) ) ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } if (buffer[start] == 0) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Trim all trailing NULs and spaces. // The ustar and POSIX tar specs require a trailing NUL or // space but some implementations use the extra digit for big // sizes/uids/gids ... byte trailer = buffer[end - 1]; while (start < end && (trailer == 0 || trailer == ' ')) { end--; trailer = buffer[end - 1]; } for ( ;start < end; start++) { final byte currentByte = buffer[start]; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer = buffer [ end - 1 ] ; while ( start < end && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
Chart
9
source/org/jfree/data/time/TimeSeries.java
918
956
Error on TimeSeries createCopy() method
The test case at the end fails with : java.lang.IllegalArgumentException: Requires start <= end. The problem is in that the int start and end indexes corresponding to given timePeriod are computed incorectly. Here I would expect an empty serie to be returned, not an exception. This is with jfreechart 1.0.7 public class foo { static public void main(String args[]) { TimeSeries foo = new TimeSeries("foo",Day.class); foo.add(new Day(19,4,2005),1); foo.add(new Day(25,5,2005),1); foo.add(new Day(28,5,2005),1); foo.add(new Day(30,5,2005),1); foo.add(new Day(1,6,2005),1); foo.add(new Day(3,6,2005),1); foo.add(new Day(19,8,2005),1); foo.add(new Day(31,1,2006),1); try \{ TimeSeries bar = foo.createCopy\(new Day\(1,12,2005\),new Day\(18,1,2006\)\); \} catch \(CloneNotSupportedException e\) \{ e.printStackTrace\(\); } }
public TimeSeries createCopy(RegularTimePeriod start, RegularTimePeriod end) throws CloneNotSupportedException { if (start == null) { throw new IllegalArgumentException("Null 'start' argument."); } if (end == null) { throw new IllegalArgumentException("Null 'end' argument."); } if (start.compareTo(end) > 0) { throw new IllegalArgumentException( "Requires start on or before end."); } boolean emptyRange = false; int startIndex = getIndex(start); if (startIndex < 0) { startIndex = -(startIndex + 1); if (startIndex == this.data.size()) { emptyRange = true; // start is after last data item } } int endIndex = getIndex(end); if (endIndex < 0) { // end period is not in original series endIndex = -(endIndex + 1); // this is first item AFTER end period endIndex = endIndex - 1; // so this is last item BEFORE end } if (endIndex < 0) { emptyRange = true; } if (emptyRange) { TimeSeries copy = (TimeSeries) super.clone(); copy.data = new java.util.ArrayList(); return copy; } else { return createCopy(startIndex, endIndex); } }
public TimeSeries createCopy ( RegularTimePeriod start , RegularTimePeriod end ) throws CloneNotSupportedException { if ( start == null ) { throw new IllegalArgumentException ( "Null 'start' argument." ) ; } if ( end == null ) { throw new IllegalArgumentException ( "Null 'end' argument." ) ; } if ( start . compareTo ( end ) > 0 ) { throw new IllegalArgumentException ( "Requires start on or before end." ) ; } boolean emptyRange = false ; int startIndex = getIndex ( start ) ; if ( startIndex < 0 ) { startIndex = - ( startIndex + 1 ) ; if ( startIndex == this . data . size ( ) ) { emptyRange = true ; } } int endIndex = getIndex ( end ) ; if ( endIndex < 0 ) { endIndex = - ( endIndex + 1 ) ; endIndex = endIndex - 1 ; } if ( endIndex < 0 ) { emptyRange = true ; } if ( emptyRange ) { TimeSeries copy = ( TimeSeries ) super . clone ( ) ; copy . data = new java . util . ArrayList ( ) ; return copy ; } else { return createCopy ( startIndex , endIndex ) ; } }
public TimeSeries createCopy(RegularTimePeriod start, RegularTimePeriod end) throws CloneNotSupportedException { if (start == null) { throw new IllegalArgumentException("Null 'start' argument."); } if (end == null) { throw new IllegalArgumentException("Null 'end' argument."); } if (start.compareTo(end) > 0) { throw new IllegalArgumentException( "Requires start on or before end."); } boolean emptyRange = false; int startIndex = getIndex(start); if (startIndex < 0) { startIndex = -(startIndex + 1); if (startIndex == this.data.size()) { emptyRange = true; // start is after last data item } } int endIndex = getIndex(end); if (endIndex < 0) { // end period is not in original series endIndex = -(endIndex + 1); // this is first item AFTER end period endIndex = endIndex - 1; // so this is last item BEFORE end } if ((endIndex < 0) || (endIndex < startIndex)) { emptyRange = true; } if (emptyRange) { TimeSeries copy = (TimeSeries) super.clone(); copy.data = new java.util.ArrayList(); return copy; } else { return createCopy(startIndex, endIndex); } }
public TimeSeries createCopy ( RegularTimePeriod start , RegularTimePeriod end ) throws CloneNotSupportedException { if ( start == null ) { throw new IllegalArgumentException ( "Null 'start' argument." ) ; } if ( end == null ) { throw new IllegalArgumentException ( "Null 'end' argument." ) ; } if ( start . compareTo ( end ) > 0 ) { throw new IllegalArgumentException ( "Requires start on or before end." ) ; } boolean emptyRange = false ; int startIndex = getIndex ( start ) ; if ( startIndex < 0 ) { startIndex = - ( startIndex + 1 ) ; if ( startIndex == this . data . size ( ) ) { emptyRange = true ; } } int endIndex = getIndex ( end ) ; if ( endIndex < 0 ) { endIndex = - ( endIndex + 1 ) ; endIndex = endIndex - 1 ; } if ( ( endIndex < 0 ) || ( endIndex < startIndex ) ) { emptyRange = true ; } if ( emptyRange ) { TimeSeries copy = ( TimeSeries ) super . clone ( ) ; copy . data = new java . util . ArrayList ( ) ; return copy ; } else { return createCopy ( startIndex , endIndex ) ; } }
Mockito
37
src/org/mockito/internal/stubbing/answers/AnswersValidator.java
15
28
Inform the user immediately when she tries to 'callRealMethod()' on a mock of a interface
Inform the user immediately when she tries to 'callRealMethod()' on a mock of a interface: //fail fast when: when(mockOfAnIterface.doStuff()).thenCallRealMethod();
public void validate(Answer<?> answer, Invocation invocation) { if (answer instanceof ThrowsException) { validateException((ThrowsException) answer, invocation); } if (answer instanceof Returns) { validateReturnValue((Returns) answer, invocation); } if (answer instanceof DoesNothing) { validateDoNothing((DoesNothing) answer, invocation); } }
public void validate ( Answer < ? > answer , Invocation invocation ) { if ( answer instanceof ThrowsException ) { validateException ( ( ThrowsException ) answer , invocation ) ; } if ( answer instanceof Returns ) { validateReturnValue ( ( Returns ) answer , invocation ) ; } if ( answer instanceof DoesNothing ) { validateDoNothing ( ( DoesNothing ) answer , invocation ) ; } }
public void validate(Answer<?> answer, Invocation invocation) { if (answer instanceof ThrowsException) { validateException((ThrowsException) answer, invocation); } if (answer instanceof Returns) { validateReturnValue((Returns) answer, invocation); } if (answer instanceof DoesNothing) { validateDoNothing((DoesNothing) answer, invocation); } if (answer instanceof CallsRealMethods) { validateMockingConcreteClass((CallsRealMethods) answer, invocation); } }
public void validate ( Answer < ? > answer , Invocation invocation ) { if ( answer instanceof ThrowsException ) { validateException ( ( ThrowsException ) answer , invocation ) ; } if ( answer instanceof Returns ) { validateReturnValue ( ( Returns ) answer , invocation ) ; } if ( answer instanceof DoesNothing ) { validateDoNothing ( ( DoesNothing ) answer , invocation ) ; } if ( answer instanceof CallsRealMethods ) { validateMockingConcreteClass ( ( CallsRealMethods ) answer , invocation ) ; } }
Math
105
src/java/org/apache/commons/math/stat/regression/SimpleRegression.java
263
265
[math] SimpleRegression getSumSquaredErrors
getSumSquaredErrors returns -ve value. See test below: public void testSimpleRegression() { double[] y = { 8915.102, 8919.302, 8923.502}; double[] x = { 1.107178495, 1.107264895, 1.107351295}; double[] x2 = { 1.107178495E2, 1.107264895E2, 1.107351295E2}; SimpleRegression reg = new SimpleRegression(); for (int i = 0; i < x.length; i++) { reg.addData(x[i],y[i]); } assertTrue(reg.getSumSquaredErrors() >= 0.0); // OK reg.clear(); for (int i = 0; i < x.length; i++) { reg.addData(x2[i],y[i]); } assertTrue(reg.getSumSquaredErrors() >= 0.0); // FAIL }
public double getSumSquaredErrors() { return sumYY - sumXY * sumXY / sumXX; }
public double getSumSquaredErrors ( ) { return sumYY - sumXY * sumXY / sumXX ; }
public double getSumSquaredErrors() { return Math.max(0d, sumYY - sumXY * sumXY / sumXX); }
public double getSumSquaredErrors ( ) { return Math . max ( 0d , sumYY - sumXY * sumXY / sumXX ) ; }
JacksonDatabind
36
src/main/java/com/fasterxml/jackson/databind/util/StdDateFormat.java
545
558
Allow use of `StdDateFormat.setLenient()`
ObjectMapper uses the StdDateFormat for date serialization. Jackson date parsing is lenient by default, so 2015-01-32 gets parsed as 2015-02-01. Jackson’s StdDateParser is matching default behavior of DateParser. StdDateParser wasn’t really designed for extension to just enable strict date parsing. If it were, we could just call objectMapper.setDateFormat(new StdDateFormat().setLenient(false)). But StdDateFomrat doesn't support setting lenient to false. And i.e. the reason date like 2015-01-32 gets parsed as 2015-02-01 ad Jackson date parsing is lenient by defualt. Can StdDateFormat can be enhanced to support to non lenient date parsing?
private final static DateFormat _cloneFormat(DateFormat df, String format, TimeZone tz, Locale loc, Boolean lenient) { if (!loc.equals(DEFAULT_LOCALE)) { df = new SimpleDateFormat(format, loc); df.setTimeZone((tz == null) ? DEFAULT_TIMEZONE : tz); } else { df = (DateFormat) df.clone(); if (tz != null) { df.setTimeZone(tz); } } return df; }
private final static DateFormat _cloneFormat ( DateFormat df , String format , TimeZone tz , Locale loc , Boolean lenient ) { if ( ! loc . equals ( DEFAULT_LOCALE ) ) { df = new SimpleDateFormat ( format , loc ) ; df . setTimeZone ( ( tz == null ) ? DEFAULT_TIMEZONE : tz ) ; } else { df = ( DateFormat ) df . clone ( ) ; if ( tz != null ) { df . setTimeZone ( tz ) ; } } return df ; }
private final static DateFormat _cloneFormat(DateFormat df, String format, TimeZone tz, Locale loc, Boolean lenient) { if (!loc.equals(DEFAULT_LOCALE)) { df = new SimpleDateFormat(format, loc); df.setTimeZone((tz == null) ? DEFAULT_TIMEZONE : tz); } else { df = (DateFormat) df.clone(); if (tz != null) { df.setTimeZone(tz); } } if (lenient != null) { df.setLenient(lenient.booleanValue()); } return df; }
private final static DateFormat _cloneFormat ( DateFormat df , String format , TimeZone tz , Locale loc , Boolean lenient ) { if ( ! loc . equals ( DEFAULT_LOCALE ) ) { df = new SimpleDateFormat ( format , loc ) ; df . setTimeZone ( ( tz == null ) ? DEFAULT_TIMEZONE : tz ) ; } else { df = ( DateFormat ) df . clone ( ) ; if ( tz != null ) { df . setTimeZone ( tz ) ; } } if ( lenient != null ) { df . setLenient ( lenient . booleanValue ( ) ) ; } return df ; }
Codec
17
src/main/java/org/apache/commons/codec/binary/StringUtils.java
338
340
StringUtils.newStringxxx(null) should return null, not NPE
Method calls such as StringUtils.newStringIso8859_1(null) should return null, not NPE. It looks like this capability was lost with the fix for CODEC-136, i.e. http://svn.apache.org/viewvc?rev=1306366&view=rev Several methods were changed from {code} return StringUtils.newString(bytes, CharEncoding.xxx); to return new String(bytes, Charsets.xxx); {code} The new code should have been: {code} return newString(bytes, Charsets.xxx); {code} The newString method handles null input. There were no tests for null input so the change in behaviour was missed.
public static String newStringIso8859_1(final byte[] bytes) { return new String(bytes, Charsets.ISO_8859_1); }
public static String newStringIso8859_1 ( final byte [ ] bytes ) { return new String ( bytes , Charsets . ISO_8859_1 ) ; }
public static String newStringIso8859_1(final byte[] bytes) { return newString(bytes, Charsets.ISO_8859_1); }
public static String newStringIso8859_1 ( final byte [ ] bytes ) { return newString ( bytes , Charsets . ISO_8859_1 ) ; }
Math
9
src/main/java/org/apache/commons/math3/geometry/euclidean/threed/Line.java
86
89
Line.revert() is imprecise
Line.revert() only maintains ~10 digits for the direction. This becomes an issue when the line's position is evaluated far from the origin. A simple fix would be to use Vector3D.negate() for the direction. Also, is there a reason why Line is not immutable? It is just comprised of two vectors.
public Line revert() { final Line reverted = new Line(zero, zero.subtract(direction)); return reverted; }
public Line revert ( ) { final Line reverted = new Line ( zero , zero . subtract ( direction ) ) ; return reverted ; }
public Line revert() { final Line reverted = new Line(this); reverted.direction = reverted.direction.negate(); return reverted; }
public Line revert ( ) { final Line reverted = new Line ( this ) ; reverted . direction = reverted . direction . negate ( ) ; return reverted ; }
Math
53
src/main/java/org/apache/commons/math/complex/Complex.java
150
155
Complex Add and Subtract handle NaN arguments differently, but javadoc contracts are the same
For both Complex add and subtract, the javadoc states that {code} * If either this or <code>rhs</code> has a NaN value in either part, * {@link #NaN} is returned; otherwise Inifinite and NaN values are * returned in the parts of the result according to the rules for * {@link java.lang.Double} arithmetic {code} Subtract includes an isNaN test and returns Complex.NaN if either complex argument isNaN; but add omits this test. The test should be added to the add implementation (actually restored, since this looks like a code merge problem going back to 1.1).
public Complex add(Complex rhs) throws NullArgumentException { MathUtils.checkNotNull(rhs); return createComplex(real + rhs.getReal(), imaginary + rhs.getImaginary()); }
public Complex add ( Complex rhs ) throws NullArgumentException { MathUtils . checkNotNull ( rhs ) ; return createComplex ( real + rhs . getReal ( ) , imaginary + rhs . getImaginary ( ) ) ; }
public Complex add(Complex rhs) throws NullArgumentException { MathUtils.checkNotNull(rhs); if (isNaN || rhs.isNaN) { return NaN; } return createComplex(real + rhs.getReal(), imaginary + rhs.getImaginary()); }
public Complex add ( Complex rhs ) throws NullArgumentException { MathUtils . checkNotNull ( rhs ) ; if ( isNaN || rhs . isNaN ) { return NaN ; } return createComplex ( real + rhs . getReal ( ) , imaginary + rhs . getImaginary ( ) ) ; }
JxPath
22
src/java/org/apache/commons/jxpath/ri/model/dom/DOMNodePointer.java
672
697
Resetting the default namespace causes a serious endless loop when requesting .asPath() on a node.
sample smaller case: {code} <...> <b:foo xmlns:b="bla" xmlns="test111"> <!-- No nodes are placed in the tree within ns "test111" but the attribute is still there.--> <b:bar>a</b:bar> <!-- is in ns 'bla' --> <test xmlns=""></test> <!-- does not have a namespace --> </b:foo> </...> {code} when requesting .asPath() on the 'test' node, it loops in org.apache.commons.jxpath.ri.NamespaceResolver.getPrefix(NodePointer, String), and if it didn't loop it would create a wrong xpath '//b:fo/null:test' DOMNodePointer.asPath(). So I think that the fix should be in org.apache.commons.jxpath.ri.model.dom.DOMNodePointer.asPath() {code} .... String ln = DOMNodePointer.getLocalName(node); String nsURI = getNamespaceURI(); if (nsURI == null) { buffer.append(ln); buffer.append('['); buffer.append(getRelativePositionByName()).append(']'); } else { String prefix = getNamespaceResolver().getPrefix(nsURI); if (prefix != null) { ... {code} should become {code} ... String ln = DOMNodePointer.getLocalName(node); String nsURI = getNamespaceURI(); if (nsURI == null || nsURI.length() == 0) { // check for empty string which means that the node doesn't have a namespace. buffer.append(ln); buffer.append('['); buffer.append(getRelativePositionByName()).append(']'); } else { String prefix = getNamespaceResolver().getPrefix(nsURI); if (prefix != null) { ... {code}
public static String getNamespaceURI(Node node) { if (node instanceof Document) { node = ((Document) node).getDocumentElement(); } Element element = (Element) node; String uri = element.getNamespaceURI(); if (uri == null) { String prefix = getPrefix(node); String qname = prefix == null ? "xmlns" : "xmlns:" + prefix; Node aNode = node; while (aNode != null) { if (aNode.getNodeType() == Node.ELEMENT_NODE) { Attr attr = ((Element) aNode).getAttributeNode(qname); if (attr != null) { return attr.getValue(); } } aNode = aNode.getParentNode(); } return null; } return uri; }
public static String getNamespaceURI ( Node node ) { if ( node instanceof Document ) { node = ( ( Document ) node ) . getDocumentElement ( ) ; } Element element = ( Element ) node ; String uri = element . getNamespaceURI ( ) ; if ( uri == null ) { String prefix = getPrefix ( node ) ; String qname = prefix == null ? "xmlns" : "xmlns:" + prefix ; Node aNode = node ; while ( aNode != null ) { if ( aNode . getNodeType ( ) == Node . ELEMENT_NODE ) { Attr attr = ( ( Element ) aNode ) . getAttributeNode ( qname ) ; if ( attr != null ) { return attr . getValue ( ) ; } } aNode = aNode . getParentNode ( ) ; } return null ; } return uri ; }
public static String getNamespaceURI(Node node) { if (node instanceof Document) { node = ((Document) node).getDocumentElement(); } Element element = (Element) node; String uri = element.getNamespaceURI(); if (uri == null) { String prefix = getPrefix(node); String qname = prefix == null ? "xmlns" : "xmlns:" + prefix; Node aNode = node; while (aNode != null) { if (aNode.getNodeType() == Node.ELEMENT_NODE) { Attr attr = ((Element) aNode).getAttributeNode(qname); if (attr != null) { uri = attr.getValue(); break; } } aNode = aNode.getParentNode(); } } return "".equals(uri) ? null : uri; }
public static String getNamespaceURI ( Node node ) { if ( node instanceof Document ) { node = ( ( Document ) node ) . getDocumentElement ( ) ; } Element element = ( Element ) node ; String uri = element . getNamespaceURI ( ) ; if ( uri == null ) { String prefix = getPrefix ( node ) ; String qname = prefix == null ? "xmlns" : "xmlns:" + prefix ; Node aNode = node ; while ( aNode != null ) { if ( aNode . getNodeType ( ) == Node . ELEMENT_NODE ) { Attr attr = ( ( Element ) aNode ) . getAttributeNode ( qname ) ; if ( attr != null ) { uri = attr . getValue ( ) ; break ; } } aNode = aNode . getParentNode ( ) ; } } return "" . equals ( uri ) ? null : uri ; }
Math
45
src/main/java/org/apache/commons/math/linear/OpenMapRealMatrix.java
48
53
Integer overflow in OpenMapRealMatrix
computeKey() has an integer overflow. Since it is a sparse matrix, this is quite easily encountered long before heap space is exhausted. The attached code demonstrates the problem, which could potentially be a security vulnerability (for example, if one was to use this matrix to store access control information). Workaround: never create an OpenMapRealMatrix with more cells than are addressable with an int.
public OpenMapRealMatrix(int rowDimension, int columnDimension) { super(rowDimension, columnDimension); this.rows = rowDimension; this.columns = columnDimension; this.entries = new OpenIntToDoubleHashMap(0.0); }
public OpenMapRealMatrix ( int rowDimension , int columnDimension ) { super ( rowDimension , columnDimension ) ; this . rows = rowDimension ; this . columns = columnDimension ; this . entries = new OpenIntToDoubleHashMap ( 0.0 ) ; }
public OpenMapRealMatrix(int rowDimension, int columnDimension) { super(rowDimension, columnDimension); long lRow = (long) rowDimension; long lCol = (long) columnDimension; if (lRow * lCol >= (long) Integer.MAX_VALUE) { throw new NumberIsTooLargeException(lRow * lCol, Integer.MAX_VALUE, false); } this.rows = rowDimension; this.columns = columnDimension; this.entries = new OpenIntToDoubleHashMap(0.0); }
public OpenMapRealMatrix ( int rowDimension , int columnDimension ) { super ( rowDimension , columnDimension ) ; long lRow = ( long ) rowDimension ; long lCol = ( long ) columnDimension ; if ( lRow * lCol >= ( long ) Integer . MAX_VALUE ) { throw new NumberIsTooLargeException ( lRow * lCol , Integer . MAX_VALUE , false ) ; } this . rows = rowDimension ; this . columns = columnDimension ; this . entries = new OpenIntToDoubleHashMap ( 0.0 ) ; }
JacksonDatabind
77
src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
96
145
Jackson Deserializer security vulnerability via default typing (CVE-2017-7525)
I have send email to info@fasterxml.com
@Override public JsonDeserializer<Object> createBeanDeserializer(DeserializationContext ctxt, JavaType type, BeanDescription beanDesc) throws JsonMappingException { final DeserializationConfig config = ctxt.getConfig(); // We may also have custom overrides: JsonDeserializer<Object> custom = _findCustomBeanDeserializer(type, config, beanDesc); if (custom != null) { return custom; } /* One more thing to check: do we have an exception type * (Throwable or its sub-classes)? If so, need slightly * different handling. */ if (type.isThrowable()) { return buildThrowableDeserializer(ctxt, type, beanDesc); } /* Or, for abstract types, may have alternate means for resolution * (defaulting, materialization) */ // 29-Nov-2015, tatu: Also, filter out calls to primitive types, they are // not something we could materialize anything for if (type.isAbstract() && !type.isPrimitive()) { // Let's make it possible to materialize abstract types. JavaType concreteType = materializeAbstractType(ctxt, type, beanDesc); if (concreteType != null) { /* important: introspect actual implementation (abstract class or * interface doesn't have constructors, for one) */ beanDesc = config.introspect(concreteType); return buildBeanDeserializer(ctxt, concreteType, beanDesc); } } // Otherwise, may want to check handlers for standard types, from superclass: @SuppressWarnings("unchecked") JsonDeserializer<Object> deser = (JsonDeserializer<Object>) findStdDeserializer(ctxt, type, beanDesc); if (deser != null) { return deser; } // Otherwise: could the class be a Bean class? If not, bail out if (!isPotentialBeanType(type.getRawClass())) { return null; } // For checks like [databind#1599] // Use generic bean introspection to build deserializer return buildBeanDeserializer(ctxt, type, beanDesc); }
@ Override public JsonDeserializer < Object > createBeanDeserializer ( DeserializationContext ctxt , JavaType type , BeanDescription beanDesc ) throws JsonMappingException { final DeserializationConfig config = ctxt . getConfig ( ) ; JsonDeserializer < Object > custom = _findCustomBeanDeserializer ( type , config , beanDesc ) ; if ( custom != null ) { return custom ; } if ( type . isThrowable ( ) ) { return buildThrowableDeserializer ( ctxt , type , beanDesc ) ; } if ( type . isAbstract ( ) && ! type . isPrimitive ( ) ) { JavaType concreteType = materializeAbstractType ( ctxt , type , beanDesc ) ; if ( concreteType != null ) { beanDesc = config . introspect ( concreteType ) ; return buildBeanDeserializer ( ctxt , concreteType , beanDesc ) ; } } @ SuppressWarnings ( "unchecked" ) JsonDeserializer < Object > deser = ( JsonDeserializer < Object > ) findStdDeserializer ( ctxt , type , beanDesc ) ; if ( deser != null ) { return deser ; } if ( ! isPotentialBeanType ( type . getRawClass ( ) ) ) { return null ; } return buildBeanDeserializer ( ctxt , type , beanDesc ) ; }
@Override public JsonDeserializer<Object> createBeanDeserializer(DeserializationContext ctxt, JavaType type, BeanDescription beanDesc) throws JsonMappingException { final DeserializationConfig config = ctxt.getConfig(); // We may also have custom overrides: JsonDeserializer<Object> custom = _findCustomBeanDeserializer(type, config, beanDesc); if (custom != null) { return custom; } /* One more thing to check: do we have an exception type * (Throwable or its sub-classes)? If so, need slightly * different handling. */ if (type.isThrowable()) { return buildThrowableDeserializer(ctxt, type, beanDesc); } /* Or, for abstract types, may have alternate means for resolution * (defaulting, materialization) */ // 29-Nov-2015, tatu: Also, filter out calls to primitive types, they are // not something we could materialize anything for if (type.isAbstract() && !type.isPrimitive()) { // Let's make it possible to materialize abstract types. JavaType concreteType = materializeAbstractType(ctxt, type, beanDesc); if (concreteType != null) { /* important: introspect actual implementation (abstract class or * interface doesn't have constructors, for one) */ beanDesc = config.introspect(concreteType); return buildBeanDeserializer(ctxt, concreteType, beanDesc); } } // Otherwise, may want to check handlers for standard types, from superclass: @SuppressWarnings("unchecked") JsonDeserializer<Object> deser = (JsonDeserializer<Object>) findStdDeserializer(ctxt, type, beanDesc); if (deser != null) { return deser; } // Otherwise: could the class be a Bean class? If not, bail out if (!isPotentialBeanType(type.getRawClass())) { return null; } // For checks like [databind#1599] checkIllegalTypes(ctxt, type, beanDesc); // Use generic bean introspection to build deserializer return buildBeanDeserializer(ctxt, type, beanDesc); }
@ Override public JsonDeserializer < Object > createBeanDeserializer ( DeserializationContext ctxt , JavaType type , BeanDescription beanDesc ) throws JsonMappingException { final DeserializationConfig config = ctxt . getConfig ( ) ; JsonDeserializer < Object > custom = _findCustomBeanDeserializer ( type , config , beanDesc ) ; if ( custom != null ) { return custom ; } if ( type . isThrowable ( ) ) { return buildThrowableDeserializer ( ctxt , type , beanDesc ) ; } if ( type . isAbstract ( ) && ! type . isPrimitive ( ) ) { JavaType concreteType = materializeAbstractType ( ctxt , type , beanDesc ) ; if ( concreteType != null ) { beanDesc = config . introspect ( concreteType ) ; return buildBeanDeserializer ( ctxt , concreteType , beanDesc ) ; } } @ SuppressWarnings ( "unchecked" ) JsonDeserializer < Object > deser = ( JsonDeserializer < Object > ) findStdDeserializer ( ctxt , type , beanDesc ) ; if ( deser != null ) { return deser ; } if ( ! isPotentialBeanType ( type . getRawClass ( ) ) ) { return null ; } checkIllegalTypes ( ctxt , type , beanDesc ) ; return buildBeanDeserializer ( ctxt , type , beanDesc ) ; }
JacksonDatabind
98
src/main/java/com/fasterxml/jackson/databind/deser/impl/ExternalTypeHandler.java
255
311
External property polymorphic deserialization does not work with enums
versions: Jackson 2.8.1, Jackson-module-kotlin 2.8.1 Attempting to deserialize a class using external_property. In my case, the property is an Enum type with values matching the type name. Now that issue #999 is fixed, I thought this would work, but now I'm getting a different error: ``` Exception in thread "main" com.fasterxml.jackson.databind.JsonMappingException: Can not construct instance of enum.Invite, problem: argument type mismatch at [Source: { "kind": "CONTACT", "to": { "name": "Foo" } }; line: 6, column: 1] at com.fasterxml.jackson.databind.JsonMappingException.from(JsonMappingException.java:268) at com.fasterxml.jackson.databind.DeserializationContext.instantiationException(DeserializationContext.java:1405) at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.wrapAsJsonMappingException(StdValueInstantiator.java:468) at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.rewrapCtorProblem(StdValueInstantiator.java:487) at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createFromObjectWith(StdValueInstantiator.java:276) at com.fasterxml.jackson.module.kotlin.KotlinValueInstantiator.createFromObjectWith(KotlinValueInstantiator.kt:30) at com.fasterxml.jackson.databind.deser.impl.PropertyBasedCreator.build(PropertyBasedCreator.java:135) at com.fasterxml.jackson.databind.deser.impl.ExternalTypeHandler.complete(ExternalTypeHandler.java:225) at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeUsingPropertyBasedWithExternalTypeId(BeanDeserializer.java:937) at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeWithExternalTypeId(BeanDeserializer.java:792) at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeFromObject(BeanDeserializer.java:312) at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:148) at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3789) at 
com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2852) at enum.Reproduction_KindEnumKt.main(Reproduction-KindEnum.kt:49) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147) Caused by: java.lang.IllegalArgumentException: argument type mismatch at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) at java.lang.reflect.Constructor.newInstance(Constructor.java:423) at com.fasterxml.jackson.databind.introspect.AnnotatedConstructor.call(AnnotatedConstructor.java:124) at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createFromObjectWith(StdValueInstantiator.java:274) ... 15 more Process finished with exit code 1 ``` Here is the reproduction recipe: https://github.com/rocketraman/jackson-issue-enum-polymorphism/blob/master/src/main/kotlin/enumtype/Reproduction-KindEnum.kt
public Object complete(JsonParser p, DeserializationContext ctxt, PropertyValueBuffer buffer, PropertyBasedCreator creator) throws IOException { // first things first: deserialize all data buffered: final int len = _properties.length; Object[] values = new Object[len]; for (int i = 0; i < len; ++i) { String typeId = _typeIds[i]; final ExtTypedProperty extProp = _properties[i]; if (typeId == null) { // let's allow missing both type and property (may already have been set, too) if (_tokens[i] == null) { continue; } // but not just one // 26-Oct-2012, tatu: As per [databind#94], must allow use of 'defaultImpl' if (!extProp.hasDefaultType()) { ctxt.reportInputMismatch(_beanType, "Missing external type id property '%s'", extProp.getTypePropertyName()); } else { typeId = extProp.getDefaultTypeId(); } } else if (_tokens[i] == null) { SettableBeanProperty prop = extProp.getProperty(); ctxt.reportInputMismatch(_beanType, "Missing property '%s' for external type id '%s'", prop.getName(), _properties[i].getTypePropertyName()); } values[i] = _deserialize(p, ctxt, i, typeId); final SettableBeanProperty prop = extProp.getProperty(); // also: if it's creator prop, fill in if (prop.getCreatorIndex() >= 0) { buffer.assignParameter(prop, values[i]); // [databind#999] And maybe there's creator property for type id too? SettableBeanProperty typeProp = extProp.getTypeProperty(); // for now, should only be needed for creator properties, too if ((typeProp != null) && (typeProp.getCreatorIndex() >= 0)) { // 31-May-2018, tatu: [databind#1328] if id is NOT plain `String`, need to // apply deserializer... fun fun. buffer.assignParameter(typeProp, typeId); } } } Object bean = creator.build(ctxt, buffer); // third: assign non-creator properties for (int i = 0; i < len; ++i) { SettableBeanProperty prop = _properties[i].getProperty(); if (prop.getCreatorIndex() < 0) { prop.set(bean, values[i]); } } return bean; }
public Object complete ( JsonParser p , DeserializationContext ctxt , PropertyValueBuffer buffer , PropertyBasedCreator creator ) throws IOException { final int len = _properties . length ; Object [ ] values = new Object [ len ] ; for ( int i = 0 ; i < len ; ++ i ) { String typeId = _typeIds [ i ] ; final ExtTypedProperty extProp = _properties [ i ] ; if ( typeId == null ) { if ( _tokens [ i ] == null ) { continue ; } if ( ! extProp . hasDefaultType ( ) ) { ctxt . reportInputMismatch ( _beanType , "Missing external type id property '%s'" , extProp . getTypePropertyName ( ) ) ; } else { typeId = extProp . getDefaultTypeId ( ) ; } } else if ( _tokens [ i ] == null ) { SettableBeanProperty prop = extProp . getProperty ( ) ; ctxt . reportInputMismatch ( _beanType , "Missing property '%s' for external type id '%s'" , prop . getName ( ) , _properties [ i ] . getTypePropertyName ( ) ) ; } values [ i ] = _deserialize ( p , ctxt , i , typeId ) ; final SettableBeanProperty prop = extProp . getProperty ( ) ; if ( prop . getCreatorIndex ( ) >= 0 ) { buffer . assignParameter ( prop , values [ i ] ) ; SettableBeanProperty typeProp = extProp . getTypeProperty ( ) ; if ( ( typeProp != null ) && ( typeProp . getCreatorIndex ( ) >= 0 ) ) { buffer . assignParameter ( typeProp , typeId ) ; } } } Object bean = creator . build ( ctxt , buffer ) ; for ( int i = 0 ; i < len ; ++ i ) { SettableBeanProperty prop = _properties [ i ] . getProperty ( ) ; if ( prop . getCreatorIndex ( ) < 0 ) { prop . set ( bean , values [ i ] ) ; } } return bean ; }
public Object complete(JsonParser p, DeserializationContext ctxt, PropertyValueBuffer buffer, PropertyBasedCreator creator) throws IOException { // first things first: deserialize all data buffered: final int len = _properties.length; Object[] values = new Object[len]; for (int i = 0; i < len; ++i) { String typeId = _typeIds[i]; final ExtTypedProperty extProp = _properties[i]; if (typeId == null) { // let's allow missing both type and property (may already have been set, too) if (_tokens[i] == null) { continue; } // but not just one // 26-Oct-2012, tatu: As per [databind#94], must allow use of 'defaultImpl' if (!extProp.hasDefaultType()) { ctxt.reportInputMismatch(_beanType, "Missing external type id property '%s'", extProp.getTypePropertyName()); } else { typeId = extProp.getDefaultTypeId(); } } else if (_tokens[i] == null) { SettableBeanProperty prop = extProp.getProperty(); ctxt.reportInputMismatch(_beanType, "Missing property '%s' for external type id '%s'", prop.getName(), _properties[i].getTypePropertyName()); } values[i] = _deserialize(p, ctxt, i, typeId); final SettableBeanProperty prop = extProp.getProperty(); // also: if it's creator prop, fill in if (prop.getCreatorIndex() >= 0) { buffer.assignParameter(prop, values[i]); // [databind#999] And maybe there's creator property for type id too? SettableBeanProperty typeProp = extProp.getTypeProperty(); // for now, should only be needed for creator properties, too if ((typeProp != null) && (typeProp.getCreatorIndex() >= 0)) { // 31-May-2018, tatu: [databind#1328] if id is NOT plain `String`, need to // apply deserializer... fun fun. 
final Object v; if (typeProp.getType().hasRawClass(String.class)) { v = typeId; } else { TokenBuffer tb = new TokenBuffer(p, ctxt); tb.writeString(typeId); v = typeProp.getValueDeserializer().deserialize(tb.asParserOnFirstToken(), ctxt); tb.close(); } buffer.assignParameter(typeProp, v); } } } Object bean = creator.build(ctxt, buffer); // third: assign non-creator properties for (int i = 0; i < len; ++i) { SettableBeanProperty prop = _properties[i].getProperty(); if (prop.getCreatorIndex() < 0) { prop.set(bean, values[i]); } } return bean; }
public Object complete ( JsonParser p , DeserializationContext ctxt , PropertyValueBuffer buffer , PropertyBasedCreator creator ) throws IOException { final int len = _properties . length ; Object [ ] values = new Object [ len ] ; for ( int i = 0 ; i < len ; ++ i ) { String typeId = _typeIds [ i ] ; final ExtTypedProperty extProp = _properties [ i ] ; if ( typeId == null ) { if ( _tokens [ i ] == null ) { continue ; } if ( ! extProp . hasDefaultType ( ) ) { ctxt . reportInputMismatch ( _beanType , "Missing external type id property '%s'" , extProp . getTypePropertyName ( ) ) ; } else { typeId = extProp . getDefaultTypeId ( ) ; } } else if ( _tokens [ i ] == null ) { SettableBeanProperty prop = extProp . getProperty ( ) ; ctxt . reportInputMismatch ( _beanType , "Missing property '%s' for external type id '%s'" , prop . getName ( ) , _properties [ i ] . getTypePropertyName ( ) ) ; } values [ i ] = _deserialize ( p , ctxt , i , typeId ) ; final SettableBeanProperty prop = extProp . getProperty ( ) ; if ( prop . getCreatorIndex ( ) >= 0 ) { buffer . assignParameter ( prop , values [ i ] ) ; SettableBeanProperty typeProp = extProp . getTypeProperty ( ) ; if ( ( typeProp != null ) && ( typeProp . getCreatorIndex ( ) >= 0 ) ) { final Object v ; if ( typeProp . getType ( ) . hasRawClass ( String . class ) ) { v = typeId ; } else { TokenBuffer tb = new TokenBuffer ( p , ctxt ) ; tb . writeString ( typeId ) ; v = typeProp . getValueDeserializer ( ) . deserialize ( tb . asParserOnFirstToken ( ) , ctxt ) ; tb . close ( ) ; } buffer . assignParameter ( typeProp , v ) ; } } } Object bean = creator . build ( ctxt , buffer ) ; for ( int i = 0 ; i < len ; ++ i ) { SettableBeanProperty prop = _properties [ i ] . getProperty ( ) ; if ( prop . getCreatorIndex ( ) < 0 ) { prop . set ( bean , values [ i ] ) ; } } return bean ; }
JacksonDatabind
20
src/main/java/com/fasterxml/jackson/databind/node/ObjectNode.java
324
334
Presence of PropertyNamingStrategy Makes Deserialization Fail
I originally came across this issue using Dropwizard - https://github.com/dropwizard/dropwizard/issues/1095. But it looks like this is a Jackson issue. Here's the rerproducer: ``` java public class TestPropertyNamingStrategyIssue { public static class ClassWithObjectNodeField { public String id; public ObjectNode json; } @Test public void reproducer() throws Exception { ObjectMapper mapper = new ObjectMapper(); mapper.setPropertyNamingStrategy(PropertyNamingStrategy.LOWER_CASE); ClassWithObjectNodeField deserialized = mapper.readValue( "{ \"id\": \"1\", \"json\": { \"foo\": \"bar\", \"baz\": \"bing\" } }", ClassWithObjectNodeField.class); } } ``` Looks like the presence of any PropertyNamingStrategy make deserialization to ObjectNode fail. This works fine if I remove the property naming strategy.
public JsonNode setAll(Map<String,? extends JsonNode> properties) { for (Map.Entry<String,? extends JsonNode> en : properties.entrySet()) { JsonNode n = en.getValue(); if (n == null) { n = nullNode(); } _children.put(en.getKey(), n); } return this; }
public JsonNode setAll ( Map < String , ? extends JsonNode > properties ) { for ( Map . Entry < String , ? extends JsonNode > en : properties . entrySet ( ) ) { JsonNode n = en . getValue ( ) ; if ( n == null ) { n = nullNode ( ) ; } _children . put ( en . getKey ( ) , n ) ; } return this ; }
@JsonIgnore // work-around for [databind#815] public JsonNode setAll(Map<String,? extends JsonNode> properties) { for (Map.Entry<String,? extends JsonNode> en : properties.entrySet()) { JsonNode n = en.getValue(); if (n == null) { n = nullNode(); } _children.put(en.getKey(), n); } return this; }
@ JsonIgnore public JsonNode setAll ( Map < String , ? extends JsonNode > properties ) { for ( Map . Entry < String , ? extends JsonNode > en : properties . entrySet ( ) ) { JsonNode n = en . getValue ( ) ; if ( n == null ) { n = nullNode ( ) ; } _children . put ( en . getKey ( ) , n ) ; } return this ; }
Csv
4
src/main/java/org/apache/commons/csv/CSVParser.java
287
289
CSVParser: getHeaderMap throws NPE
title nearly says it all :-) Given a CSVParser parser, the following line throws an NPE: {code} Map<String, Integer> header = parser.getHeaderMap(); {code} Stacktrace: {noformat} Caused by: java.lang.NullPointerException at java.util.HashMap.<init>(HashMap.java:318) at java.util.LinkedHashMap.<init>(LinkedHashMap.java:212) at org.apache.commons.csv.CSVParser.getHeaderMap(CSVParser.java:288) {noformat} happens if the format doesn't have a headerMap. to fix, check if the parser's headerMap is null before trying to create the returned map: {code} public Map<String, Integer> getHeaderMap() { return this.headerMap != null ? new LinkedHashMap<String, Integer>(this.headerMap) : null; } {code}
public Map<String, Integer> getHeaderMap() { return new LinkedHashMap<String, Integer>(this.headerMap); }
public Map < String , Integer > getHeaderMap ( ) { return new LinkedHashMap < String , Integer > ( this . headerMap ) ; }
public Map<String, Integer> getHeaderMap() { return this.headerMap == null ? null : new LinkedHashMap<String, Integer>(this.headerMap); }
public Map < String , Integer > getHeaderMap ( ) { return this . headerMap == null ? null : new LinkedHashMap < String , Integer > ( this . headerMap ) ; }
Math
85
src/java/org/apache/commons/math/analysis/solvers/UnivariateRealSolverUtils.java
165
208
bug in inverseCumulativeProbability() for Normal Distribution
* @version $Revision: 617953 $ $Date: 2008-02-02 22:54:00 -0700 (Sat, 02 Feb 2008) $ */ public class NormalDistributionImpl extends AbstractContinuousDistribution * @version $Revision: 506600 $ $Date: 2007-02-12 12:35:59 -0700 (Mon, 12 Feb 2007) $ */ public abstract class AbstractContinuousDistribution This code: DistributionFactory factory = app.getDistributionFactory(); NormalDistribution normal = factory.createNormalDistribution(0,1); double result = normal.inverseCumulativeProbability(0.9772498680518209); gives the exception below. It should return (approx) 2.0000... normal.inverseCumulativeProbability(0.977249868051820); works fine These also give errors: 0.9986501019683698 (should return 3.0000...) 0.9999683287581673 (should return 4.0000...) org.apache.commons.math.MathException: Number of iterations=1, maximum iterations=2,147,483,647, initial=1, lower bound=0, upper bound=179,769,313,486,231,570,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000,000, final a value=0, final b value=2, f(a)=-0.477, f(b)=0 at org.apache.commons.math.distribution.AbstractContinuousDistribution.inverseCumulativeProbability(AbstractContinuousDistribution.java:103) at org.apache.commons.math.distribution.NormalDistributionImpl.inverseCumulativeProbability(NormalDistributionImpl.java:145)
public static double[] bracket(UnivariateRealFunction function, double initial, double lowerBound, double upperBound, int maximumIterations) throws ConvergenceException, FunctionEvaluationException { if (function == null) { throw MathRuntimeException.createIllegalArgumentException("function is null"); } if (maximumIterations <= 0) { throw MathRuntimeException.createIllegalArgumentException( "bad value for maximum iterations number: {0}", maximumIterations); } if (initial < lowerBound || initial > upperBound || lowerBound >= upperBound) { throw MathRuntimeException.createIllegalArgumentException( "invalid bracketing parameters: lower bound={0}, initial={1}, upper bound={2}", lowerBound, initial, upperBound); } double a = initial; double b = initial; double fa; double fb; int numIterations = 0 ; do { a = Math.max(a - 1.0, lowerBound); b = Math.min(b + 1.0, upperBound); fa = function.value(a); fb = function.value(b); numIterations++ ; } while ((fa * fb > 0.0) && (numIterations < maximumIterations) && ((a > lowerBound) || (b < upperBound))); if (fa * fb >= 0.0 ) { throw new ConvergenceException( "number of iterations={0}, maximum iterations={1}, " + "initial={2}, lower bound={3}, upper bound={4}, final a value={5}, " + "final b value={6}, f(a)={7}, f(b)={8}", numIterations, maximumIterations, initial, lowerBound, upperBound, a, b, fa, fb); } return new double[]{a, b}; }
public static double [ ] bracket ( UnivariateRealFunction function , double initial , double lowerBound , double upperBound , int maximumIterations ) throws ConvergenceException , FunctionEvaluationException { if ( function == null ) { throw MathRuntimeException . createIllegalArgumentException ( "function is null" ) ; } if ( maximumIterations <= 0 ) { throw MathRuntimeException . createIllegalArgumentException ( "bad value for maximum iterations number: {0}" , maximumIterations ) ; } if ( initial < lowerBound || initial > upperBound || lowerBound >= upperBound ) { throw MathRuntimeException . createIllegalArgumentException ( "invalid bracketing parameters: lower bound={0}, initial={1}, upper bound={2}" , lowerBound , initial , upperBound ) ; } double a = initial ; double b = initial ; double fa ; double fb ; int numIterations = 0 ; do { a = Math . max ( a - 1.0 , lowerBound ) ; b = Math . min ( b + 1.0 , upperBound ) ; fa = function . value ( a ) ; fb = function . value ( b ) ; numIterations ++ ; } while ( ( fa * fb > 0.0 ) && ( numIterations < maximumIterations ) && ( ( a > lowerBound ) || ( b < upperBound ) ) ) ; if ( fa * fb >= 0.0 ) { throw new ConvergenceException ( "number of iterations={0}, maximum iterations={1}, " + "initial={2}, lower bound={3}, upper bound={4}, final a value={5}, " + "final b value={6}, f(a)={7}, f(b)={8}" , numIterations , maximumIterations , initial , lowerBound , upperBound , a , b , fa , fb ) ; } return new double [ ] { a , b } ; }
public static double[] bracket(UnivariateRealFunction function, double initial, double lowerBound, double upperBound, int maximumIterations) throws ConvergenceException, FunctionEvaluationException { if (function == null) { throw MathRuntimeException.createIllegalArgumentException("function is null"); } if (maximumIterations <= 0) { throw MathRuntimeException.createIllegalArgumentException( "bad value for maximum iterations number: {0}", maximumIterations); } if (initial < lowerBound || initial > upperBound || lowerBound >= upperBound) { throw MathRuntimeException.createIllegalArgumentException( "invalid bracketing parameters: lower bound={0}, initial={1}, upper bound={2}", lowerBound, initial, upperBound); } double a = initial; double b = initial; double fa; double fb; int numIterations = 0 ; do { a = Math.max(a - 1.0, lowerBound); b = Math.min(b + 1.0, upperBound); fa = function.value(a); fb = function.value(b); numIterations++ ; } while ((fa * fb > 0.0) && (numIterations < maximumIterations) && ((a > lowerBound) || (b < upperBound))); if (fa * fb > 0.0 ) { throw new ConvergenceException( "number of iterations={0}, maximum iterations={1}, " + "initial={2}, lower bound={3}, upper bound={4}, final a value={5}, " + "final b value={6}, f(a)={7}, f(b)={8}", numIterations, maximumIterations, initial, lowerBound, upperBound, a, b, fa, fb); } return new double[]{a, b}; }
public static double [ ] bracket ( UnivariateRealFunction function , double initial , double lowerBound , double upperBound , int maximumIterations ) throws ConvergenceException , FunctionEvaluationException { if ( function == null ) { throw MathRuntimeException . createIllegalArgumentException ( "function is null" ) ; } if ( maximumIterations <= 0 ) { throw MathRuntimeException . createIllegalArgumentException ( "bad value for maximum iterations number: {0}" , maximumIterations ) ; } if ( initial < lowerBound || initial > upperBound || lowerBound >= upperBound ) { throw MathRuntimeException . createIllegalArgumentException ( "invalid bracketing parameters: lower bound={0}, initial={1}, upper bound={2}" , lowerBound , initial , upperBound ) ; } double a = initial ; double b = initial ; double fa ; double fb ; int numIterations = 0 ; do { a = Math . max ( a - 1.0 , lowerBound ) ; b = Math . min ( b + 1.0 , upperBound ) ; fa = function . value ( a ) ; fb = function . value ( b ) ; numIterations ++ ; } while ( ( fa * fb > 0.0 ) && ( numIterations < maximumIterations ) && ( ( a > lowerBound ) || ( b < upperBound ) ) ) ; if ( fa * fb > 0.0 ) { throw new ConvergenceException ( "number of iterations={0}, maximum iterations={1}, " + "initial={2}, lower bound={3}, upper bound={4}, final a value={5}, " + "final b value={6}, f(a)={7}, f(b)={8}" , numIterations , maximumIterations , initial , lowerBound , upperBound , a , b , fa , fb ) ; } return new double [ ] { a , b } ; }
Mockito
22
src/org/mockito/internal/matchers/Equality.java
12
20
areEqual does not try == operator for trivial equality case and sometimes has unneccessary exceptions
What steps will reproduce the problem? 1. Any class that throws an exception in its equals method. For example: private final class BadEquals { @Override public boolean equals (Object oth) { throw new RuntimeException(); } } 2. Create an instance of that class. Object badequals=new BadEquals(); 3. Test for equality using org.mockito.internal.matchers.Equality.areEqual: areEqual(badequals,badequals) What is the expected output? What do you see instead? I expect to see they are equal.I suspect that the areEqual method is calling the equals method which is throwing an Exception. In this case, we can use the java equality operator to see that they are equal, bypassing the equals method and its exception.
public static boolean areEqual(Object o1, Object o2) { if (o1 == null || o2 == null) { return o1 == null && o2 == null; } else if (isArray(o1)) { return isArray(o2) && areArraysEqual(o1, o2); } else { return o1.equals(o2); } }
public static boolean areEqual ( Object o1 , Object o2 ) { if ( o1 == null || o2 == null ) { return o1 == null && o2 == null ; } else if ( isArray ( o1 ) ) { return isArray ( o2 ) && areArraysEqual ( o1 , o2 ) ; } else { return o1 . equals ( o2 ) ; } }
public static boolean areEqual(Object o1, Object o2) { if (o1 == o2 ) { return true; } else if (o1 == null || o2 == null) { return o1 == null && o2 == null; } else if (isArray(o1)) { return isArray(o2) && areArraysEqual(o1, o2); } else { return o1.equals(o2); } }
public static boolean areEqual ( Object o1 , Object o2 ) { if ( o1 == o2 ) { return true ; } else if ( o1 == null || o2 == null ) { return o1 == null && o2 == null ; } else if ( isArray ( o1 ) ) { return isArray ( o2 ) && areArraysEqual ( o1 , o2 ) ; } else { return o1 . equals ( o2 ) ; } }
JacksonDatabind
58
src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
703
752
`@JsonIgnoreProperties`: ignoring the "cause" property of `Throwable` on GAE
Deserializing an exception class from json on Google App Engine causes this error: ``` Caused by: java.lang.IllegalArgumentException: Can not access private java.lang.Throwable java.lang.Throwable.cause (from class java.lang.Throwable; failed to set access: java.lang.IllegalAccessException: Reflection is not allowed on private java.lang.Throwable java.lang.Throwable.cause at com.fasterxml.jackson.databind.util.ClassUtil.checkAndFixAccess(ClassUtil.java:505) at com.fasterxml.jackson.databind.introspect.AnnotatedMember.fixAccess(AnnotatedMember.java:123) at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory.constructSettableProperty(BeanDeserializerFactory.java:704) at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory.addBeanProps(BeanDeserializerFactory.java:501) at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory.buildThrowableDeserializer(BeanDeserializerFactory.java:356) at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory.createBeanDeserializer(BeanDeserializerFactory.java:114) ``` I tried preventing this by using `@JsonIgnoreProperties`: ``` java @JsonIgnoreProperties("cause") public class MyException extends RuntimeException { ... } ``` ... but the same error still occurs. What am I doing wrong? What else could I do? I've also considered setting `MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS` to false, but I don't like this solution because I need this setting to be `true` in some other cases (in particular, I provide no-arg constructors for Jackson, but they should't be public in my API).
protected SettableBeanProperty constructSettableProperty(DeserializationContext ctxt, BeanDescription beanDesc, BeanPropertyDefinition propDef, JavaType propType0) throws JsonMappingException { // need to ensure method is callable (for non-public) AnnotatedMember mutator = propDef.getNonConstructorMutator(); if (ctxt.canOverrideAccessModifiers()) { // [databind#877]: explicitly prevent forced access to `cause` of `Throwable`; // never needed and attempts may cause problems on some platforms. // !!! NOTE: should be handled better for 2.8 and later mutator.fixAccess(ctxt.isEnabled(MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS)); } // note: this works since we know there's exactly one argument for methods BeanProperty.Std property = new BeanProperty.Std(propDef.getFullName(), propType0, propDef.getWrapperName(), beanDesc.getClassAnnotations(), mutator, propDef.getMetadata()); JavaType type = resolveType(ctxt, beanDesc, propType0, mutator); // did type change? if (type != propType0) { property = property.withType(type); } // First: does the Method specify the deserializer to use? If so, let's use it. 
JsonDeserializer<Object> propDeser = findDeserializerFromAnnotation(ctxt, mutator); type = modifyTypeByAnnotation(ctxt, mutator, type); TypeDeserializer typeDeser = type.getTypeHandler(); SettableBeanProperty prop; if (mutator instanceof AnnotatedMethod) { prop = new MethodProperty(propDef, type, typeDeser, beanDesc.getClassAnnotations(), (AnnotatedMethod) mutator); } else { prop = new FieldProperty(propDef, type, typeDeser, beanDesc.getClassAnnotations(), (AnnotatedField) mutator); } if (propDeser != null) { prop = prop.withValueDeserializer(propDeser); } // need to retain name of managed forward references: AnnotationIntrospector.ReferenceProperty ref = propDef.findReferenceType(); if (ref != null && ref.isManagedReference()) { prop.setManagedReferenceName(ref.getName()); } ObjectIdInfo objectIdInfo = propDef.findObjectIdInfo(); if(objectIdInfo != null){ prop.setObjectIdInfo(objectIdInfo); } return prop; }
protected SettableBeanProperty constructSettableProperty ( DeserializationContext ctxt , BeanDescription beanDesc , BeanPropertyDefinition propDef , JavaType propType0 ) throws JsonMappingException { AnnotatedMember mutator = propDef . getNonConstructorMutator ( ) ; if ( ctxt . canOverrideAccessModifiers ( ) ) { mutator . fixAccess ( ctxt . isEnabled ( MapperFeature . OVERRIDE_PUBLIC_ACCESS_MODIFIERS ) ) ; } BeanProperty . Std property = new BeanProperty . Std ( propDef . getFullName ( ) , propType0 , propDef . getWrapperName ( ) , beanDesc . getClassAnnotations ( ) , mutator , propDef . getMetadata ( ) ) ; JavaType type = resolveType ( ctxt , beanDesc , propType0 , mutator ) ; if ( type != propType0 ) { property = property . withType ( type ) ; } JsonDeserializer < Object > propDeser = findDeserializerFromAnnotation ( ctxt , mutator ) ; type = modifyTypeByAnnotation ( ctxt , mutator , type ) ; TypeDeserializer typeDeser = type . getTypeHandler ( ) ; SettableBeanProperty prop ; if ( mutator instanceof AnnotatedMethod ) { prop = new MethodProperty ( propDef , type , typeDeser , beanDesc . getClassAnnotations ( ) , ( AnnotatedMethod ) mutator ) ; } else { prop = new FieldProperty ( propDef , type , typeDeser , beanDesc . getClassAnnotations ( ) , ( AnnotatedField ) mutator ) ; } if ( propDeser != null ) { prop = prop . withValueDeserializer ( propDeser ) ; } AnnotationIntrospector . ReferenceProperty ref = propDef . findReferenceType ( ) ; if ( ref != null && ref . isManagedReference ( ) ) { prop . setManagedReferenceName ( ref . getName ( ) ) ; } ObjectIdInfo objectIdInfo = propDef . findObjectIdInfo ( ) ; if ( objectIdInfo != null ) { prop . setObjectIdInfo ( objectIdInfo ) ; } return prop ; }
protected SettableBeanProperty constructSettableProperty(DeserializationContext ctxt, BeanDescription beanDesc, BeanPropertyDefinition propDef, JavaType propType0) throws JsonMappingException { // need to ensure method is callable (for non-public) AnnotatedMember mutator = propDef.getNonConstructorMutator(); if (ctxt.canOverrideAccessModifiers()) { // [databind#877]: explicitly prevent forced access to `cause` of `Throwable`; // never needed and attempts may cause problems on some platforms. // !!! NOTE: should be handled better for 2.8 and later if ((mutator instanceof AnnotatedField) && "cause".equals(mutator.getName())) { ; } else { mutator.fixAccess(ctxt.isEnabled(MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS)); } } // note: this works since we know there's exactly one argument for methods BeanProperty.Std property = new BeanProperty.Std(propDef.getFullName(), propType0, propDef.getWrapperName(), beanDesc.getClassAnnotations(), mutator, propDef.getMetadata()); JavaType type = resolveType(ctxt, beanDesc, propType0, mutator); // did type change? if (type != propType0) { property = property.withType(type); } // First: does the Method specify the deserializer to use? If so, let's use it. 
JsonDeserializer<Object> propDeser = findDeserializerFromAnnotation(ctxt, mutator); type = modifyTypeByAnnotation(ctxt, mutator, type); TypeDeserializer typeDeser = type.getTypeHandler(); SettableBeanProperty prop; if (mutator instanceof AnnotatedMethod) { prop = new MethodProperty(propDef, type, typeDeser, beanDesc.getClassAnnotations(), (AnnotatedMethod) mutator); } else { prop = new FieldProperty(propDef, type, typeDeser, beanDesc.getClassAnnotations(), (AnnotatedField) mutator); } if (propDeser != null) { prop = prop.withValueDeserializer(propDeser); } // need to retain name of managed forward references: AnnotationIntrospector.ReferenceProperty ref = propDef.findReferenceType(); if (ref != null && ref.isManagedReference()) { prop.setManagedReferenceName(ref.getName()); } ObjectIdInfo objectIdInfo = propDef.findObjectIdInfo(); if(objectIdInfo != null){ prop.setObjectIdInfo(objectIdInfo); } return prop; }
protected SettableBeanProperty constructSettableProperty ( DeserializationContext ctxt , BeanDescription beanDesc , BeanPropertyDefinition propDef , JavaType propType0 ) throws JsonMappingException { AnnotatedMember mutator = propDef . getNonConstructorMutator ( ) ; if ( ctxt . canOverrideAccessModifiers ( ) ) { if ( ( mutator instanceof AnnotatedField ) && "cause" . equals ( mutator . getName ( ) ) ) { ; } else { mutator . fixAccess ( ctxt . isEnabled ( MapperFeature . OVERRIDE_PUBLIC_ACCESS_MODIFIERS ) ) ; } } BeanProperty . Std property = new BeanProperty . Std ( propDef . getFullName ( ) , propType0 , propDef . getWrapperName ( ) , beanDesc . getClassAnnotations ( ) , mutator , propDef . getMetadata ( ) ) ; JavaType type = resolveType ( ctxt , beanDesc , propType0 , mutator ) ; if ( type != propType0 ) { property = property . withType ( type ) ; } JsonDeserializer < Object > propDeser = findDeserializerFromAnnotation ( ctxt , mutator ) ; type = modifyTypeByAnnotation ( ctxt , mutator , type ) ; TypeDeserializer typeDeser = type . getTypeHandler ( ) ; SettableBeanProperty prop ; if ( mutator instanceof AnnotatedMethod ) { prop = new MethodProperty ( propDef , type , typeDeser , beanDesc . getClassAnnotations ( ) , ( AnnotatedMethod ) mutator ) ; } else { prop = new FieldProperty ( propDef , type , typeDeser , beanDesc . getClassAnnotations ( ) , ( AnnotatedField ) mutator ) ; } if ( propDeser != null ) { prop = prop . withValueDeserializer ( propDeser ) ; } AnnotationIntrospector . ReferenceProperty ref = propDef . findReferenceType ( ) ; if ( ref != null && ref . isManagedReference ( ) ) { prop . setManagedReferenceName ( ref . getName ( ) ) ; } ObjectIdInfo objectIdInfo = propDef . findObjectIdInfo ( ) ; if ( objectIdInfo != null ) { prop . setObjectIdInfo ( objectIdInfo ) ; } return prop ; }
JacksonXml
2
src/main/java/com/fasterxml/jackson/dataformat/xml/deser/XmlTokenStream.java
309
356
Mixed content not supported if there are child elements.
@XmlText is only supported if there are no child elements, support could be improved with some changes in XmlTokenStream. I successfully made some changes in XmlTokenStream, it's working in my personal case, but it needs more tests. If agreed, I could provide a patch. Example: Input string : `"<windSpeed units=\"kt\">27<radius>20</radius></windSpeed>"` "CxmlWindSpeed" class : ``` public class WindSpeed { public static class Radius { @JacksonXmlProperty(isAttribute = true) private String sector; @JacksonXmlProperty(isAttribute = true) private String units; @JacksonXmlText private int value; ..../ Getters and Setters code/.... } @JacksonXmlProperty(isAttribute = true) private String units; @JacksonXmlProperty(isAttribute = true) private String source; @JacksonXmlText private int value; @JacksonXmlElementWrapper(useWrapping = false) private List<Radius> radius; ..../ Getters and Setters code/.... } ```
private final int _next() throws XMLStreamException { switch (_currentState) { case XML_ATTRIBUTE_VALUE: ++_nextAttributeIndex; // fall through case XML_START_ELEMENT: // attributes to return? if (_nextAttributeIndex < _attributeCount) { _localName = _xmlReader.getAttributeLocalName(_nextAttributeIndex); _namespaceURI = _xmlReader.getAttributeNamespace(_nextAttributeIndex); _textValue = _xmlReader.getAttributeValue(_nextAttributeIndex); return (_currentState = XML_ATTRIBUTE_NAME); } // otherwise need to find START/END_ELEMENT or text String text = _collectUntilTag(); // If we have no/all-whitespace text followed by START_ELEMENT, ignore text if (_xmlReader.getEventType() == XMLStreamReader.START_ELEMENT) { return _initStartElement(); } // For END_ELEMENT we will return text, if any if (text != null) { _textValue = text; return (_currentState = XML_TEXT); } return _handleEndElement(); case XML_ATTRIBUTE_NAME: // if we just returned name, will need to just send value next return (_currentState = XML_ATTRIBUTE_VALUE); case XML_TEXT: // mixed text with other elements // text followed by END_ELEMENT return _handleEndElement(); case XML_END: return XML_END; // throw new IllegalStateException("No more XML tokens available (end of input)"); } // Ok: must be END_ELEMENT; see what tag we get (or end) switch (_skipUntilTag()) { case XMLStreamConstants.END_DOCUMENT: return (_currentState = XML_END); case XMLStreamConstants.END_ELEMENT: return _handleEndElement(); } // START_ELEMENT... return _initStartElement(); }
private final int _next ( ) throws XMLStreamException { switch ( _currentState ) { case XML_ATTRIBUTE_VALUE : ++ _nextAttributeIndex ; case XML_START_ELEMENT : if ( _nextAttributeIndex < _attributeCount ) { _localName = _xmlReader . getAttributeLocalName ( _nextAttributeIndex ) ; _namespaceURI = _xmlReader . getAttributeNamespace ( _nextAttributeIndex ) ; _textValue = _xmlReader . getAttributeValue ( _nextAttributeIndex ) ; return ( _currentState = XML_ATTRIBUTE_NAME ) ; } String text = _collectUntilTag ( ) ; if ( _xmlReader . getEventType ( ) == XMLStreamReader . START_ELEMENT ) { return _initStartElement ( ) ; } if ( text != null ) { _textValue = text ; return ( _currentState = XML_TEXT ) ; } return _handleEndElement ( ) ; case XML_ATTRIBUTE_NAME : return ( _currentState = XML_ATTRIBUTE_VALUE ) ; case XML_TEXT : return _handleEndElement ( ) ; case XML_END : return XML_END ; } switch ( _skipUntilTag ( ) ) { case XMLStreamConstants . END_DOCUMENT : return ( _currentState = XML_END ) ; case XMLStreamConstants . END_ELEMENT : return _handleEndElement ( ) ; } return _initStartElement ( ) ; }
private final int _next() throws XMLStreamException { switch (_currentState) { case XML_ATTRIBUTE_VALUE: ++_nextAttributeIndex; // fall through case XML_START_ELEMENT: // attributes to return? if (_nextAttributeIndex < _attributeCount) { _localName = _xmlReader.getAttributeLocalName(_nextAttributeIndex); _namespaceURI = _xmlReader.getAttributeNamespace(_nextAttributeIndex); _textValue = _xmlReader.getAttributeValue(_nextAttributeIndex); return (_currentState = XML_ATTRIBUTE_NAME); } // otherwise need to find START/END_ELEMENT or text String text = _collectUntilTag(); final boolean startElementNext = _xmlReader.getEventType() == XMLStreamReader.START_ELEMENT; // If we have no/all-whitespace text followed by START_ELEMENT, ignore text if (startElementNext) { if (text == null || _allWs(text)) { _mixedText = false; return _initStartElement(); } _mixedText = true; _textValue = text; return (_currentState = XML_TEXT); } // For END_ELEMENT we will return text, if any if (text != null) { _mixedText = false; _textValue = text; return (_currentState = XML_TEXT); } _mixedText = false; return _handleEndElement(); case XML_ATTRIBUTE_NAME: // if we just returned name, will need to just send value next return (_currentState = XML_ATTRIBUTE_VALUE); case XML_TEXT: // mixed text with other elements if (_mixedText){ _mixedText = false; return _initStartElement(); } // text followed by END_ELEMENT return _handleEndElement(); case XML_END: return XML_END; // throw new IllegalStateException("No more XML tokens available (end of input)"); } // Ok: must be END_ELEMENT; see what tag we get (or end) switch (_skipUntilTag()) { case XMLStreamConstants.END_DOCUMENT: return (_currentState = XML_END); case XMLStreamConstants.END_ELEMENT: return _handleEndElement(); } // START_ELEMENT... return _initStartElement(); }
private final int _next ( ) throws XMLStreamException { switch ( _currentState ) { case XML_ATTRIBUTE_VALUE : ++ _nextAttributeIndex ; case XML_START_ELEMENT : if ( _nextAttributeIndex < _attributeCount ) { _localName = _xmlReader . getAttributeLocalName ( _nextAttributeIndex ) ; _namespaceURI = _xmlReader . getAttributeNamespace ( _nextAttributeIndex ) ; _textValue = _xmlReader . getAttributeValue ( _nextAttributeIndex ) ; return ( _currentState = XML_ATTRIBUTE_NAME ) ; } String text = _collectUntilTag ( ) ; final boolean startElementNext = _xmlReader . getEventType ( ) == XMLStreamReader . START_ELEMENT ; if ( startElementNext ) { if ( text == null || _allWs ( text ) ) { _mixedText = false ; return _initStartElement ( ) ; } _mixedText = true ; _textValue = text ; return ( _currentState = XML_TEXT ) ; } if ( text != null ) { _mixedText = false ; _textValue = text ; return ( _currentState = XML_TEXT ) ; } _mixedText = false ; return _handleEndElement ( ) ; case XML_ATTRIBUTE_NAME : return ( _currentState = XML_ATTRIBUTE_VALUE ) ; case XML_TEXT : if ( _mixedText ) { _mixedText = false ; return _initStartElement ( ) ; } return _handleEndElement ( ) ; case XML_END : return XML_END ; } switch ( _skipUntilTag ( ) ) { case XMLStreamConstants . END_DOCUMENT : return ( _currentState = XML_END ) ; case XMLStreamConstants . END_ELEMENT : return _handleEndElement ( ) ; } return _initStartElement ( ) ; }
Compress
32
src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
481
518
TarArchiveInputStream rejects uid or gid >= 0x80000000
A POSIX-format archive that came from sysdiagnose produces NumberFormatException[1] when I try to read it with TarArchiveInputStream. The relevant part of the .tar file looks like this: 18 uid=429496729 That's the uid of 'nobody' on Mac OS (on Mac OS, uid_t is 'unsigned int'). POSIX doesn't say anything about the width of the uid extended header[2], so I assume the tar file is okay. GNU tar doesn't have trouble with it. The relevant code, in applyPaxHeadersToCurrentEntry: } else if ("gid".equals(key)){ currEntry.setGroupId(Integer.parseInt(val)); ... } else if ("uid".equals(key)){ currEntry.setUserId(Integer.parseInt(val)); uid_t and gid_t are typically unsigned 32-bit integers, so these should presumably use Long.parseLong to handle integers with the top bit set (and TarArchiveEntry would need some modifications to handle large uid and gid, too). [1] java.lang.NumberFormatException: For input string: "4294967294" at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.lang.Integer.parseInt(Integer.java:495) at java.lang.Integer.parseInt(Integer.java:527) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.applyPaxHeadersToCurrentEntry(TarArchiveInputStream.java:488) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.paxHeaders(TarArchiveInputStream.java:415) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:295) [2] http://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_03 uid The user ID of the file owner, expressed as a decimal number using digits from the ISO/IEC 646:1991 standard. This record shall override the uid field in the following header block(s). When used in write or copy mode, pax shall include a uid extended header record for each file whose owner ID is greater than 2097151 (octal 7777777).
private void applyPaxHeadersToCurrentEntry(Map<String, String> headers) { /* * The following headers are defined for Pax. * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields * mtime * comment * gid, gname * linkpath * size * uid,uname * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those */ for (Entry<String, String> ent : headers.entrySet()){ String key = ent.getKey(); String val = ent.getValue(); if ("path".equals(key)){ currEntry.setName(val); } else if ("linkpath".equals(key)){ currEntry.setLinkName(val); } else if ("gid".equals(key)){ currEntry.setGroupId(Integer.parseInt(val)); } else if ("gname".equals(key)){ currEntry.setGroupName(val); } else if ("uid".equals(key)){ currEntry.setUserId(Integer.parseInt(val)); } else if ("uname".equals(key)){ currEntry.setUserName(val); } else if ("size".equals(key)){ currEntry.setSize(Long.parseLong(val)); } else if ("mtime".equals(key)){ currEntry.setModTime((long) (Double.parseDouble(val) * 1000)); } else if ("SCHILY.devminor".equals(key)){ currEntry.setDevMinor(Integer.parseInt(val)); } else if ("SCHILY.devmajor".equals(key)){ currEntry.setDevMajor(Integer.parseInt(val)); } } }
private void applyPaxHeadersToCurrentEntry ( Map < String , String > headers ) { for ( Entry < String , String > ent : headers . entrySet ( ) ) { String key = ent . getKey ( ) ; String val = ent . getValue ( ) ; if ( "path" . equals ( key ) ) { currEntry . setName ( val ) ; } else if ( "linkpath" . equals ( key ) ) { currEntry . setLinkName ( val ) ; } else if ( "gid" . equals ( key ) ) { currEntry . setGroupId ( Integer . parseInt ( val ) ) ; } else if ( "gname" . equals ( key ) ) { currEntry . setGroupName ( val ) ; } else if ( "uid" . equals ( key ) ) { currEntry . setUserId ( Integer . parseInt ( val ) ) ; } else if ( "uname" . equals ( key ) ) { currEntry . setUserName ( val ) ; } else if ( "size" . equals ( key ) ) { currEntry . setSize ( Long . parseLong ( val ) ) ; } else if ( "mtime" . equals ( key ) ) { currEntry . setModTime ( ( long ) ( Double . parseDouble ( val ) * 1000 ) ) ; } else if ( "SCHILY.devminor" . equals ( key ) ) { currEntry . setDevMinor ( Integer . parseInt ( val ) ) ; } else if ( "SCHILY.devmajor" . equals ( key ) ) { currEntry . setDevMajor ( Integer . parseInt ( val ) ) ; } } }
private void applyPaxHeadersToCurrentEntry(Map<String, String> headers) { /* * The following headers are defined for Pax. * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields * mtime * comment * gid, gname * linkpath * size * uid,uname * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those */ for (Entry<String, String> ent : headers.entrySet()){ String key = ent.getKey(); String val = ent.getValue(); if ("path".equals(key)){ currEntry.setName(val); } else if ("linkpath".equals(key)){ currEntry.setLinkName(val); } else if ("gid".equals(key)){ currEntry.setGroupId(Long.parseLong(val)); } else if ("gname".equals(key)){ currEntry.setGroupName(val); } else if ("uid".equals(key)){ currEntry.setUserId(Long.parseLong(val)); } else if ("uname".equals(key)){ currEntry.setUserName(val); } else if ("size".equals(key)){ currEntry.setSize(Long.parseLong(val)); } else if ("mtime".equals(key)){ currEntry.setModTime((long) (Double.parseDouble(val) * 1000)); } else if ("SCHILY.devminor".equals(key)){ currEntry.setDevMinor(Integer.parseInt(val)); } else if ("SCHILY.devmajor".equals(key)){ currEntry.setDevMajor(Integer.parseInt(val)); } } }
private void applyPaxHeadersToCurrentEntry ( Map < String , String > headers ) { for ( Entry < String , String > ent : headers . entrySet ( ) ) { String key = ent . getKey ( ) ; String val = ent . getValue ( ) ; if ( "path" . equals ( key ) ) { currEntry . setName ( val ) ; } else if ( "linkpath" . equals ( key ) ) { currEntry . setLinkName ( val ) ; } else if ( "gid" . equals ( key ) ) { currEntry . setGroupId ( Long . parseLong ( val ) ) ; } else if ( "gname" . equals ( key ) ) { currEntry . setGroupName ( val ) ; } else if ( "uid" . equals ( key ) ) { currEntry . setUserId ( Long . parseLong ( val ) ) ; } else if ( "uname" . equals ( key ) ) { currEntry . setUserName ( val ) ; } else if ( "size" . equals ( key ) ) { currEntry . setSize ( Long . parseLong ( val ) ) ; } else if ( "mtime" . equals ( key ) ) { currEntry . setModTime ( ( long ) ( Double . parseDouble ( val ) * 1000 ) ) ; } else if ( "SCHILY.devminor" . equals ( key ) ) { currEntry . setDevMinor ( Integer . parseInt ( val ) ) ; } else if ( "SCHILY.devmajor" . equals ( key ) ) { currEntry . setDevMajor ( Integer . parseInt ( val ) ) ; } } }
Compress
24
src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
102
153
TarArchiveInputStream fails to read entry with big user-id value
Caused by: java.lang.IllegalArgumentException: Invalid byte 52 at offset 7 in '62410554' len=8 at org.apache.commons.compress.archivers.tar.TarUtils.parseOctal(TarUtils.java:130) at org.apache.commons.compress.archivers.tar.TarUtils.parseOctalOrBinary(TarUtils.java:175) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:953) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:940) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.<init>(TarArchiveEntry.java:324) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:247) ... 5 more
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } if (buffer[start] == 0) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Trim all trailing NULs and spaces. // The ustar and POSIX tar specs require a trailing NUL or // space but some implementations use the extra digit for big // sizes/uids/gids ... byte trailer = buffer[end - 1]; if (trailer == 0 || trailer == ' '){ end--; } else { throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, end-1, trailer)); } trailer = buffer[end - 1]; while (start < end - 1 && (trailer == 0 || trailer == ' ')) { end--; trailer = buffer[end - 1]; } for ( ;start < end; start++) { final byte currentByte = buffer[start]; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer = buffer [ end - 1 ] ; if ( trailer == 0 || trailer == ' ' ) { end -- ; } else { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , end - 1 , trailer ) ) ; } trailer = buffer [ end - 1 ] ; while ( start < end - 1 && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
public static long parseOctal(final byte[] buffer, final int offset, final int length) { long result = 0; int end = offset + length; int start = offset; if (length < 2){ throw new IllegalArgumentException("Length "+length+" must be at least 2"); } if (buffer[start] == 0) { return 0L; } // Skip leading spaces while (start < end){ if (buffer[start] == ' '){ start++; } else { break; } } // Trim all trailing NULs and spaces. // The ustar and POSIX tar specs require a trailing NUL or // space but some implementations use the extra digit for big // sizes/uids/gids ... byte trailer = buffer[end - 1]; while (start < end && (trailer == 0 || trailer == ' ')) { end--; trailer = buffer[end - 1]; } if (start == end) { throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, trailer)); } for ( ;start < end; start++) { final byte currentByte = buffer[start]; // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( exceptionMessage(buffer, offset, length, start, currentByte)); } result = (result << 3) + (currentByte - '0'); // convert from ASCII // CheckStyle:MagicNumber ON } return result; }
public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer = buffer [ end - 1 ] ; while ( start < end && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } if ( start == end ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , trailer ) ) ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; }
JacksonDatabind
19
src/main/java/com/fasterxml/jackson/databind/type/TypeFactory.java
1018
1031
Force value coercion for `java.util.Properties`, so that values are `String`s
Currently there is no custom handling for `java.util.Properties`, and although it is possible to use it (since it really is a `Map` under the hood), results are only good if values are already `String`s. The problem here is that `Properties` is actually declared as `Map<String,Object>`, probably due to backwards-compatibility constraints. But Jackson should know better: perhaps by `TypeFactory` tweaking parameterizations a bit?
private JavaType _mapType(Class<?> rawClass) { // 28-May-2015, tatu: Properties are special, as per [databind#810] JavaType[] typeParams = findTypeParameters(rawClass, Map.class); // ok to have no types ("raw") if (typeParams == null) { return MapType.construct(rawClass, _unknownType(), _unknownType()); } // but exactly 2 types if any found if (typeParams.length != 2) { throw new IllegalArgumentException("Strange Map type "+rawClass.getName()+": can not determine type parameters"); } return MapType.construct(rawClass, typeParams[0], typeParams[1]); }
private JavaType _mapType ( Class < ? > rawClass ) { JavaType [ ] typeParams = findTypeParameters ( rawClass , Map . class ) ; if ( typeParams == null ) { return MapType . construct ( rawClass , _unknownType ( ) , _unknownType ( ) ) ; } if ( typeParams . length != 2 ) { throw new IllegalArgumentException ( "Strange Map type " + rawClass . getName ( ) + ": can not determine type parameters" ) ; } return MapType . construct ( rawClass , typeParams [ 0 ] , typeParams [ 1 ] ) ; }
private JavaType _mapType(Class<?> rawClass) { // 28-May-2015, tatu: Properties are special, as per [databind#810] if (rawClass == Properties.class) { return MapType.construct(rawClass, CORE_TYPE_STRING, CORE_TYPE_STRING); } JavaType[] typeParams = findTypeParameters(rawClass, Map.class); // ok to have no types ("raw") if (typeParams == null) { return MapType.construct(rawClass, _unknownType(), _unknownType()); } // but exactly 2 types if any found if (typeParams.length != 2) { throw new IllegalArgumentException("Strange Map type "+rawClass.getName()+": can not determine type parameters"); } return MapType.construct(rawClass, typeParams[0], typeParams[1]); }
private JavaType _mapType ( Class < ? > rawClass ) { if ( rawClass == Properties . class ) { return MapType . construct ( rawClass , CORE_TYPE_STRING , CORE_TYPE_STRING ) ; } JavaType [ ] typeParams = findTypeParameters ( rawClass , Map . class ) ; if ( typeParams == null ) { return MapType . construct ( rawClass , _unknownType ( ) , _unknownType ( ) ) ; } if ( typeParams . length != 2 ) { throw new IllegalArgumentException ( "Strange Map type " + rawClass . getName ( ) + ": can not determine type parameters" ) ; } return MapType . construct ( rawClass , typeParams [ 0 ] , typeParams [ 1 ] ) ; }
Mockito
34
src/org/mockito/internal/invocation/InvocationMatcher.java
103
111
Invocation.thenReturn() may fail with varargs (similar to issue 123)
What steps will reproduce the problem? 1. Has a class with a method like this: blah(Object... o) 2. Stub it like this: when(mock.blah(anyVararg())).thenReturn(-1) 3. Run test. What is the expected output? What do you see instead? It should return -1 on both calls, instead it threw an ArrayIndexOutOfBoundsException on the second assert What version of the product are you using? On what operating system? 1.8.1 MacOS 10.5.8 Please provide any additional information below. class Foo { int blah(Object... o) { return 0; } } @Test public void shouldNotThrowAnyException() throws Exception { Foo foo = mock(Foo.class); when(foo.blah(anyVararg())).thenReturn(-1); assertEquals(-1, foo.blah(&quot;foo&quot;)); assertEquals(-1, foo.blah()); }
public void captureArgumentsFrom(Invocation i) { int k = 0; for (Matcher m : matchers) { if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(i.getArguments()[k]); } k++; } }
public void captureArgumentsFrom ( Invocation i ) { int k = 0 ; for ( Matcher m : matchers ) { if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( i . getArguments ( ) [ k ] ) ; } k ++ ; } }
public void captureArgumentsFrom(Invocation i) { int k = 0; for (Matcher m : matchers) { if (m instanceof CapturesArguments && i.getArguments().length > k) { ((CapturesArguments) m).captureFrom(i.getArguments()[k]); } k++; } }
public void captureArgumentsFrom ( Invocation i ) { int k = 0 ; for ( Matcher m : matchers ) { if ( m instanceof CapturesArguments && i . getArguments ( ) . length > k ) { ( ( CapturesArguments ) m ) . captureFrom ( i . getArguments ( ) [ k ] ) ; } k ++ ; } }
JacksonDatabind
9
src/main/java/com/fasterxml/jackson/databind/ser/std/StdKeySerializer.java
24
35
Deserializing Map<Class<? extends Object>, String>
I am having problems deserializing my `Map<Class<? extends Object>, String>`. Simple test case demonstrates it: ``` java @Test public void testMapWithClassAsKey() throws Exception { Map<Class<? extends Object>, String> map = new HashMap<>(); map.put(ArrayList.class, "ArrayList"); map.put(HashMap.class, "HashMap"); ObjectMapper mapper = new ObjectMapper(); String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(map); System.out.println(json); mapper.readValue(json, new TypeReference<Map<Class<? extends Object>, String>>(){}); } ``` This test serializes the map as: ``` json { "class java.util.ArrayList" : "ArrayList", "class java.util.HashMap" : "HashMap" } ``` `mapper.readValue(json, new TypeReference<Map<Class<? extends Object>, String>>(){});` then throws a `Exception`: ``` com.fasterxml.jackson.databind.exc.InvalidFormatException: Can not construct Map key of type java.lang.Class from String "class java.util.ArrayList": not a valid representation: Can not construct Map key of type java.lang.Class from String "class java.util.ArrayList": unable to parse key as Class at [Source: ... ``` As i understood from #630 the KeyDeserializer for Class should be part of Jackson. Am I missing something?
@Override public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException { String str; if (value instanceof Date) { provider.defaultSerializeDateKey((Date) value, jgen); return; } else { str = value.toString(); } jgen.writeFieldName(str); }
@ Override public void serialize ( Object value , JsonGenerator jgen , SerializerProvider provider ) throws IOException { String str ; if ( value instanceof Date ) { provider . defaultSerializeDateKey ( ( Date ) value , jgen ) ; return ; } else { str = value . toString ( ) ; } jgen . writeFieldName ( str ) ; }
@Override public void serialize(Object value, JsonGenerator jgen, SerializerProvider provider) throws IOException { String str; Class<?> cls = value.getClass(); if (cls == String.class) { str = (String) value; } else if (Date.class.isAssignableFrom(cls)) { provider.defaultSerializeDateKey((Date) value, jgen); return; } else if (cls == Class.class) { str = ((Class<?>) value).getName(); } else { str = value.toString(); } jgen.writeFieldName(str); }
@ Override public void serialize ( Object value , JsonGenerator jgen , SerializerProvider provider ) throws IOException { String str ; Class < ? > cls = value . getClass ( ) ; if ( cls == String . class ) { str = ( String ) value ; } else if ( Date . class . isAssignableFrom ( cls ) ) { provider . defaultSerializeDateKey ( ( Date ) value , jgen ) ; return ; } else if ( cls == Class . class ) { str = ( ( Class < ? > ) value ) . getName ( ) ; } else { str = value . toString ( ) ; } jgen . writeFieldName ( str ) ; }
JacksonDatabind
35
src/main/java/com/fasterxml/jackson/databind/jsontype/impl/AsWrapperTypeDeserializer.java
78
120
Problem with Object Id and Type Id as Wrapper Object (regression in 2.5.1)
(note: originally from https://github.com/FasterXML/jackson-module-jaxb-annotations/issues/51) Looks like fix for #669 caused a regression for the special use case of combining type and object ids, with wrapper-object type id inclusion. The problem started with 2.5.1.
@SuppressWarnings("resource") private final Object _deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { // 02-Aug-2013, tatu: May need to use native type ids if (p.canReadTypeId()) { Object typeId = p.getTypeId(); if (typeId != null) { return _deserializeWithNativeTypeId(p, ctxt, typeId); } } // first, sanity checks if (p.getCurrentToken() != JsonToken.START_OBJECT) { throw ctxt.wrongTokenException(p, JsonToken.START_OBJECT, "need JSON Object to contain As.WRAPPER_OBJECT type information for class "+baseTypeName()); } // should always get field name, but just in case... if (p.nextToken() != JsonToken.FIELD_NAME) { throw ctxt.wrongTokenException(p, JsonToken.FIELD_NAME, "need JSON String that contains type id (for subtype of "+baseTypeName()+")"); } final String typeId = p.getText(); JsonDeserializer<Object> deser = _findDeserializer(ctxt, typeId); p.nextToken(); // Minor complication: we may need to merge type id in? if (_typeIdVisible && p.getCurrentToken() == JsonToken.START_OBJECT) { // but what if there's nowhere to add it in? Error? Or skip? For now, skip. TokenBuffer tb = new TokenBuffer(null, false); tb.writeStartObject(); // recreate START_OBJECT tb.writeFieldName(_typePropertyName); tb.writeString(typeId); p = JsonParserSequence.createFlattened(tb.asParser(p), p); p.nextToken(); } Object value = deser.deserialize(p, ctxt); // And then need the closing END_OBJECT if (p.nextToken() != JsonToken.END_OBJECT) { throw ctxt.wrongTokenException(p, JsonToken.END_OBJECT, "expected closing END_OBJECT after type information and deserialized value"); } return value; }
@ SuppressWarnings ( "resource" ) private final Object _deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException { if ( p . canReadTypeId ( ) ) { Object typeId = p . getTypeId ( ) ; if ( typeId != null ) { return _deserializeWithNativeTypeId ( p , ctxt , typeId ) ; } } if ( p . getCurrentToken ( ) != JsonToken . START_OBJECT ) { throw ctxt . wrongTokenException ( p , JsonToken . START_OBJECT , "need JSON Object to contain As.WRAPPER_OBJECT type information for class " + baseTypeName ( ) ) ; } if ( p . nextToken ( ) != JsonToken . FIELD_NAME ) { throw ctxt . wrongTokenException ( p , JsonToken . FIELD_NAME , "need JSON String that contains type id (for subtype of " + baseTypeName ( ) + ")" ) ; } final String typeId = p . getText ( ) ; JsonDeserializer < Object > deser = _findDeserializer ( ctxt , typeId ) ; p . nextToken ( ) ; if ( _typeIdVisible && p . getCurrentToken ( ) == JsonToken . START_OBJECT ) { TokenBuffer tb = new TokenBuffer ( null , false ) ; tb . writeStartObject ( ) ; tb . writeFieldName ( _typePropertyName ) ; tb . writeString ( typeId ) ; p = JsonParserSequence . createFlattened ( tb . asParser ( p ) , p ) ; p . nextToken ( ) ; } Object value = deser . deserialize ( p , ctxt ) ; if ( p . nextToken ( ) != JsonToken . END_OBJECT ) { throw ctxt . wrongTokenException ( p , JsonToken . END_OBJECT , "expected closing END_OBJECT after type information and deserialized value" ) ; } return value ; }
@SuppressWarnings("resource") private final Object _deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { // 02-Aug-2013, tatu: May need to use native type ids if (p.canReadTypeId()) { Object typeId = p.getTypeId(); if (typeId != null) { return _deserializeWithNativeTypeId(p, ctxt, typeId); } } // first, sanity checks JsonToken t = p.getCurrentToken(); if (t == JsonToken.START_OBJECT) { // should always get field name, but just in case... if (p.nextToken() != JsonToken.FIELD_NAME) { throw ctxt.wrongTokenException(p, JsonToken.FIELD_NAME, "need JSON String that contains type id (for subtype of "+baseTypeName()+")"); } } else if (t != JsonToken.FIELD_NAME) { throw ctxt.wrongTokenException(p, JsonToken.START_OBJECT, "need JSON Object to contain As.WRAPPER_OBJECT type information for class "+baseTypeName()); } final String typeId = p.getText(); JsonDeserializer<Object> deser = _findDeserializer(ctxt, typeId); p.nextToken(); // Minor complication: we may need to merge type id in? if (_typeIdVisible && p.getCurrentToken() == JsonToken.START_OBJECT) { // but what if there's nowhere to add it in? Error? Or skip? For now, skip. TokenBuffer tb = new TokenBuffer(null, false); tb.writeStartObject(); // recreate START_OBJECT tb.writeFieldName(_typePropertyName); tb.writeString(typeId); p = JsonParserSequence.createFlattened(tb.asParser(p), p); p.nextToken(); } Object value = deser.deserialize(p, ctxt); // And then need the closing END_OBJECT if (p.nextToken() != JsonToken.END_OBJECT) { throw ctxt.wrongTokenException(p, JsonToken.END_OBJECT, "expected closing END_OBJECT after type information and deserialized value"); } return value; }
@ SuppressWarnings ( "resource" ) private final Object _deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException { if ( p . canReadTypeId ( ) ) { Object typeId = p . getTypeId ( ) ; if ( typeId != null ) { return _deserializeWithNativeTypeId ( p , ctxt , typeId ) ; } } JsonToken t = p . getCurrentToken ( ) ; if ( t == JsonToken . START_OBJECT ) { if ( p . nextToken ( ) != JsonToken . FIELD_NAME ) { throw ctxt . wrongTokenException ( p , JsonToken . FIELD_NAME , "need JSON String that contains type id (for subtype of " + baseTypeName ( ) + ")" ) ; } } else if ( t != JsonToken . FIELD_NAME ) { throw ctxt . wrongTokenException ( p , JsonToken . START_OBJECT , "need JSON Object to contain As.WRAPPER_OBJECT type information for class " + baseTypeName ( ) ) ; } final String typeId = p . getText ( ) ; JsonDeserializer < Object > deser = _findDeserializer ( ctxt , typeId ) ; p . nextToken ( ) ; if ( _typeIdVisible && p . getCurrentToken ( ) == JsonToken . START_OBJECT ) { TokenBuffer tb = new TokenBuffer ( null , false ) ; tb . writeStartObject ( ) ; tb . writeFieldName ( _typePropertyName ) ; tb . writeString ( typeId ) ; p = JsonParserSequence . createFlattened ( tb . asParser ( p ) , p ) ; p . nextToken ( ) ; } Object value = deser . deserialize ( p , ctxt ) ; if ( p . nextToken ( ) != JsonToken . END_OBJECT ) { throw ctxt . wrongTokenException ( p , JsonToken . END_OBJECT , "expected closing END_OBJECT after type information and deserialized value" ) ; } return value ; }
Math
106
src/java/org/apache/commons/math/fraction/ProperFractionFormat.java
130
206
[math] Function math.fraction.ProperFractionFormat.parse(String, ParsePosition) return illogical result
Hello, I find illogical returned result from function "Fraction parse(String source, ParsePostion pos)" (in class ProperFractionFormat of the Fraction Package) of the Commons Math library. Please see the following code segment for more details: " ProperFractionFormat properFormat = new ProperFractionFormat(); result = null; String source = "1 -1 / 2"; ParsePosition pos = new ParsePosition(0); //Test 1 : fail public void testParseNegative(){ String source = "-1 -2 / 3"; ParsePosition pos = new ParsePosition(0); Fraction actual = properFormat.parse(source, pos); assertNull(actual); } // Test2: success public void testParseNegative(){ String source = "-1 -2 / 3"; ParsePosition pos = new ParsePosition(0); Fraction actual = properFormat.parse(source, pos); // return Fraction 1/3 assertEquals(1, source.getNumerator()); assertEquals(3, source.getDenominator()); } " Note: Similarly, when I passed in the following inputs: input 2: (source = “1 2 / -3”, pos = 0) input 3: ( source = ” -1 -2 / 3”, pos = 0) Function "Fraction parse(String, ParsePosition)" returned Fraction 1/3 (means the result Fraction had numerator = 1 and denominator = 3)for all 3 inputs above. I think the function does not handle parsing the numberator/ denominator properly incase input string provide invalid numerator/denominator. Thank you!
public Fraction parse(String source, ParsePosition pos) { // try to parse improper fraction Fraction ret = super.parse(source, pos); if (ret != null) { return ret; } int initialIndex = pos.getIndex(); // parse whitespace parseAndIgnoreWhitespace(source, pos); // parse whole Number whole = getWholeFormat().parse(source, pos); if (whole == null) { // invalid integer number // set index back to initial, error index should already be set // character examined. pos.setIndex(initialIndex); return null; } // parse whitespace parseAndIgnoreWhitespace(source, pos); // parse numerator Number num = getNumeratorFormat().parse(source, pos); if (num == null) { // invalid integer number // set index back to initial, error index should already be set // character examined. pos.setIndex(initialIndex); return null; } // minus signs should be leading, invalid expression // parse '/' int startIndex = pos.getIndex(); char c = parseNextCharacter(source, pos); switch (c) { case 0 : // no '/' // return num as a fraction return new Fraction(num.intValue(), 1); case '/' : // found '/', continue parsing denominator break; default : // invalid '/' // set index back to initial, error index should be the last // character examined. pos.setIndex(initialIndex); pos.setErrorIndex(startIndex); return null; } // parse whitespace parseAndIgnoreWhitespace(source, pos); // parse denominator Number den = getDenominatorFormat().parse(source, pos); if (den == null) { // invalid integer number // set index back to initial, error index should already be set // character examined. pos.setIndex(initialIndex); return null; } // minus signs must be leading, invalid int w = whole.intValue(); int n = num.intValue(); int d = den.intValue(); return new Fraction(((Math.abs(w) * d) + n) * MathUtils.sign(w), d); }
public Fraction parse ( String source , ParsePosition pos ) { Fraction ret = super . parse ( source , pos ) ; if ( ret != null ) { return ret ; } int initialIndex = pos . getIndex ( ) ; parseAndIgnoreWhitespace ( source , pos ) ; Number whole = getWholeFormat ( ) . parse ( source , pos ) ; if ( whole == null ) { pos . setIndex ( initialIndex ) ; return null ; } parseAndIgnoreWhitespace ( source , pos ) ; Number num = getNumeratorFormat ( ) . parse ( source , pos ) ; if ( num == null ) { pos . setIndex ( initialIndex ) ; return null ; } int startIndex = pos . getIndex ( ) ; char c = parseNextCharacter ( source , pos ) ; switch ( c ) { case 0 : return new Fraction ( num . intValue ( ) , 1 ) ; case '/' : break ; default : pos . setIndex ( initialIndex ) ; pos . setErrorIndex ( startIndex ) ; return null ; } parseAndIgnoreWhitespace ( source , pos ) ; Number den = getDenominatorFormat ( ) . parse ( source , pos ) ; if ( den == null ) { pos . setIndex ( initialIndex ) ; return null ; } int w = whole . intValue ( ) ; int n = num . intValue ( ) ; int d = den . intValue ( ) ; return new Fraction ( ( ( Math . abs ( w ) * d ) + n ) * MathUtils . sign ( w ) , d ) ; }
public Fraction parse(String source, ParsePosition pos) { // try to parse improper fraction Fraction ret = super.parse(source, pos); if (ret != null) { return ret; } int initialIndex = pos.getIndex(); // parse whitespace parseAndIgnoreWhitespace(source, pos); // parse whole Number whole = getWholeFormat().parse(source, pos); if (whole == null) { // invalid integer number // set index back to initial, error index should already be set // character examined. pos.setIndex(initialIndex); return null; } // parse whitespace parseAndIgnoreWhitespace(source, pos); // parse numerator Number num = getNumeratorFormat().parse(source, pos); if (num == null) { // invalid integer number // set index back to initial, error index should already be set // character examined. pos.setIndex(initialIndex); return null; } if (num.intValue() < 0) { // minus signs should be leading, invalid expression pos.setIndex(initialIndex); return null; } // parse '/' int startIndex = pos.getIndex(); char c = parseNextCharacter(source, pos); switch (c) { case 0 : // no '/' // return num as a fraction return new Fraction(num.intValue(), 1); case '/' : // found '/', continue parsing denominator break; default : // invalid '/' // set index back to initial, error index should be the last // character examined. pos.setIndex(initialIndex); pos.setErrorIndex(startIndex); return null; } // parse whitespace parseAndIgnoreWhitespace(source, pos); // parse denominator Number den = getDenominatorFormat().parse(source, pos); if (den == null) { // invalid integer number // set index back to initial, error index should already be set // character examined. pos.setIndex(initialIndex); return null; } if (den.intValue() < 0) { // minus signs must be leading, invalid pos.setIndex(initialIndex); return null; } int w = whole.intValue(); int n = num.intValue(); int d = den.intValue(); return new Fraction(((Math.abs(w) * d) + n) * MathUtils.sign(w), d); }
public Fraction parse ( String source , ParsePosition pos ) { Fraction ret = super . parse ( source , pos ) ; if ( ret != null ) { return ret ; } int initialIndex = pos . getIndex ( ) ; parseAndIgnoreWhitespace ( source , pos ) ; Number whole = getWholeFormat ( ) . parse ( source , pos ) ; if ( whole == null ) { pos . setIndex ( initialIndex ) ; return null ; } parseAndIgnoreWhitespace ( source , pos ) ; Number num = getNumeratorFormat ( ) . parse ( source , pos ) ; if ( num == null ) { pos . setIndex ( initialIndex ) ; return null ; } if ( num . intValue ( ) < 0 ) { pos . setIndex ( initialIndex ) ; return null ; } int startIndex = pos . getIndex ( ) ; char c = parseNextCharacter ( source , pos ) ; switch ( c ) { case 0 : return new Fraction ( num . intValue ( ) , 1 ) ; case '/' : break ; default : pos . setIndex ( initialIndex ) ; pos . setErrorIndex ( startIndex ) ; return null ; } parseAndIgnoreWhitespace ( source , pos ) ; Number den = getDenominatorFormat ( ) . parse ( source , pos ) ; if ( den == null ) { pos . setIndex ( initialIndex ) ; return null ; } if ( den . intValue ( ) < 0 ) { pos . setIndex ( initialIndex ) ; return null ; } int w = whole . intValue ( ) ; int n = num . intValue ( ) ; int d = den . intValue ( ) ; return new Fraction ( ( ( Math . abs ( w ) * d ) + n ) * MathUtils . sign ( w ) , d ) ; }
Math
50
src/main/java/org/apache/commons/math/analysis/solvers/BaseSecantSolver.java
128
253
"RegulaFalsiSolver" failure
The following unit test: {code} @Test public void testBug() { final UnivariateRealFunction f = new UnivariateRealFunction() { @Override public double value(double x) { return Math.exp(x) - Math.pow(Math.PI, 3.0); } }; UnivariateRealSolver solver = new RegulaFalsiSolver(); double root = solver.solve(100, f, 1, 10); } {code} fails with {noformat} illegal state: maximal count (100) exceeded: evaluations {noformat} Using "PegasusSolver", the answer is found after 17 evaluations.
protected final double doSolve() { // Get initial solution double x0 = getMin(); double x1 = getMax(); double f0 = computeObjectiveValue(x0); double f1 = computeObjectiveValue(x1); // If one of the bounds is the exact root, return it. Since these are // not under-approximations or over-approximations, we can return them // regardless of the allowed solutions. if (f0 == 0.0) { return x0; } if (f1 == 0.0) { return x1; } // Verify bracketing of initial solution. verifyBracketing(x0, x1); // Get accuracies. final double ftol = getFunctionValueAccuracy(); final double atol = getAbsoluteAccuracy(); final double rtol = getRelativeAccuracy(); // Keep track of inverted intervals, meaning that the left bound is // larger than the right bound. boolean inverted = false; // Keep finding better approximations. while (true) { // Calculate the next approximation. final double x = x1 - ((f1 * (x1 - x0)) / (f1 - f0)); final double fx = computeObjectiveValue(x); // If the new approximation is the exact root, return it. Since // this is not an under-approximation or an over-approximation, // we can return it regardless of the allowed solutions. if (fx == 0.0) { return x; } // Update the bounds with the new approximation. if (f1 * fx < 0) { // The value of x1 has switched to the other bound, thus inverting // the interval. x0 = x1; f0 = f1; inverted = !inverted; } else { switch (method) { case ILLINOIS: f0 *= 0.5; break; case PEGASUS: f0 *= f1 / (f1 + fx); break; case REGULA_FALSI: // Nothing. if (x == x1) { x0 = 0.5 * (x0 + x1 - FastMath.max(rtol * FastMath.abs(x1), atol)); f0 = computeObjectiveValue(x0); } break; default: // Should never happen. throw new MathInternalError(); } } // Update from [x0, x1] to [x0, x]. x1 = x; f1 = fx; // If the function value of the last approximation is too small, // given the function value accuracy, then we can't get closer to // the root than we already are. 
if (FastMath.abs(f1) <= ftol) { switch (allowed) { case ANY_SIDE: return x1; case LEFT_SIDE: if (inverted) { return x1; } break; case RIGHT_SIDE: if (!inverted) { return x1; } break; case BELOW_SIDE: if (f1 <= 0) { return x1; } break; case ABOVE_SIDE: if (f1 >= 0) { return x1; } break; default: throw new MathInternalError(); } } // If the current interval is within the given accuracies, we // are satisfied with the current approximation. if (FastMath.abs(x1 - x0) < FastMath.max(rtol * FastMath.abs(x1), atol)) { switch (allowed) { case ANY_SIDE: return x1; case LEFT_SIDE: return inverted ? x1 : x0; case RIGHT_SIDE: return inverted ? x0 : x1; case BELOW_SIDE: return (f1 <= 0) ? x1 : x0; case ABOVE_SIDE: return (f1 >= 0) ? x1 : x0; default: throw new MathInternalError(); } } } }
protected final double doSolve ( ) { double x0 = getMin ( ) ; double x1 = getMax ( ) ; double f0 = computeObjectiveValue ( x0 ) ; double f1 = computeObjectiveValue ( x1 ) ; if ( f0 == 0.0 ) { return x0 ; } if ( f1 == 0.0 ) { return x1 ; } verifyBracketing ( x0 , x1 ) ; final double ftol = getFunctionValueAccuracy ( ) ; final double atol = getAbsoluteAccuracy ( ) ; final double rtol = getRelativeAccuracy ( ) ; boolean inverted = false ; while ( true ) { final double x = x1 - ( ( f1 * ( x1 - x0 ) ) / ( f1 - f0 ) ) ; final double fx = computeObjectiveValue ( x ) ; if ( fx == 0.0 ) { return x ; } if ( f1 * fx < 0 ) { x0 = x1 ; f0 = f1 ; inverted = ! inverted ; } else { switch ( method ) { case ILLINOIS : f0 *= 0.5 ; break ; case PEGASUS : f0 *= f1 / ( f1 + fx ) ; break ; case REGULA_FALSI : if ( x == x1 ) { x0 = 0.5 * ( x0 + x1 - FastMath . max ( rtol * FastMath . abs ( x1 ) , atol ) ) ; f0 = computeObjectiveValue ( x0 ) ; } break ; default : throw new MathInternalError ( ) ; } } x1 = x ; f1 = fx ; if ( FastMath . abs ( f1 ) <= ftol ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : if ( inverted ) { return x1 ; } break ; case RIGHT_SIDE : if ( ! inverted ) { return x1 ; } break ; case BELOW_SIDE : if ( f1 <= 0 ) { return x1 ; } break ; case ABOVE_SIDE : if ( f1 >= 0 ) { return x1 ; } break ; default : throw new MathInternalError ( ) ; } } if ( FastMath . abs ( x1 - x0 ) < FastMath . max ( rtol * FastMath . abs ( x1 ) , atol ) ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : return inverted ? x1 : x0 ; case RIGHT_SIDE : return inverted ? x0 : x1 ; case BELOW_SIDE : return ( f1 <= 0 ) ? x1 : x0 ; case ABOVE_SIDE : return ( f1 >= 0 ) ? x1 : x0 ; default : throw new MathInternalError ( ) ; } } } }
protected final double doSolve() { // Get initial solution double x0 = getMin(); double x1 = getMax(); double f0 = computeObjectiveValue(x0); double f1 = computeObjectiveValue(x1); // If one of the bounds is the exact root, return it. Since these are // not under-approximations or over-approximations, we can return them // regardless of the allowed solutions. if (f0 == 0.0) { return x0; } if (f1 == 0.0) { return x1; } // Verify bracketing of initial solution. verifyBracketing(x0, x1); // Get accuracies. final double ftol = getFunctionValueAccuracy(); final double atol = getAbsoluteAccuracy(); final double rtol = getRelativeAccuracy(); // Keep track of inverted intervals, meaning that the left bound is // larger than the right bound. boolean inverted = false; // Keep finding better approximations. while (true) { // Calculate the next approximation. final double x = x1 - ((f1 * (x1 - x0)) / (f1 - f0)); final double fx = computeObjectiveValue(x); // If the new approximation is the exact root, return it. Since // this is not an under-approximation or an over-approximation, // we can return it regardless of the allowed solutions. if (fx == 0.0) { return x; } // Update the bounds with the new approximation. if (f1 * fx < 0) { // The value of x1 has switched to the other bound, thus inverting // the interval. x0 = x1; f0 = f1; inverted = !inverted; } else { switch (method) { case ILLINOIS: f0 *= 0.5; break; case PEGASUS: f0 *= f1 / (f1 + fx); break; case REGULA_FALSI: // Nothing. break; default: // Should never happen. throw new MathInternalError(); } } // Update from [x0, x1] to [x0, x]. x1 = x; f1 = fx; // If the function value of the last approximation is too small, // given the function value accuracy, then we can't get closer to // the root than we already are. 
if (FastMath.abs(f1) <= ftol) { switch (allowed) { case ANY_SIDE: return x1; case LEFT_SIDE: if (inverted) { return x1; } break; case RIGHT_SIDE: if (!inverted) { return x1; } break; case BELOW_SIDE: if (f1 <= 0) { return x1; } break; case ABOVE_SIDE: if (f1 >= 0) { return x1; } break; default: throw new MathInternalError(); } } // If the current interval is within the given accuracies, we // are satisfied with the current approximation. if (FastMath.abs(x1 - x0) < FastMath.max(rtol * FastMath.abs(x1), atol)) { switch (allowed) { case ANY_SIDE: return x1; case LEFT_SIDE: return inverted ? x1 : x0; case RIGHT_SIDE: return inverted ? x0 : x1; case BELOW_SIDE: return (f1 <= 0) ? x1 : x0; case ABOVE_SIDE: return (f1 >= 0) ? x1 : x0; default: throw new MathInternalError(); } } } }
protected final double doSolve ( ) { double x0 = getMin ( ) ; double x1 = getMax ( ) ; double f0 = computeObjectiveValue ( x0 ) ; double f1 = computeObjectiveValue ( x1 ) ; if ( f0 == 0.0 ) { return x0 ; } if ( f1 == 0.0 ) { return x1 ; } verifyBracketing ( x0 , x1 ) ; final double ftol = getFunctionValueAccuracy ( ) ; final double atol = getAbsoluteAccuracy ( ) ; final double rtol = getRelativeAccuracy ( ) ; boolean inverted = false ; while ( true ) { final double x = x1 - ( ( f1 * ( x1 - x0 ) ) / ( f1 - f0 ) ) ; final double fx = computeObjectiveValue ( x ) ; if ( fx == 0.0 ) { return x ; } if ( f1 * fx < 0 ) { x0 = x1 ; f0 = f1 ; inverted = ! inverted ; } else { switch ( method ) { case ILLINOIS : f0 *= 0.5 ; break ; case PEGASUS : f0 *= f1 / ( f1 + fx ) ; break ; case REGULA_FALSI : break ; default : throw new MathInternalError ( ) ; } } x1 = x ; f1 = fx ; if ( FastMath . abs ( f1 ) <= ftol ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : if ( inverted ) { return x1 ; } break ; case RIGHT_SIDE : if ( ! inverted ) { return x1 ; } break ; case BELOW_SIDE : if ( f1 <= 0 ) { return x1 ; } break ; case ABOVE_SIDE : if ( f1 >= 0 ) { return x1 ; } break ; default : throw new MathInternalError ( ) ; } } if ( FastMath . abs ( x1 - x0 ) < FastMath . max ( rtol * FastMath . abs ( x1 ) , atol ) ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : return inverted ? x1 : x0 ; case RIGHT_SIDE : return inverted ? x0 : x1 ; case BELOW_SIDE : return ( f1 <= 0 ) ? x1 : x0 ; case ABOVE_SIDE : return ( f1 >= 0 ) ? x1 : x0 ; default : throw new MathInternalError ( ) ; } } } }
Mockito
18
src/org/mockito/internal/stubbing/defaultanswers/ReturnsEmptyValues.java
82
118
Return empty value for Iterables
http://code.google.com/p/mockito/issues/detail?id=175 I expect an Iterable to be mocked by default with an empty Iterable. I understand from the initial issue this behavior would be introduced in Mockito 2, but beta-8 still returns null. Could we return null for Iterables ? Should we have the same behavior for Iterator ? Thanks
Object returnValueFor(Class<?> type) { if (Primitives.isPrimitiveOrWrapper(type)) { return Primitives.defaultValueForPrimitiveOrWrapper(type); //new instances are used instead of Collections.emptyList(), etc. //to avoid UnsupportedOperationException if code under test modifies returned collection } else if (type == Collection.class) { return new LinkedList<Object>(); } else if (type == Set.class) { return new HashSet<Object>(); } else if (type == HashSet.class) { return new HashSet<Object>(); } else if (type == SortedSet.class) { return new TreeSet<Object>(); } else if (type == TreeSet.class) { return new TreeSet<Object>(); } else if (type == LinkedHashSet.class) { return new LinkedHashSet<Object>(); } else if (type == List.class) { return new LinkedList<Object>(); } else if (type == LinkedList.class) { return new LinkedList<Object>(); } else if (type == ArrayList.class) { return new ArrayList<Object>(); } else if (type == Map.class) { return new HashMap<Object, Object>(); } else if (type == HashMap.class) { return new HashMap<Object, Object>(); } else if (type == SortedMap.class) { return new TreeMap<Object, Object>(); } else if (type == TreeMap.class) { return new TreeMap<Object, Object>(); } else if (type == LinkedHashMap.class) { return new LinkedHashMap<Object, Object>(); } //Let's not care about the rest of collections. return null; }
Object returnValueFor ( Class < ? > type ) { if ( Primitives . isPrimitiveOrWrapper ( type ) ) { return Primitives . defaultValueForPrimitiveOrWrapper ( type ) ; } else if ( type == Collection . class ) { return new LinkedList < Object > ( ) ; } else if ( type == Set . class ) { return new HashSet < Object > ( ) ; } else if ( type == HashSet . class ) { return new HashSet < Object > ( ) ; } else if ( type == SortedSet . class ) { return new TreeSet < Object > ( ) ; } else if ( type == TreeSet . class ) { return new TreeSet < Object > ( ) ; } else if ( type == LinkedHashSet . class ) { return new LinkedHashSet < Object > ( ) ; } else if ( type == List . class ) { return new LinkedList < Object > ( ) ; } else if ( type == LinkedList . class ) { return new LinkedList < Object > ( ) ; } else if ( type == ArrayList . class ) { return new ArrayList < Object > ( ) ; } else if ( type == Map . class ) { return new HashMap < Object , Object > ( ) ; } else if ( type == HashMap . class ) { return new HashMap < Object , Object > ( ) ; } else if ( type == SortedMap . class ) { return new TreeMap < Object , Object > ( ) ; } else if ( type == TreeMap . class ) { return new TreeMap < Object , Object > ( ) ; } else if ( type == LinkedHashMap . class ) { return new LinkedHashMap < Object , Object > ( ) ; } return null ; }
Object returnValueFor(Class<?> type) { if (Primitives.isPrimitiveOrWrapper(type)) { return Primitives.defaultValueForPrimitiveOrWrapper(type); //new instances are used instead of Collections.emptyList(), etc. //to avoid UnsupportedOperationException if code under test modifies returned collection } else if (type == Iterable.class) { return new ArrayList<Object>(0); } else if (type == Collection.class) { return new LinkedList<Object>(); } else if (type == Set.class) { return new HashSet<Object>(); } else if (type == HashSet.class) { return new HashSet<Object>(); } else if (type == SortedSet.class) { return new TreeSet<Object>(); } else if (type == TreeSet.class) { return new TreeSet<Object>(); } else if (type == LinkedHashSet.class) { return new LinkedHashSet<Object>(); } else if (type == List.class) { return new LinkedList<Object>(); } else if (type == LinkedList.class) { return new LinkedList<Object>(); } else if (type == ArrayList.class) { return new ArrayList<Object>(); } else if (type == Map.class) { return new HashMap<Object, Object>(); } else if (type == HashMap.class) { return new HashMap<Object, Object>(); } else if (type == SortedMap.class) { return new TreeMap<Object, Object>(); } else if (type == TreeMap.class) { return new TreeMap<Object, Object>(); } else if (type == LinkedHashMap.class) { return new LinkedHashMap<Object, Object>(); } //Let's not care about the rest of collections. return null; }
Object returnValueFor ( Class < ? > type ) { if ( Primitives . isPrimitiveOrWrapper ( type ) ) { return Primitives . defaultValueForPrimitiveOrWrapper ( type ) ; } else if ( type == Iterable . class ) { return new ArrayList < Object > ( 0 ) ; } else if ( type == Collection . class ) { return new LinkedList < Object > ( ) ; } else if ( type == Set . class ) { return new HashSet < Object > ( ) ; } else if ( type == HashSet . class ) { return new HashSet < Object > ( ) ; } else if ( type == SortedSet . class ) { return new TreeSet < Object > ( ) ; } else if ( type == TreeSet . class ) { return new TreeSet < Object > ( ) ; } else if ( type == LinkedHashSet . class ) { return new LinkedHashSet < Object > ( ) ; } else if ( type == List . class ) { return new LinkedList < Object > ( ) ; } else if ( type == LinkedList . class ) { return new LinkedList < Object > ( ) ; } else if ( type == ArrayList . class ) { return new ArrayList < Object > ( ) ; } else if ( type == Map . class ) { return new HashMap < Object , Object > ( ) ; } else if ( type == HashMap . class ) { return new HashMap < Object , Object > ( ) ; } else if ( type == SortedMap . class ) { return new TreeMap < Object , Object > ( ) ; } else if ( type == TreeMap . class ) { return new TreeMap < Object , Object > ( ) ; } else if ( type == LinkedHashMap . class ) { return new LinkedHashMap < Object , Object > ( ) ; } return null ; }
JxPath
21
src/java/org/apache/commons/jxpath/ri/model/beans/PropertyPointer.java
151
153
null handling is inconsistent
Comparing a vaule to null using unequals (\!=) yields false! {noformat} Map<String, Integer> m = new HashMap<String, Integer>(); m.put("a", 1); m.put("b", null); m.put("c", 1); JXPathContext c = JXPathContext.newContext(m); System.out.println(c.getValue("a != b") + " should be true"); System.out.println(c.getValue("a != c") + " should be false"); System.out.println(c.getValue("a = b") + " should be false"); System.out.println(c.getValue("a = c") + " should be true"); System.out.println(c.getValue("not(a = b)") + " should be true"); System.out.println(c.getValue("not(a = c)") + " should be false"); {noformat} Output using 1.3: {color:red} false should be true{color} false should be false false should be false true should be true true should be true false should be false In 1.2 it works correctly!
public int getLength() { return ValueUtils.getLength(getBaseValue()); }
public int getLength ( ) { return ValueUtils . getLength ( getBaseValue ( ) ) ; }
public int getLength() { Object baseValue = getBaseValue(); return baseValue == null ? 1 : ValueUtils.getLength(baseValue); }
public int getLength ( ) { Object baseValue = getBaseValue ( ) ; return baseValue == null ? 1 : ValueUtils . getLength ( baseValue ) ; }
JacksonDatabind
62
src/main/java/com/fasterxml/jackson/databind/deser/std/CollectionDeserializer.java
169
208
Custom UnmodifiableSetMixin Fails in Jackson 2.7+ but works in Jackson 2.6
I'd like to be able to deserialize an `UnmodifiableSet` with default typing enabled. To do this I have created an `UnmodifiableSetMixin` as shown below: **NOTE**: You can find a minimal project with all the source code to reproduce this issue at https://github.com/rwinch/jackson-unmodifiableset-mixin ``` java import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeInfo; import java.util.Set; @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY) public abstract class UnmodifiableSetMixin { @JsonCreator public UnmodifiableSetMixin(Set<?> s) {} } ``` I then try to use this to deserialize an empty set. ``` java public class UnmodifiableSetMixinTest { static final String EXPECTED_JSON = "[\"java.util.Collections$UnmodifiableSet\",[]]"; ObjectMapper mapper; @Before public void setup() { mapper = new ObjectMapper(); mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY); mapper.addMixIn(Collections.unmodifiableSet(Collections.<String>emptySet()).getClass(), UnmodifiableSetMixin.class); } @Test @SuppressWarnings("unchecked") public void read() throws Exception { Set<String> foo = mapper.readValue(EXPECTED_JSON, Set.class); assertThat(foo).isEmpty(); } } ``` The test passes with Jackson 2.6, but fails using Jackson 2.7+ (including Jackson 2.8.3) with the following stack trace: ``` java.lang.IllegalStateException: No default constructor for [collection type; class java.util.Collections$UnmodifiableSet, contains [simple type, class java.lang.Object]] at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createUsingDefault(StdValueInstantiator.java:240) at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:249) at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:26) at 
com.fasterxml.jackson.databind.jsontype.impl.AsArrayTypeDeserializer._deserialize(AsArrayTypeDeserializer.java:110) at com.fasterxml.jackson.databind.jsontype.impl.AsArrayTypeDeserializer.deserializeTypedFromArray(AsArrayTypeDeserializer.java:50) at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserializeWithType(CollectionDeserializer.java:310) at com.fasterxml.jackson.databind.deser.impl.TypeWrappedDeserializer.deserialize(TypeWrappedDeserializer.java:42) at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3788) at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2779) at sample.UnmodifiableSetMixinTest.read(UnmodifiableSetMixinTest.java:36) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) at 
org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) at org.junit.runners.ParentRunner.run(ParentRunner.java:363) at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:86) at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:459) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:678) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:382) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:192) ``` This seems like a passivity issue. Is there a workaround for this problem?
@Override public CollectionDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { // May need to resolve types for delegate-based creators: JsonDeserializer<Object> delegateDeser = null; if (_valueInstantiator != null) { if (_valueInstantiator.canCreateUsingDelegate()) { JavaType delegateType = _valueInstantiator.getDelegateType(ctxt.getConfig()); if (delegateType == null) { throw new IllegalArgumentException("Invalid delegate-creator definition for "+_collectionType +": value instantiator ("+_valueInstantiator.getClass().getName() +") returned true for 'canCreateUsingDelegate()', but null for 'getDelegateType()'"); } delegateDeser = findDeserializer(ctxt, delegateType, property); } } // [databind#1043]: allow per-property allow-wrapping of single overrides: // 11-Dec-2015, tatu: Should we pass basic `Collection.class`, or more refined? Mostly // comes down to "List vs Collection" I suppose... for now, pass Collection Boolean unwrapSingle = findFormatFeature(ctxt, property, Collection.class, JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY); // also, often value deserializer is resolved here: JsonDeserializer<?> valueDeser = _valueDeserializer; // May have a content converter valueDeser = findConvertingContentDeserializer(ctxt, property, valueDeser); final JavaType vt = _collectionType.getContentType(); if (valueDeser == null) { valueDeser = ctxt.findContextualValueDeserializer(vt, property); } else { // if directly assigned, probably not yet contextual, so: valueDeser = ctxt.handleSecondaryContextualization(valueDeser, property, vt); } // and finally, type deserializer needs context as well TypeDeserializer valueTypeDeser = _valueTypeDeserializer; if (valueTypeDeser != null) { valueTypeDeser = valueTypeDeser.forProperty(property); } return withResolved(delegateDeser, valueDeser, valueTypeDeser, unwrapSingle); }
@ Override public CollectionDeserializer createContextual ( DeserializationContext ctxt , BeanProperty property ) throws JsonMappingException { JsonDeserializer < Object > delegateDeser = null ; if ( _valueInstantiator != null ) { if ( _valueInstantiator . canCreateUsingDelegate ( ) ) { JavaType delegateType = _valueInstantiator . getDelegateType ( ctxt . getConfig ( ) ) ; if ( delegateType == null ) { throw new IllegalArgumentException ( "Invalid delegate-creator definition for " + _collectionType + ": value instantiator (" + _valueInstantiator . getClass ( ) . getName ( ) + ") returned true for 'canCreateUsingDelegate()', but null for 'getDelegateType()'" ) ; } delegateDeser = findDeserializer ( ctxt , delegateType , property ) ; } } Boolean unwrapSingle = findFormatFeature ( ctxt , property , Collection . class , JsonFormat . Feature . ACCEPT_SINGLE_VALUE_AS_ARRAY ) ; JsonDeserializer < ? > valueDeser = _valueDeserializer ; valueDeser = findConvertingContentDeserializer ( ctxt , property , valueDeser ) ; final JavaType vt = _collectionType . getContentType ( ) ; if ( valueDeser == null ) { valueDeser = ctxt . findContextualValueDeserializer ( vt , property ) ; } else { valueDeser = ctxt . handleSecondaryContextualization ( valueDeser , property , vt ) ; } TypeDeserializer valueTypeDeser = _valueTypeDeserializer ; if ( valueTypeDeser != null ) { valueTypeDeser = valueTypeDeser . forProperty ( property ) ; } return withResolved ( delegateDeser , valueDeser , valueTypeDeser , unwrapSingle ) ; }
@Override public CollectionDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { // May need to resolve types for delegate-based creators: JsonDeserializer<Object> delegateDeser = null; if (_valueInstantiator != null) { if (_valueInstantiator.canCreateUsingDelegate()) { JavaType delegateType = _valueInstantiator.getDelegateType(ctxt.getConfig()); if (delegateType == null) { throw new IllegalArgumentException("Invalid delegate-creator definition for "+_collectionType +": value instantiator ("+_valueInstantiator.getClass().getName() +") returned true for 'canCreateUsingDelegate()', but null for 'getDelegateType()'"); } delegateDeser = findDeserializer(ctxt, delegateType, property); } else if (_valueInstantiator.canCreateUsingArrayDelegate()) { JavaType delegateType = _valueInstantiator.getArrayDelegateType(ctxt.getConfig()); if (delegateType == null) { throw new IllegalArgumentException("Invalid array-delegate-creator definition for "+_collectionType +": value instantiator ("+_valueInstantiator.getClass().getName() +") returned true for 'canCreateUsingArrayDelegate()', but null for 'getArrayDelegateType()'"); } delegateDeser = findDeserializer(ctxt, delegateType, property); } } // [databind#1043]: allow per-property allow-wrapping of single overrides: // 11-Dec-2015, tatu: Should we pass basic `Collection.class`, or more refined? Mostly // comes down to "List vs Collection" I suppose... 
for now, pass Collection Boolean unwrapSingle = findFormatFeature(ctxt, property, Collection.class, JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY); // also, often value deserializer is resolved here: JsonDeserializer<?> valueDeser = _valueDeserializer; // May have a content converter valueDeser = findConvertingContentDeserializer(ctxt, property, valueDeser); final JavaType vt = _collectionType.getContentType(); if (valueDeser == null) { valueDeser = ctxt.findContextualValueDeserializer(vt, property); } else { // if directly assigned, probably not yet contextual, so: valueDeser = ctxt.handleSecondaryContextualization(valueDeser, property, vt); } // and finally, type deserializer needs context as well TypeDeserializer valueTypeDeser = _valueTypeDeserializer; if (valueTypeDeser != null) { valueTypeDeser = valueTypeDeser.forProperty(property); } return withResolved(delegateDeser, valueDeser, valueTypeDeser, unwrapSingle); }
@ Override public CollectionDeserializer createContextual ( DeserializationContext ctxt , BeanProperty property ) throws JsonMappingException { JsonDeserializer < Object > delegateDeser = null ; if ( _valueInstantiator != null ) { if ( _valueInstantiator . canCreateUsingDelegate ( ) ) { JavaType delegateType = _valueInstantiator . getDelegateType ( ctxt . getConfig ( ) ) ; if ( delegateType == null ) { throw new IllegalArgumentException ( "Invalid delegate-creator definition for " + _collectionType + ": value instantiator (" + _valueInstantiator . getClass ( ) . getName ( ) + ") returned true for 'canCreateUsingDelegate()', but null for 'getDelegateType()'" ) ; } delegateDeser = findDeserializer ( ctxt , delegateType , property ) ; } else if ( _valueInstantiator . canCreateUsingArrayDelegate ( ) ) { JavaType delegateType = _valueInstantiator . getArrayDelegateType ( ctxt . getConfig ( ) ) ; if ( delegateType == null ) { throw new IllegalArgumentException ( "Invalid array-delegate-creator definition for " + _collectionType + ": value instantiator (" + _valueInstantiator . getClass ( ) . getName ( ) + ") returned true for 'canCreateUsingArrayDelegate()', but null for 'getArrayDelegateType()'" ) ; } delegateDeser = findDeserializer ( ctxt , delegateType , property ) ; } } Boolean unwrapSingle = findFormatFeature ( ctxt , property , Collection . class , JsonFormat . Feature . ACCEPT_SINGLE_VALUE_AS_ARRAY ) ; JsonDeserializer < ? > valueDeser = _valueDeserializer ; valueDeser = findConvertingContentDeserializer ( ctxt , property , valueDeser ) ; final JavaType vt = _collectionType . getContentType ( ) ; if ( valueDeser == null ) { valueDeser = ctxt . findContextualValueDeserializer ( vt , property ) ; } else { valueDeser = ctxt . handleSecondaryContextualization ( valueDeser , property , vt ) ; } TypeDeserializer valueTypeDeser = _valueTypeDeserializer ; if ( valueTypeDeser != null ) { valueTypeDeser = valueTypeDeser . 
forProperty ( property ) ; } return withResolved ( delegateDeser , valueDeser , valueTypeDeser , unwrapSingle ) ; }
JacksonDatabind
74
src/main/java/com/fasterxml/jackson/databind/jsontype/impl/AsPropertyTypeDeserializer.java
133
160
AsPropertyTypeDeserializer ignores DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT
The `AsPropertyTypeDeserializer ` implementation does not respect the `DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT` feature. When deserializing an empty String it throws `DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT` instead of creating a null Object.
@SuppressWarnings("resource") protected Object _deserializeTypedUsingDefaultImpl(JsonParser p, DeserializationContext ctxt, TokenBuffer tb) throws IOException { // As per [JACKSON-614], may have default implementation to use JsonDeserializer<Object> deser = _findDefaultImplDeserializer(ctxt); if (deser != null) { if (tb != null) { tb.writeEndObject(); p = tb.asParser(p); // must move to point to the first token: p.nextToken(); } return deser.deserialize(p, ctxt); } // or, perhaps we just bumped into a "natural" value (boolean/int/double/String)? Object result = TypeDeserializer.deserializeIfNatural(p, ctxt, _baseType); if (result != null) { return result; } // or, something for which "as-property" won't work, changed into "wrapper-array" type: if (p.getCurrentToken() == JsonToken.START_ARRAY) { return super.deserializeTypedFromAny(p, ctxt); } ctxt.reportWrongTokenException(p, JsonToken.FIELD_NAME, "missing property '"+_typePropertyName+"' that is to contain type id (for class "+baseTypeName()+")"); return null; }
@ SuppressWarnings ( "resource" ) protected Object _deserializeTypedUsingDefaultImpl ( JsonParser p , DeserializationContext ctxt , TokenBuffer tb ) throws IOException { JsonDeserializer < Object > deser = _findDefaultImplDeserializer ( ctxt ) ; if ( deser != null ) { if ( tb != null ) { tb . writeEndObject ( ) ; p = tb . asParser ( p ) ; p . nextToken ( ) ; } return deser . deserialize ( p , ctxt ) ; } Object result = TypeDeserializer . deserializeIfNatural ( p , ctxt , _baseType ) ; if ( result != null ) { return result ; } if ( p . getCurrentToken ( ) == JsonToken . START_ARRAY ) { return super . deserializeTypedFromAny ( p , ctxt ) ; } ctxt . reportWrongTokenException ( p , JsonToken . FIELD_NAME , "missing property '" + _typePropertyName + "' that is to contain type id (for class " + baseTypeName ( ) + ")" ) ; return null ; }
@SuppressWarnings("resource") protected Object _deserializeTypedUsingDefaultImpl(JsonParser p, DeserializationContext ctxt, TokenBuffer tb) throws IOException { // As per [JACKSON-614], may have default implementation to use JsonDeserializer<Object> deser = _findDefaultImplDeserializer(ctxt); if (deser != null) { if (tb != null) { tb.writeEndObject(); p = tb.asParser(p); // must move to point to the first token: p.nextToken(); } return deser.deserialize(p, ctxt); } // or, perhaps we just bumped into a "natural" value (boolean/int/double/String)? Object result = TypeDeserializer.deserializeIfNatural(p, ctxt, _baseType); if (result != null) { return result; } // or, something for which "as-property" won't work, changed into "wrapper-array" type: if (p.getCurrentToken() == JsonToken.START_ARRAY) { return super.deserializeTypedFromAny(p, ctxt); } else if (p.getCurrentToken() == JsonToken.VALUE_STRING) { if (ctxt.isEnabled(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT)) { String str = p.getText().trim(); if (str.isEmpty()) { return null; } } } ctxt.reportWrongTokenException(p, JsonToken.FIELD_NAME, "missing property '"+_typePropertyName+"' that is to contain type id (for class "+baseTypeName()+")"); return null; }
@ SuppressWarnings ( "resource" ) protected Object _deserializeTypedUsingDefaultImpl ( JsonParser p , DeserializationContext ctxt , TokenBuffer tb ) throws IOException { JsonDeserializer < Object > deser = _findDefaultImplDeserializer ( ctxt ) ; if ( deser != null ) { if ( tb != null ) { tb . writeEndObject ( ) ; p = tb . asParser ( p ) ; p . nextToken ( ) ; } return deser . deserialize ( p , ctxt ) ; } Object result = TypeDeserializer . deserializeIfNatural ( p , ctxt , _baseType ) ; if ( result != null ) { return result ; } if ( p . getCurrentToken ( ) == JsonToken . START_ARRAY ) { return super . deserializeTypedFromAny ( p , ctxt ) ; } else if ( p . getCurrentToken ( ) == JsonToken . VALUE_STRING ) { if ( ctxt . isEnabled ( DeserializationFeature . ACCEPT_EMPTY_STRING_AS_NULL_OBJECT ) ) { String str = p . getText ( ) . trim ( ) ; if ( str . isEmpty ( ) ) { return null ; } } } ctxt . reportWrongTokenException ( p , JsonToken . FIELD_NAME , "missing property '" + _typePropertyName + "' that is to contain type id (for class " + baseTypeName ( ) + ")" ) ; return null ; }
Math
11
src/main/java/org/apache/commons/math3/distribution/MultivariateNormalDistribution.java
177
186
MultivariateNormalDistribution.density(double[]) returns wrong value when the dimension is odd
To reproduce: {code} Assert.assertEquals(0.398942280401433, new MultivariateNormalDistribution(new double[]{0}, new double[][]{{1}}).density(new double[]{0}), 1e-15); {code}
public double density(final double[] vals) throws DimensionMismatchException { final int dim = getDimension(); if (vals.length != dim) { throw new DimensionMismatchException(vals.length, dim); } return FastMath.pow(2 * FastMath.PI, -dim / 2) * FastMath.pow(covarianceMatrixDeterminant, -0.5) * getExponentTerm(vals); }
public double density ( final double [ ] vals ) throws DimensionMismatchException { final int dim = getDimension ( ) ; if ( vals . length != dim ) { throw new DimensionMismatchException ( vals . length , dim ) ; } return FastMath . pow ( 2 * FastMath . PI , - dim / 2 ) * FastMath . pow ( covarianceMatrixDeterminant , - 0.5 ) * getExponentTerm ( vals ) ; }
public double density(final double[] vals) throws DimensionMismatchException { final int dim = getDimension(); if (vals.length != dim) { throw new DimensionMismatchException(vals.length, dim); } return FastMath.pow(2 * FastMath.PI, -0.5 * dim) * FastMath.pow(covarianceMatrixDeterminant, -0.5) * getExponentTerm(vals); }
public double density ( final double [ ] vals ) throws DimensionMismatchException { final int dim = getDimension ( ) ; if ( vals . length != dim ) { throw new DimensionMismatchException ( vals . length , dim ) ; } return FastMath . pow ( 2 * FastMath . PI , - 0.5 * dim ) * FastMath . pow ( covarianceMatrixDeterminant , - 0.5 ) * getExponentTerm ( vals ) ; }
Cli
10
src/java/org/apache/commons/cli/Parser.java
44
47
Missing required options not throwing MissingOptionException
When an Options object is used to parse a second set of command arguments it won't throw a MissingOptionException. {code:java} import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; public class Example { public static void main(String[] args) throws ParseException { brokenExample(); workingExample(); } // throws exception as expected private static void workingExample() throws ParseException { String[] args = {}; Options opts = new Options(); opts.addOption(OptionBuilder.isRequired().create('v')); GnuParser parser = new GnuParser(); CommandLine secondCL = parser.parse(opts, args); System.out.println("Done workingExample"); } // fails to throw exception on second invocation of parse private static void brokenExample() throws ParseException { String[] firstArgs = { "-v" }; String[] secondArgs = {}; Options opts = new Options(); opts.addOption(OptionBuilder.isRequired().create('v')); GnuParser parser = new GnuParser(); CommandLine firstCL = parser.parse(opts, firstArgs); CommandLine secondCL = parser.parse(opts, secondArgs); System.out.println("Done brokenExample"); } } {code} This is a result of the Options object returning the reference to its own list and the parsers modifying that list. The first call is removing the required options as they are found and subsequent calls get back an empty list.
protected void setOptions(final Options options) { this.options = options; this.requiredOptions = options.getRequiredOptions(); }
protected void setOptions ( final Options options ) { this . options = options ; this . requiredOptions = options . getRequiredOptions ( ) ; }
protected void setOptions(final Options options) { this.options = options; this.requiredOptions = new ArrayList(options.getRequiredOptions()); }
protected void setOptions ( final Options options ) { this . options = options ; this . requiredOptions = new ArrayList ( options . getRequiredOptions ( ) ) ; }
JacksonDatabind
54
src/main/java/com/fasterxml/jackson/databind/ser/PropertyBuilder.java
67
171
`Optional.empty()` not excluded if property declared with type `Object`
Jackson version is 2.6.6 **Here is the code:** ``` ObjectMapper mapper = new ObjectMapper(); mapper.setSerializationInclusion(JsonInclude.Include.NON_ABSENT); mapper.registerModule(new Jdk8Module()); JsonResult result = new JsonResult(); result.setA(Optional.empty()); result.setB(Optional.empty()); System.out.println(mapper.writeValueAsString(result)); ``` ``` @Data public class JsonResult { private Object a; private Optional<Object> b; } ``` **Then I got the output: {"a":null}** **The real value of both is the same, why the results are different?** **How can I avoid null in such case?** By the way, I tried 'NON_EMPTY'. It can work, but it also ignores zero and empty array. I want to keep them.
@SuppressWarnings("deprecation") protected BeanPropertyWriter buildWriter(SerializerProvider prov, BeanPropertyDefinition propDef, JavaType declaredType, JsonSerializer<?> ser, TypeSerializer typeSer, TypeSerializer contentTypeSer, AnnotatedMember am, boolean defaultUseStaticTyping) throws JsonMappingException { // do we have annotation that forces type to use (to declared type or its super type)? JavaType serializationType = findSerializationType(am, defaultUseStaticTyping, declaredType); // Container types can have separate type serializers for content (value / element) type if (contentTypeSer != null) { /* 04-Feb-2010, tatu: Let's force static typing for collection, if there is * type information for contents. Should work well (for JAXB case); can be * revisited if this causes problems. */ if (serializationType == null) { // serializationType = TypeFactory.type(am.getGenericType(), _beanDesc.getType()); serializationType = declaredType; } JavaType ct = serializationType.getContentType(); // Not exactly sure why, but this used to occur; better check explicitly: if (ct == null) { throw new IllegalStateException("Problem trying to create BeanPropertyWriter for property '" +propDef.getName()+"' (of type "+_beanDesc.getType()+"); serialization type "+serializationType+" has no content"); } serializationType = serializationType.withContentTypeHandler(contentTypeSer); ct = serializationType.getContentType(); } Object valueToSuppress = null; boolean suppressNulls = false; JsonInclude.Value inclV = _defaultInclusion.withOverrides(propDef.findInclusion()); JsonInclude.Include inclusion = inclV.getValueInclusion(); if (inclusion == JsonInclude.Include.USE_DEFAULTS) { // should not occur but... inclusion = JsonInclude.Include.ALWAYS; } // 12-Jul-2016, tatu: [databind#1256] Need to make sure we consider type refinement JavaType actualType = (serializationType == null) ? 
declaredType : serializationType; switch (inclusion) { case NON_DEFAULT: // 11-Nov-2015, tatu: This is tricky because semantics differ between cases, // so that if enclosing class has this, we may need to values of property, // whereas for global defaults OR per-property overrides, we have more // static definition. Sigh. // First: case of class specifying it; try to find POJO property defaults if (_defaultInclusion.getValueInclusion() == JsonInclude.Include.NON_DEFAULT) { valueToSuppress = getPropertyDefaultValue(propDef.getName(), am, actualType); } else { valueToSuppress = getDefaultValue(actualType); } if (valueToSuppress == null) { suppressNulls = true; } else { if (valueToSuppress.getClass().isArray()) { valueToSuppress = ArrayBuilders.getArrayComparator(valueToSuppress); } } break; case NON_ABSENT: // new with 2.6, to support Guava/JDK8 Optionals // always suppress nulls suppressNulls = true; // and for referential types, also "empty", which in their case means "absent" if (declaredType.isReferenceType()) { valueToSuppress = BeanPropertyWriter.MARKER_FOR_EMPTY; } break; case NON_EMPTY: // always suppress nulls suppressNulls = true; // but possibly also 'empty' values: valueToSuppress = BeanPropertyWriter.MARKER_FOR_EMPTY; break; case NON_NULL: suppressNulls = true; // fall through case ALWAYS: // default default: // we may still want to suppress empty collections, as per [JACKSON-254]: if (declaredType.isContainerType() && !_config.isEnabled(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)) { valueToSuppress = BeanPropertyWriter.MARKER_FOR_EMPTY; } break; } BeanPropertyWriter bpw = new BeanPropertyWriter(propDef, am, _beanDesc.getClassAnnotations(), declaredType, ser, typeSer, serializationType, suppressNulls, valueToSuppress); // How about custom null serializer? 
Object serDef = _annotationIntrospector.findNullSerializer(am); if (serDef != null) { bpw.assignNullSerializer(prov.serializerInstance(am, serDef)); } // And then, handling of unwrapping NameTransformer unwrapper = _annotationIntrospector.findUnwrappingNameTransformer(am); if (unwrapper != null) { bpw = bpw.unwrappingWriter(unwrapper); } return bpw; }
@ SuppressWarnings ( "deprecation" ) protected BeanPropertyWriter buildWriter ( SerializerProvider prov , BeanPropertyDefinition propDef , JavaType declaredType , JsonSerializer < ? > ser , TypeSerializer typeSer , TypeSerializer contentTypeSer , AnnotatedMember am , boolean defaultUseStaticTyping ) throws JsonMappingException { JavaType serializationType = findSerializationType ( am , defaultUseStaticTyping , declaredType ) ; if ( contentTypeSer != null ) { if ( serializationType == null ) { serializationType = declaredType ; } JavaType ct = serializationType . getContentType ( ) ; if ( ct == null ) { throw new IllegalStateException ( "Problem trying to create BeanPropertyWriter for property '" + propDef . getName ( ) + "' (of type " + _beanDesc . getType ( ) + "); serialization type " + serializationType + " has no content" ) ; } serializationType = serializationType . withContentTypeHandler ( contentTypeSer ) ; ct = serializationType . getContentType ( ) ; } Object valueToSuppress = null ; boolean suppressNulls = false ; JsonInclude . Value inclV = _defaultInclusion . withOverrides ( propDef . findInclusion ( ) ) ; JsonInclude . Include inclusion = inclV . getValueInclusion ( ) ; if ( inclusion == JsonInclude . Include . USE_DEFAULTS ) { inclusion = JsonInclude . Include . ALWAYS ; } JavaType actualType = ( serializationType == null ) ? declaredType : serializationType ; switch ( inclusion ) { case NON_DEFAULT : if ( _defaultInclusion . getValueInclusion ( ) == JsonInclude . Include . NON_DEFAULT ) { valueToSuppress = getPropertyDefaultValue ( propDef . getName ( ) , am , actualType ) ; } else { valueToSuppress = getDefaultValue ( actualType ) ; } if ( valueToSuppress == null ) { suppressNulls = true ; } else { if ( valueToSuppress . getClass ( ) . isArray ( ) ) { valueToSuppress = ArrayBuilders . getArrayComparator ( valueToSuppress ) ; } } break ; case NON_ABSENT : suppressNulls = true ; if ( declaredType . 
isReferenceType ( ) ) { valueToSuppress = BeanPropertyWriter . MARKER_FOR_EMPTY ; } break ; case NON_EMPTY : suppressNulls = true ; valueToSuppress = BeanPropertyWriter . MARKER_FOR_EMPTY ; break ; case NON_NULL : suppressNulls = true ; case ALWAYS : default : if ( declaredType . isContainerType ( ) && ! _config . isEnabled ( SerializationFeature . WRITE_EMPTY_JSON_ARRAYS ) ) { valueToSuppress = BeanPropertyWriter . MARKER_FOR_EMPTY ; } break ; } BeanPropertyWriter bpw = new BeanPropertyWriter ( propDef , am , _beanDesc . getClassAnnotations ( ) , declaredType , ser , typeSer , serializationType , suppressNulls , valueToSuppress ) ; Object serDef = _annotationIntrospector . findNullSerializer ( am ) ; if ( serDef != null ) { bpw . assignNullSerializer ( prov . serializerInstance ( am , serDef ) ) ; } NameTransformer unwrapper = _annotationIntrospector . findUnwrappingNameTransformer ( am ) ; if ( unwrapper != null ) { bpw = bpw . unwrappingWriter ( unwrapper ) ; } return bpw ; }
@SuppressWarnings("deprecation") protected BeanPropertyWriter buildWriter(SerializerProvider prov, BeanPropertyDefinition propDef, JavaType declaredType, JsonSerializer<?> ser, TypeSerializer typeSer, TypeSerializer contentTypeSer, AnnotatedMember am, boolean defaultUseStaticTyping) throws JsonMappingException { // do we have annotation that forces type to use (to declared type or its super type)? JavaType serializationType = findSerializationType(am, defaultUseStaticTyping, declaredType); // Container types can have separate type serializers for content (value / element) type if (contentTypeSer != null) { /* 04-Feb-2010, tatu: Let's force static typing for collection, if there is * type information for contents. Should work well (for JAXB case); can be * revisited if this causes problems. */ if (serializationType == null) { // serializationType = TypeFactory.type(am.getGenericType(), _beanDesc.getType()); serializationType = declaredType; } JavaType ct = serializationType.getContentType(); // Not exactly sure why, but this used to occur; better check explicitly: if (ct == null) { throw new IllegalStateException("Problem trying to create BeanPropertyWriter for property '" +propDef.getName()+"' (of type "+_beanDesc.getType()+"); serialization type "+serializationType+" has no content"); } serializationType = serializationType.withContentTypeHandler(contentTypeSer); ct = serializationType.getContentType(); } Object valueToSuppress = null; boolean suppressNulls = false; JsonInclude.Value inclV = _defaultInclusion.withOverrides(propDef.findInclusion()); JsonInclude.Include inclusion = inclV.getValueInclusion(); if (inclusion == JsonInclude.Include.USE_DEFAULTS) { // should not occur but... inclusion = JsonInclude.Include.ALWAYS; } // 12-Jul-2016, tatu: [databind#1256] Need to make sure we consider type refinement JavaType actualType = (serializationType == null) ? 
declaredType : serializationType; switch (inclusion) { case NON_DEFAULT: // 11-Nov-2015, tatu: This is tricky because semantics differ between cases, // so that if enclosing class has this, we may need to values of property, // whereas for global defaults OR per-property overrides, we have more // static definition. Sigh. // First: case of class specifying it; try to find POJO property defaults if (_defaultInclusion.getValueInclusion() == JsonInclude.Include.NON_DEFAULT) { valueToSuppress = getPropertyDefaultValue(propDef.getName(), am, actualType); } else { valueToSuppress = getDefaultValue(actualType); } if (valueToSuppress == null) { suppressNulls = true; } else { if (valueToSuppress.getClass().isArray()) { valueToSuppress = ArrayBuilders.getArrayComparator(valueToSuppress); } } break; case NON_ABSENT: // new with 2.6, to support Guava/JDK8 Optionals // always suppress nulls suppressNulls = true; // and for referential types, also "empty", which in their case means "absent" if (actualType.isReferenceType()) { valueToSuppress = BeanPropertyWriter.MARKER_FOR_EMPTY; } break; case NON_EMPTY: // always suppress nulls suppressNulls = true; // but possibly also 'empty' values: valueToSuppress = BeanPropertyWriter.MARKER_FOR_EMPTY; break; case NON_NULL: suppressNulls = true; // fall through case ALWAYS: // default default: // we may still want to suppress empty collections, as per [JACKSON-254]: if (actualType.isContainerType() && !_config.isEnabled(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS)) { valueToSuppress = BeanPropertyWriter.MARKER_FOR_EMPTY; } break; } BeanPropertyWriter bpw = new BeanPropertyWriter(propDef, am, _beanDesc.getClassAnnotations(), declaredType, ser, typeSer, serializationType, suppressNulls, valueToSuppress); // How about custom null serializer? 
Object serDef = _annotationIntrospector.findNullSerializer(am); if (serDef != null) { bpw.assignNullSerializer(prov.serializerInstance(am, serDef)); } // And then, handling of unwrapping NameTransformer unwrapper = _annotationIntrospector.findUnwrappingNameTransformer(am); if (unwrapper != null) { bpw = bpw.unwrappingWriter(unwrapper); } return bpw; }
@ SuppressWarnings ( "deprecation" ) protected BeanPropertyWriter buildWriter ( SerializerProvider prov , BeanPropertyDefinition propDef , JavaType declaredType , JsonSerializer < ? > ser , TypeSerializer typeSer , TypeSerializer contentTypeSer , AnnotatedMember am , boolean defaultUseStaticTyping ) throws JsonMappingException { JavaType serializationType = findSerializationType ( am , defaultUseStaticTyping , declaredType ) ; if ( contentTypeSer != null ) { if ( serializationType == null ) { serializationType = declaredType ; } JavaType ct = serializationType . getContentType ( ) ; if ( ct == null ) { throw new IllegalStateException ( "Problem trying to create BeanPropertyWriter for property '" + propDef . getName ( ) + "' (of type " + _beanDesc . getType ( ) + "); serialization type " + serializationType + " has no content" ) ; } serializationType = serializationType . withContentTypeHandler ( contentTypeSer ) ; ct = serializationType . getContentType ( ) ; } Object valueToSuppress = null ; boolean suppressNulls = false ; JsonInclude . Value inclV = _defaultInclusion . withOverrides ( propDef . findInclusion ( ) ) ; JsonInclude . Include inclusion = inclV . getValueInclusion ( ) ; if ( inclusion == JsonInclude . Include . USE_DEFAULTS ) { inclusion = JsonInclude . Include . ALWAYS ; } JavaType actualType = ( serializationType == null ) ? declaredType : serializationType ; switch ( inclusion ) { case NON_DEFAULT : if ( _defaultInclusion . getValueInclusion ( ) == JsonInclude . Include . NON_DEFAULT ) { valueToSuppress = getPropertyDefaultValue ( propDef . getName ( ) , am , actualType ) ; } else { valueToSuppress = getDefaultValue ( actualType ) ; } if ( valueToSuppress == null ) { suppressNulls = true ; } else { if ( valueToSuppress . getClass ( ) . isArray ( ) ) { valueToSuppress = ArrayBuilders . getArrayComparator ( valueToSuppress ) ; } } break ; case NON_ABSENT : suppressNulls = true ; if ( actualType . 
isReferenceType ( ) ) { valueToSuppress = BeanPropertyWriter . MARKER_FOR_EMPTY ; } break ; case NON_EMPTY : suppressNulls = true ; valueToSuppress = BeanPropertyWriter . MARKER_FOR_EMPTY ; break ; case NON_NULL : suppressNulls = true ; case ALWAYS : default : if ( actualType . isContainerType ( ) && ! _config . isEnabled ( SerializationFeature . WRITE_EMPTY_JSON_ARRAYS ) ) { valueToSuppress = BeanPropertyWriter . MARKER_FOR_EMPTY ; } break ; } BeanPropertyWriter bpw = new BeanPropertyWriter ( propDef , am , _beanDesc . getClassAnnotations ( ) , declaredType , ser , typeSer , serializationType , suppressNulls , valueToSuppress ) ; Object serDef = _annotationIntrospector . findNullSerializer ( am ) ; if ( serDef != null ) { bpw . assignNullSerializer ( prov . serializerInstance ( am , serDef ) ) ; } NameTransformer unwrapper = _annotationIntrospector . findUnwrappingNameTransformer ( am ) ; if ( unwrapper != null ) { bpw = bpw . unwrappingWriter ( unwrapper ) ; } return bpw ; }
Math
89
src/java/org/apache/commons/math/stat/Frequency.java
108
111
Bugs in Frequency API
I think the existing Frequency API has some bugs in it. The addValue(Object v) method allows one to add a plain Object, but one cannot add anything further to the instance, as the second add fails with IllegalArgumentException. In fact, the problem is with the first call to addValue(Object) which should not allow a plain Object to be added - it should only allow Comparable objects. This could be fixed by checking that the object is Comparable. Similar considerations apply to the getCumFreq(Object) and getCumPct(Object) methods - they will only work with objects that implement Comparable. The getCount(Object) and getPct(Object) methods don't fail when given a non-Comparable object (because the class cast exception is caught), however they just return 0 as if the object was not present: {code} final Object OBJ = new Object(); f.addValue(OBJ); // This ought to fail, but doesn't, causing the unexpected behaviour below System.out.println(f.getCount(OBJ)); // 0 System.out.println(f.getPct(OBJ)); // 0.0 {code} Rather than adding extra checks for Comparable, it seems to me that the API would be much improved by using Comparable instead of Object. Also, it should make it easier to implement generics. However, this would cause compilation failures for some programs that pass Object rather than Comparable to the class. These would need recoding, but I think they would continue to run OK against the new API. It would also affect the run-time behaviour slightly, as the first attempt to add a non-Comparable object would fail, rather than the second add of a possibly valid object. But is that a viable program? It can only add one object, and any attempt to get statistics will either return 0 or an Exception, and applying the instanceof fix would also cause it to fail.
@Deprecated public void addValue(Object v) { addValue((Comparable<?>) v); }
@ Deprecated public void addValue ( Object v ) { addValue ( ( Comparable < ? > ) v ) ; }
@Deprecated public void addValue(Object v) { if (v instanceof Comparable<?>){ addValue((Comparable<?>) v); } else { throw new IllegalArgumentException("Object must implement Comparable"); } }
@ Deprecated public void addValue ( Object v ) { if ( v instanceof Comparable < ? > ) { addValue ( ( Comparable < ? > ) v ) ; } else { throw new IllegalArgumentException ( "Object must implement Comparable" ) ; } }
Mockito
1
src/org/mockito/internal/invocation/InvocationMatcher.java
120
150
ArgumentCaptor no longer working for varargs
I ran into the issue described here: http://stackoverflow.com/questions/27303562/why-does-upgrading-mockito-from-1-9-5-to-1-10-8-break-this-captor
public void captureArgumentsFrom(Invocation invocation) { if (invocation.getMethod().isVarArgs()) { int indexOfVararg = invocation.getRawArguments().length - 1; throw new UnsupportedOperationException(); } else { for (int position = 0; position < matchers.size(); position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } } // for (int position = 0; position < matchers.size(); position++) { // Matcher m = matchers.get(position); // if (m instanceof CapturesArguments && invocation.getRawArguments().length > position) { // //TODO SF - this whole lot can be moved captureFrom implementation // if(isVariableArgument(invocation, position) && isVarargMatcher(m)) { // Object array = invocation.getRawArguments()[position]; // for (int i = 0; i < Array.getLength(array); i++) { // ((CapturesArguments) m).captureFrom(Array.get(array, i)); // } // //since we've captured all varargs already, it does not make sense to process other matchers. // return; // } else { // ((CapturesArguments) m).captureFrom(invocation.getRawArguments()[position]); // } // } // } }
public void captureArgumentsFrom ( Invocation invocation ) { if ( invocation . getMethod ( ) . isVarArgs ( ) ) { int indexOfVararg = invocation . getRawArguments ( ) . length - 1 ; throw new UnsupportedOperationException ( ) ; } else { for ( int position = 0 ; position < matchers . size ( ) ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } } }
public void captureArgumentsFrom(Invocation invocation) { if (invocation.getMethod().isVarArgs()) { int indexOfVararg = invocation.getRawArguments().length - 1; for (int position = 0; position < indexOfVararg; position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } for (int position = indexOfVararg; position < matchers.size(); position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getRawArguments()[position - indexOfVararg]); } } } else { for (int position = 0; position < matchers.size(); position++) { Matcher m = matchers.get(position); if (m instanceof CapturesArguments) { ((CapturesArguments) m).captureFrom(invocation.getArgumentAt(position, Object.class)); } } } // for (int position = 0; position < matchers.size(); position++) { // Matcher m = matchers.get(position); // if (m instanceof CapturesArguments && invocation.getRawArguments().length > position) { // //TODO SF - this whole lot can be moved captureFrom implementation // if(isVariableArgument(invocation, position) && isVarargMatcher(m)) { // Object array = invocation.getRawArguments()[position]; // for (int i = 0; i < Array.getLength(array); i++) { // ((CapturesArguments) m).captureFrom(Array.get(array, i)); // } // //since we've captured all varargs already, it does not make sense to process other matchers. // return; // } else { // ((CapturesArguments) m).captureFrom(invocation.getRawArguments()[position]); // } // } // } }
public void captureArgumentsFrom ( Invocation invocation ) { if ( invocation . getMethod ( ) . isVarArgs ( ) ) { int indexOfVararg = invocation . getRawArguments ( ) . length - 1 ; for ( int position = 0 ; position < indexOfVararg ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } for ( int position = indexOfVararg ; position < matchers . size ( ) ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getRawArguments ( ) [ position - indexOfVararg ] ) ; } } } else { for ( int position = 0 ; position < matchers . size ( ) ; position ++ ) { Matcher m = matchers . get ( position ) ; if ( m instanceof CapturesArguments ) { ( ( CapturesArguments ) m ) . captureFrom ( invocation . getArgumentAt ( position , Object . class ) ) ; } } } }
JacksonDatabind
110
src/main/java/com/fasterxml/jackson/databind/deser/impl/JavaUtilCollectionsDeserializers.java
64
86
Inconsistent handling of Collections$UnmodifiableList VS Collections$UnmodifiableRandomAccessList
I'm sorry to bring that one up again, but I'm under the impression that the issue about unmodifiable collections (https://github.com/FasterXML/jackson-databind/issues/1880) is still not solved completely. In fact, the way the `CLASS_UNMODIFIABLE_LIST` is retrieved [here](https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/deser/impl/JavaUtilCollectionsDeserializers.java#L52) yields `Collections$UnmodifiableRandomAccessList`, and therefore only this type is currently supported by Jackson 2.9.8. However, using `Collections.unmodifiableList()` on a `List` implementation that doesn't implement `RandomAccess` will yield a `Collections$UnmodifiableList` instead, which is not deserialized properly and fails with: ``` com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of `java.util.Collections$UnmodifiableList` (no Creators, like default constructor, exist): no default no-arguments constructor found ``` This can be reproduced by adding the following test case in `TestDefaultForUtilCollections1868`: ```java public void testUnmodifiableNonRandomAccessList() throws Exception { _verifyCollection(Collections.unmodifiableList(new LinkedList<>(Arrays.asList("first", "second")))); } ``` Or more generally for outside the project: ```java public void testUnmodifiableNonRandomAccessList() throws Exception { Collection<?> exp = Collections.unmodifiableList(new LinkedList<>(Arrays.asList("first", "second"))); ObjectMapper mapper = new ObjectMapper(); mapper.enableDefaultTyping(DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY); String json = mapper.writeValueAsString(exp); Collection<?> act = mapper.readValue(json, Collection.class); assertEquals(exp, act); assertEquals(exp.getClass(), act.getClass()); } ``` Currently `java.util.Collections.unmodifiableList()` can only return these 2 types of unmodifiable lists, so I believe it is safe for now to just hardcode yet another special case for this 
class. This can currently be solved on user side by adding a mixin, but since `Collections$UnmodifiableRandomAccessList` is supported, I would find it natural to also support the non-random access variant.
public static JsonDeserializer<?> findForCollection(DeserializationContext ctxt, JavaType type) throws JsonMappingException { JavaUtilCollectionsConverter conv; // 10-Jan-2017, tatu: Some types from `java.util.Collections`/`java.util.Arrays` need bit of help... if (type.hasRawClass(CLASS_AS_ARRAYS_LIST)) { conv = converter(TYPE_AS_LIST, type, List.class); } else if (type.hasRawClass(CLASS_SINGLETON_LIST)) { conv = converter(TYPE_SINGLETON_LIST, type, List.class); } else if (type.hasRawClass(CLASS_SINGLETON_SET)) { conv = converter(TYPE_SINGLETON_SET, type, Set.class); // [databind#2265]: we may have another impl type for unmodifiable Lists, check both } else if (type.hasRawClass(CLASS_UNMODIFIABLE_LIST)) { conv = converter(TYPE_UNMODIFIABLE_LIST, type, List.class); } else if (type.hasRawClass(CLASS_UNMODIFIABLE_SET)) { conv = converter(TYPE_UNMODIFIABLE_SET, type, Set.class); } else { return null; } return new StdDelegatingDeserializer<Object>(conv); }
public static JsonDeserializer < ? > findForCollection ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { JavaUtilCollectionsConverter conv ; if ( type . hasRawClass ( CLASS_AS_ARRAYS_LIST ) ) { conv = converter ( TYPE_AS_LIST , type , List . class ) ; } else if ( type . hasRawClass ( CLASS_SINGLETON_LIST ) ) { conv = converter ( TYPE_SINGLETON_LIST , type , List . class ) ; } else if ( type . hasRawClass ( CLASS_SINGLETON_SET ) ) { conv = converter ( TYPE_SINGLETON_SET , type , Set . class ) ; } else if ( type . hasRawClass ( CLASS_UNMODIFIABLE_LIST ) ) { conv = converter ( TYPE_UNMODIFIABLE_LIST , type , List . class ) ; } else if ( type . hasRawClass ( CLASS_UNMODIFIABLE_SET ) ) { conv = converter ( TYPE_UNMODIFIABLE_SET , type , Set . class ) ; } else { return null ; } return new StdDelegatingDeserializer < Object > ( conv ) ; }
public static JsonDeserializer<?> findForCollection(DeserializationContext ctxt, JavaType type) throws JsonMappingException { JavaUtilCollectionsConverter conv; // 10-Jan-2017, tatu: Some types from `java.util.Collections`/`java.util.Arrays` need bit of help... if (type.hasRawClass(CLASS_AS_ARRAYS_LIST)) { conv = converter(TYPE_AS_LIST, type, List.class); } else if (type.hasRawClass(CLASS_SINGLETON_LIST)) { conv = converter(TYPE_SINGLETON_LIST, type, List.class); } else if (type.hasRawClass(CLASS_SINGLETON_SET)) { conv = converter(TYPE_SINGLETON_SET, type, Set.class); // [databind#2265]: we may have another impl type for unmodifiable Lists, check both } else if (type.hasRawClass(CLASS_UNMODIFIABLE_LIST) || type.hasRawClass(CLASS_UNMODIFIABLE_LIST_ALIAS)) { conv = converter(TYPE_UNMODIFIABLE_LIST, type, List.class); } else if (type.hasRawClass(CLASS_UNMODIFIABLE_SET)) { conv = converter(TYPE_UNMODIFIABLE_SET, type, Set.class); } else { return null; } return new StdDelegatingDeserializer<Object>(conv); }
public static JsonDeserializer < ? > findForCollection ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { JavaUtilCollectionsConverter conv ; if ( type . hasRawClass ( CLASS_AS_ARRAYS_LIST ) ) { conv = converter ( TYPE_AS_LIST , type , List . class ) ; } else if ( type . hasRawClass ( CLASS_SINGLETON_LIST ) ) { conv = converter ( TYPE_SINGLETON_LIST , type , List . class ) ; } else if ( type . hasRawClass ( CLASS_SINGLETON_SET ) ) { conv = converter ( TYPE_SINGLETON_SET , type , Set . class ) ; } else if ( type . hasRawClass ( CLASS_UNMODIFIABLE_LIST ) || type . hasRawClass ( CLASS_UNMODIFIABLE_LIST_ALIAS ) ) { conv = converter ( TYPE_UNMODIFIABLE_LIST , type , List . class ) ; } else if ( type . hasRawClass ( CLASS_UNMODIFIABLE_SET ) ) { conv = converter ( TYPE_UNMODIFIABLE_SET , type , Set . class ) ; } else { return null ; } return new StdDelegatingDeserializer < Object > ( conv ) ; }
Math
31
src/main/java/org/apache/commons/math3/util/ContinuedFraction.java
123
199
inverseCumulativeProbability of BinomialDistribution returns wrong value for large trials.
The inverseCumulativeProbability method of the BinomialDistributionImpl class returns wrong value for large trials. Following code will be reproduce the problem. {{System.out.println(new BinomialDistributionImpl(1000000, 0.5).inverseCumulativeProbability(0.5));}} This returns 499525, though it should be 499999. I'm not sure how it should be fixed, but the cause is that the cumulativeProbability method returns Infinity, not NaN. As the result the checkedCumulativeProbability method doesn't work as expected.
public double evaluate(double x, double epsilon, int maxIterations) { final double small = 1e-50; double hPrev = getA(0, x); // use the value of small as epsilon criteria for zero checks if (Precision.equals(hPrev, 0.0, small)) { hPrev = small; } int n = 1; double dPrev = 0.0; double p0 = 1.0; double q1 = 1.0; double cPrev = hPrev; double hN = hPrev; while (n < maxIterations) { final double a = getA(n, x); final double b = getB(n, x); double cN = a * hPrev + b * p0; double q2 = a * q1 + b * dPrev; if (Double.isInfinite(cN) || Double.isInfinite(q2)) { double scaleFactor = 1d; double lastScaleFactor = 1d; final int maxPower = 5; final double scale = FastMath.max(a,b); if (scale <= 0) { // Can't scale throw new ConvergenceException(LocalizedFormats.CONTINUED_FRACTION_INFINITY_DIVERGENCE, x); } for (int i = 0; i < maxPower; i++) { lastScaleFactor = scaleFactor; scaleFactor *= scale; if (a != 0.0 && a > b) { cN = hPrev / lastScaleFactor + (b / scaleFactor * p0); q2 = q1 / lastScaleFactor + (b / scaleFactor * dPrev); } else if (b != 0) { cN = (a / scaleFactor * hPrev) + p0 / lastScaleFactor; q2 = (a / scaleFactor * q1) + dPrev / lastScaleFactor; } if (!(Double.isInfinite(cN) || Double.isInfinite(q2))) { break; } } } final double deltaN = cN / q2 / cPrev; hN = cPrev * deltaN; if (Double.isInfinite(hN)) { throw new ConvergenceException(LocalizedFormats.CONTINUED_FRACTION_INFINITY_DIVERGENCE, x); } if (Double.isNaN(hN)) { throw new ConvergenceException(LocalizedFormats.CONTINUED_FRACTION_NAN_DIVERGENCE, x); } if (FastMath.abs(deltaN - 1.0) < epsilon) { break; } dPrev = q1; cPrev = cN / q2; p0 = hPrev; hPrev = cN; q1 = q2; n++; } if (n >= maxIterations) { throw new MaxCountExceededException(LocalizedFormats.NON_CONVERGENT_CONTINUED_FRACTION, maxIterations, x); } return hN; }
public double evaluate ( double x , double epsilon , int maxIterations ) { final double small = 1e-50 ; double hPrev = getA ( 0 , x ) ; if ( Precision . equals ( hPrev , 0.0 , small ) ) { hPrev = small ; } int n = 1 ; double dPrev = 0.0 ; double p0 = 1.0 ; double q1 = 1.0 ; double cPrev = hPrev ; double hN = hPrev ; while ( n < maxIterations ) { final double a = getA ( n , x ) ; final double b = getB ( n , x ) ; double cN = a * hPrev + b * p0 ; double q2 = a * q1 + b * dPrev ; if ( Double . isInfinite ( cN ) || Double . isInfinite ( q2 ) ) { double scaleFactor = 1d ; double lastScaleFactor = 1d ; final int maxPower = 5 ; final double scale = FastMath . max ( a , b ) ; if ( scale <= 0 ) { throw new ConvergenceException ( LocalizedFormats . CONTINUED_FRACTION_INFINITY_DIVERGENCE , x ) ; } for ( int i = 0 ; i < maxPower ; i ++ ) { lastScaleFactor = scaleFactor ; scaleFactor *= scale ; if ( a != 0.0 && a > b ) { cN = hPrev / lastScaleFactor + ( b / scaleFactor * p0 ) ; q2 = q1 / lastScaleFactor + ( b / scaleFactor * dPrev ) ; } else if ( b != 0 ) { cN = ( a / scaleFactor * hPrev ) + p0 / lastScaleFactor ; q2 = ( a / scaleFactor * q1 ) + dPrev / lastScaleFactor ; } if ( ! ( Double . isInfinite ( cN ) || Double . isInfinite ( q2 ) ) ) { break ; } } } final double deltaN = cN / q2 / cPrev ; hN = cPrev * deltaN ; if ( Double . isInfinite ( hN ) ) { throw new ConvergenceException ( LocalizedFormats . CONTINUED_FRACTION_INFINITY_DIVERGENCE , x ) ; } if ( Double . isNaN ( hN ) ) { throw new ConvergenceException ( LocalizedFormats . CONTINUED_FRACTION_NAN_DIVERGENCE , x ) ; } if ( FastMath . abs ( deltaN - 1.0 ) < epsilon ) { break ; } dPrev = q1 ; cPrev = cN / q2 ; p0 = hPrev ; hPrev = cN ; q1 = q2 ; n ++ ; } if ( n >= maxIterations ) { throw new MaxCountExceededException ( LocalizedFormats . NON_CONVERGENT_CONTINUED_FRACTION , maxIterations , x ) ; } return hN ; }
public double evaluate(double x, double epsilon, int maxIterations) { final double small = 1e-50; double hPrev = getA(0, x); // use the value of small as epsilon criteria for zero checks if (Precision.equals(hPrev, 0.0, small)) { hPrev = small; } int n = 1; double dPrev = 0.0; double cPrev = hPrev; double hN = hPrev; while (n < maxIterations) { final double a = getA(n, x); final double b = getB(n, x); double dN = a + b * dPrev; if (Precision.equals(dN, 0.0, small)) { dN = small; } double cN = a + b / cPrev; if (Precision.equals(cN, 0.0, small)) { cN = small; } dN = 1 / dN; final double deltaN = cN * dN; hN = hPrev * deltaN; if (Double.isInfinite(hN)) { throw new ConvergenceException(LocalizedFormats.CONTINUED_FRACTION_INFINITY_DIVERGENCE, x); } if (Double.isNaN(hN)) { throw new ConvergenceException(LocalizedFormats.CONTINUED_FRACTION_NAN_DIVERGENCE, x); } if (FastMath.abs(deltaN - 1.0) < epsilon) { break; } dPrev = dN; cPrev = cN; hPrev = hN; n++; } if (n >= maxIterations) { throw new MaxCountExceededException(LocalizedFormats.NON_CONVERGENT_CONTINUED_FRACTION, maxIterations, x); } return hN; }
public double evaluate ( double x , double epsilon , int maxIterations ) { final double small = 1e-50 ; double hPrev = getA ( 0 , x ) ; if ( Precision . equals ( hPrev , 0.0 , small ) ) { hPrev = small ; } int n = 1 ; double dPrev = 0.0 ; double cPrev = hPrev ; double hN = hPrev ; while ( n < maxIterations ) { final double a = getA ( n , x ) ; final double b = getB ( n , x ) ; double dN = a + b * dPrev ; if ( Precision . equals ( dN , 0.0 , small ) ) { dN = small ; } double cN = a + b / cPrev ; if ( Precision . equals ( cN , 0.0 , small ) ) { cN = small ; } dN = 1 / dN ; final double deltaN = cN * dN ; hN = hPrev * deltaN ; if ( Double . isInfinite ( hN ) ) { throw new ConvergenceException ( LocalizedFormats . CONTINUED_FRACTION_INFINITY_DIVERGENCE , x ) ; } if ( Double . isNaN ( hN ) ) { throw new ConvergenceException ( LocalizedFormats . CONTINUED_FRACTION_NAN_DIVERGENCE , x ) ; } if ( FastMath . abs ( deltaN - 1.0 ) < epsilon ) { break ; } dPrev = dN ; cPrev = cN ; hPrev = hN ; n ++ ; } if ( n >= maxIterations ) { throw new MaxCountExceededException ( LocalizedFormats . NON_CONVERGENT_CONTINUED_FRACTION , maxIterations , x ) ; } return hN ; }
Compress
28
src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
568
592
TarArchiveInputStream silently finished when unexpected EOF occured
I just found the following test case didn't raise an IOException as it used to be for a *tar trimmed on purpose* @Test public void testCorruptedBzip2() throws IOException { String archivePath = PathUtil.join(testdataDir, "test.tar.bz2"); TarArchiveInputStream input = null; input = new TarArchiveInputStream(new BZip2CompressorInputStream( GoogleFile.SYSTEM.newInputStream(archivePath), true)); ArchiveEntry nextMatchedEntry = input.getNextEntry(); while (nextMatchedEntry != null) { logger.infofmt("Extracting %s", nextMatchedEntry.getName()); String outputPath = PathUtil.join("/tmp/", nextMatchedEntry.getName()); OutputStream out = new FileOutputStream(outputPath); ByteStreams.copy(input, out); out.close(); nextMatchedEntry = input.getNextEntry(); } }
@Override public int read(byte[] buf, int offset, int numToRead) throws IOException { int totalRead = 0; if (hasHitEOF || entryOffset >= entrySize) { return -1; } if (currEntry == null) { throw new IllegalStateException("No current tar entry"); } numToRead = Math.min(numToRead, available()); totalRead = is.read(buf, offset, numToRead); count(totalRead); if (totalRead == -1) { hasHitEOF = true; } else { entryOffset += totalRead; } return totalRead; }
@ Override public int read ( byte [ ] buf , int offset , int numToRead ) throws IOException { int totalRead = 0 ; if ( hasHitEOF || entryOffset >= entrySize ) { return - 1 ; } if ( currEntry == null ) { throw new IllegalStateException ( "No current tar entry" ) ; } numToRead = Math . min ( numToRead , available ( ) ) ; totalRead = is . read ( buf , offset , numToRead ) ; count ( totalRead ) ; if ( totalRead == - 1 ) { hasHitEOF = true ; } else { entryOffset += totalRead ; } return totalRead ; }
@Override public int read(byte[] buf, int offset, int numToRead) throws IOException { int totalRead = 0; if (hasHitEOF || entryOffset >= entrySize) { return -1; } if (currEntry == null) { throw new IllegalStateException("No current tar entry"); } numToRead = Math.min(numToRead, available()); totalRead = is.read(buf, offset, numToRead); if (totalRead == -1) { if (numToRead > 0) { throw new IOException("Truncated TAR archive"); } hasHitEOF = true; } else { count(totalRead); entryOffset += totalRead; } return totalRead; }
@ Override public int read ( byte [ ] buf , int offset , int numToRead ) throws IOException { int totalRead = 0 ; if ( hasHitEOF || entryOffset >= entrySize ) { return - 1 ; } if ( currEntry == null ) { throw new IllegalStateException ( "No current tar entry" ) ; } numToRead = Math . min ( numToRead , available ( ) ) ; totalRead = is . read ( buf , offset , numToRead ) ; if ( totalRead == - 1 ) { if ( numToRead > 0 ) { throw new IOException ( "Truncated TAR archive" ) ; } hasHitEOF = true ; } else { count ( totalRead ) ; entryOffset += totalRead ; } return totalRead ; }
Cli
26
src/java/org/apache/commons/cli/OptionBuilder.java
346
364
OptionBuilder is not reseted in case of an IAE at create
If the call to OptionBuilder.create() fails with an IllegalArgumentException, the OptionBuilder is not resetted and its next usage may contain unwanted settings. Actually this let the CLI-1.2 RCs fail on IBM JDK 6 running on Maven 2.0.10.
public static Option create(String opt) throws IllegalArgumentException { // create the option Option option = new Option(opt, description); // set the option properties option.setLongOpt(longopt); option.setRequired(required); option.setOptionalArg(optionalArg); option.setArgs(numberOfArgs); option.setType(type); option.setValueSeparator(valuesep); option.setArgName(argName); // reset the OptionBuilder properties OptionBuilder.reset(); // return the Option instance return option; }
public static Option create ( String opt ) throws IllegalArgumentException { Option option = new Option ( opt , description ) ; option . setLongOpt ( longopt ) ; option . setRequired ( required ) ; option . setOptionalArg ( optionalArg ) ; option . setArgs ( numberOfArgs ) ; option . setType ( type ) ; option . setValueSeparator ( valuesep ) ; option . setArgName ( argName ) ; OptionBuilder . reset ( ) ; return option ; }
public static Option create(String opt) throws IllegalArgumentException { Option option = null; try { // create the option option = new Option(opt, description); // set the option properties option.setLongOpt(longopt); option.setRequired(required); option.setOptionalArg(optionalArg); option.setArgs(numberOfArgs); option.setType(type); option.setValueSeparator(valuesep); option.setArgName(argName); } finally { // reset the OptionBuilder properties OptionBuilder.reset(); } // return the Option instance return option; }
public static Option create ( String opt ) throws IllegalArgumentException { Option option = null ; try { option = new Option ( opt , description ) ; option . setLongOpt ( longopt ) ; option . setRequired ( required ) ; option . setOptionalArg ( optionalArg ) ; option . setArgs ( numberOfArgs ) ; option . setType ( type ) ; option . setValueSeparator ( valuesep ) ; option . setArgName ( argName ) ; } finally { OptionBuilder . reset ( ) ; } return option ; }
Compress
7
src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
93
105
TarUtils.parseName does not properly handle characters outside the range 0-127
if a tarfile contains files with special characters, the names of the tar entries are wrong. example: correct name: 0302-0601-3±±±F06±W220±ZB±LALALA±±±±±±±±±±CAN±±DC±±±04±060302±MOE.model name resolved by TarUtils.parseName: 0302-0101-3ᄆᄆᄆF06ᄆW220ᄆZBᄆHECKMODULᄆᄆᄆᄆᄆᄆᄆᄆᄆᄆECEᄆᄆDCᄆᄆᄆ07ᄆ060302ᄆDOERN.model please use: result.append(new String(new byte[] { buffer[i] })); instead of: result.append((char) buffer[i]); to solve this encoding problem.
public static String parseName(byte[] buffer, final int offset, final int length) { StringBuffer result = new StringBuffer(length); int end = offset + length; for (int i = offset; i < end; ++i) { if (buffer[i] == 0) { break; } result.append((char) buffer[i]); } return result.toString(); }
public static String parseName ( byte [ ] buffer , final int offset , final int length ) { StringBuffer result = new StringBuffer ( length ) ; int end = offset + length ; for ( int i = offset ; i < end ; ++ i ) { if ( buffer [ i ] == 0 ) { break ; } result . append ( ( char ) buffer [ i ] ) ; } return result . toString ( ) ; }
public static String parseName(byte[] buffer, final int offset, final int length) { StringBuffer result = new StringBuffer(length); int end = offset + length; for (int i = offset; i < end; ++i) { byte b = buffer[i]; if (b == 0) { // Trailing null break; } result.append((char) (b & 0xFF)); // Allow for sign-extension } return result.toString(); }
public static String parseName ( byte [ ] buffer , final int offset , final int length ) { StringBuffer result = new StringBuffer ( length ) ; int end = offset + length ; for ( int i = offset ; i < end ; ++ i ) { byte b = buffer [ i ] ; if ( b == 0 ) { break ; } result . append ( ( char ) ( b & 0xFF ) ) ; } return result . toString ( ) ; }
Math
27
src/main/java/org/apache/commons/math3/fraction/Fraction.java
596
598
Fraction percentageValue rare overflow
The percentageValue() method of the Fraction class works by first multiplying the Fraction by 100, then converting the Fraction to a double. This causes overflows when the numerator is greater than Integer.MAX_VALUE/100, even when the value of the fraction is far below this value. The patch changes the method to first convert to a double value, and then multiply this value by 100 - the result should be the same, but with less overflows. An addition to the test for the method that covers this bug is also included.
public double percentageValue() { return multiply(100).doubleValue(); }
public double percentageValue ( ) { return multiply ( 100 ) . doubleValue ( ) ; }
public double percentageValue() { return 100 * doubleValue(); }
public double percentageValue ( ) { return 100 * doubleValue ( ) ; }
Mockito
38
src/org/mockito/internal/verification/argumentmatching/ArgumentMatchingTool.java
47
49
Mockito's sources in the central Maven repository should be in a *-sources.jar file
Right now, in http://repo1.maven.org/maven2/org/mockito/, the binaries and sources of Mockito are packed in the same JAR file (e.g. mockito-all-1.7.jar). This is against the Maven conventions. As a result, at least IntelliJ IDEA does not automatically detect the sources (i.e. no quick access to documentation, no seeing of method parameter names). The convention in Maven is to have the binaries in one JAR, the sources in another JAR whose name is prefixed "-sources" and the javadocs in a third JAR prefixed "-javadoc". See http:// maven.apache.org/guides/mini/guide-central-repository- upload.html#Manual_upload_of_artifacts The expected way to organize the files is: mockito-all-X.Y.jar -- the .class files and everything that is required at runtime mockito-all-X.Y-sources.jar -- the .java files and other sources mockito-all-X.Y-javadoc.jar -- the javadocs (I don't use them, but maybe someone else likes them) Organizing them like this makes the library compatible with all Maven users. An added benefit is that the binaries JAR becomes about 50% smaller, because the source code will be in a separate file.
private boolean toStringEquals(Matcher m, Object arg) { return StringDescription.toString(m).equals(arg.toString()); }
private boolean toStringEquals ( Matcher m , Object arg ) { return StringDescription . toString ( m ) . equals ( arg . toString ( ) ) ; }
private boolean toStringEquals(Matcher m, Object arg) { return StringDescription.toString(m).equals(arg == null? "null" : arg.toString()); }
private boolean toStringEquals ( Matcher m , Object arg ) { return StringDescription . toString ( m ) . equals ( arg == null ? "null" : arg . toString ( ) ) ; }
JacksonDatabind
42
src/main/java/com/fasterxml/jackson/databind/deser/std/FromStringDeserializer.java
277
285
Serializing and Deserializing Locale.ROOT
Serializing and Deserializing Locale objects seems to work just fine, until you try on the Root Locale. It writes it out as an empty string and when it reads it in, the value is null ``` @Test public void testLocaleDeserialization() throws IOException { ObjectMapper objectMapper = new ObjectMapper(); Locale root = Locale.ROOT; String json = objectMapper.writeValueAsString(root); System.out.printf("Root Locale: '%s'", json); Locale actual = objectMapper.readValue(json, Locale.class); Assert.assertEquals(root, actual); } ``` Here is the output: Root Locale: '""' java.lang.AssertionError: Expected : Actual :null
@Override protected Object _deserializeFromEmptyString() throws IOException { // As per [databind#398], URI requires special handling if (_kind == STD_URI) { return URI.create(""); } // As per [databind#1123], Locale too return super._deserializeFromEmptyString(); }
@ Override protected Object _deserializeFromEmptyString ( ) throws IOException { if ( _kind == STD_URI ) { return URI . create ( "" ) ; } return super . _deserializeFromEmptyString ( ) ; }
@Override protected Object _deserializeFromEmptyString() throws IOException { // As per [databind#398], URI requires special handling if (_kind == STD_URI) { return URI.create(""); } // As per [databind#1123], Locale too if (_kind == STD_LOCALE) { return Locale.ROOT; } return super._deserializeFromEmptyString(); }
@ Override protected Object _deserializeFromEmptyString ( ) throws IOException { if ( _kind == STD_URI ) { return URI . create ( "" ) ; } if ( _kind == STD_LOCALE ) { return Locale . ROOT ; } return super . _deserializeFromEmptyString ( ) ; }
Math
70
src/main/java/org/apache/commons/math/analysis/solvers/BisectionSolver.java
70
73
BisectionSolver.solve(final UnivariateRealFunction f, double min, double max, double initial) throws NullPointerException
Method BisectionSolver.solve(final UnivariateRealFunction f, double min, double max, double initial) invokes BisectionSolver.solve(double min, double max) which throws NullPointerException, as member variable UnivariateRealSolverImpl.f is null. Instead the method: BisectionSolver.solve(final UnivariateRealFunction f, double min, double max) should be called. Steps to reproduce: invoke: new BisectionSolver().solve(someUnivariateFunctionImpl, 0.0, 1.0, 0.5); NullPointerException will be thrown.
public double solve(final UnivariateRealFunction f, double min, double max, double initial) throws MaxIterationsExceededException, FunctionEvaluationException { return solve(min, max); }
public double solve ( final UnivariateRealFunction f , double min , double max , double initial ) throws MaxIterationsExceededException , FunctionEvaluationException { return solve ( min , max ) ; }
public double solve(final UnivariateRealFunction f, double min, double max, double initial) throws MaxIterationsExceededException, FunctionEvaluationException { return solve(f, min, max); }
public double solve ( final UnivariateRealFunction f , double min , double max , double initial ) throws MaxIterationsExceededException , FunctionEvaluationException { return solve ( f , min , max ) ; }
JacksonDatabind
39
src/main/java/com/fasterxml/jackson/databind/deser/std/NullifyingDeserializer.java
30
37
Jackson not continue to parse after DeserializationFeature.FAIL_ON_INVALID_SUBTYPE error
After FAIL_ON_INVALID_SUBTYPE error, jackson should continue to parse, but seems jackson doesn't. The output: ``` CallRecord [version=0.0, application=123, ] // doesn't read item2 which is valid CallRecord [version=0.0, application=123, ] CallRecord [version=0.0, ] // doesn't read application after invalid item. ``` ``` jaca @JsonInclude(Include.NON_NULL) public class CallRecord { public float version; public String application; public Item item; public Item item2; public CallRecord() {} public static void main(final String[] args) throws IOException { final ObjectMapper objectMapper = new ObjectMapper().disable(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES); final CallRecord call = new CallRecord(); final Event event = new Event(); event.location = "location1"; call.item = event; call.item2 = event; call.application = "123"; // System.out.println(objectMapper.writeValueAsString(call)); String json = "{\"version\":0.0,\"application\":\"123\",\"item\":{\"type\":\"xevent\",\"location\":\"location1\"},\"item2\":{\"type\":\"event\",\"location\":\"location1\"}}"; // can't read item2 - which is valid System.out.println(objectMapper.readValue(json, CallRecord.class)); json = "{\"version\":0.0,\"application\":\"123\"},{\"item\":{\"type\":\"xevent\",\"location\":\"location1\"}"; System.out.println(objectMapper.readValue(json, CallRecord.class)); json = "{\"item\":{\"type\":\"xevent\",\"location\":\"location1\"}, \"version\":0.0,\"application\":\"123\"}"; // order matters: move item to the fornt, now it can't read application property System.out.println(objectMapper.readValue(json, CallRecord.class)); } @Override public String toString() { final StringBuilder builder = new StringBuilder(); builder.append("CallRecord [version=").append(version).append(", "); if (application != null) { builder.append("application=").append(application).append(", "); } if (item != null) { 
builder.append("item=").append(item); } builder.append("]"); return builder.toString(); } } @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type", visible = true) @JsonSubTypes({@Type(value = Event.class, name = Event.TYPE)}) public interface Item { } public final class Event implements Item { public String location; public static final String TYPE = "event"; public Event() {} } ```
@Override public Object deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { // 29-Jan-2016, tatu: Simple skipping for all other tokens, but FIELD_NAME bit // special unfortunately p.skipChildren(); return null; }
@ Override public Object deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException { p . skipChildren ( ) ; return null ; }
@Override public Object deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { // 29-Jan-2016, tatu: Simple skipping for all other tokens, but FIELD_NAME bit // special unfortunately if (p.hasToken(JsonToken.FIELD_NAME)) { while (true) { JsonToken t = p.nextToken(); if ((t == null) || (t == JsonToken.END_OBJECT)) { break; } p.skipChildren(); } } else { p.skipChildren(); } return null; }
@ Override public Object deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException { if ( p . hasToken ( JsonToken . FIELD_NAME ) ) { while ( true ) { JsonToken t = p . nextToken ( ) ; if ( ( t == null ) || ( t == JsonToken . END_OBJECT ) ) { break ; } p . skipChildren ( ) ; } } else { p . skipChildren ( ) ; } return null ; }
JacksonDatabind
5
src/main/java/com/fasterxml/jackson/databind/introspect/AnnotatedClass.java
634
662
Mixin annotations lost when using a mixin class hierarchy with non-mixin interfaces
In summary, mixin annotations are lost when Jackson scans a parent mixin class with Json annotations followed by an interface implemented by the parent mixin class that does not have the same Json annotations. Jackson version: 2.4.0 Detail: I have the following class structure ``` java public interface Contact { String getCity(); } public class ContactImpl implements Contact { public String getCity() { ... } } public class ContactMixin implements Contact { @JsonProperty public String getCity() { return null; } } public interface Person extends Contact {} public class PersonImpl extends ContactImpl implements Person {} public class PersonMixin extends ContactMixin implements Person {} ``` and I configure a module as ``` java // There are other getters/properties in the Impl class that do not need to be serialized and so // I am using the Mixin to match the interface and explicitly annotate all the inherited methods module.disable(MapperFeature.ALLOW_FINAL_FIELDS_AS_MUTATORS) .disable(MapperFeature.AUTO_DETECT_FIELDS) .disable(MapperFeature.AUTO_DETECT_GETTERS) .disable(MapperFeature.AUTO_DETECT_IS_GETTERS) .disable(MapperFeature.INFER_PROPERTY_MUTATORS); module.setMixInAnnotation(Person.class, PersonMixin.class); ``` When a `PersonImpl` instance is serialized, `city` is not included. I debugged the code and this is what happens: In `AnnotatedClass.resolveMemberMethods()` the supertypes of `PersonImpl` are `[Person.class, Contact.class, ContactImpl.class]` in that order. It starts with `Person` for which it finds `PersonMixin` and proceeds to `AnnotatedClass._addMethodMixIns()`. Here the `parents` list has `[PersonMixin, ContactMixin, Contact]`. When it processes `ContactMixin` it adds `getCity()` with the `JsonProperty` annotation. 
Then it processes `Contact`, doesn't find `getCity()` in `methods` map and so creates a new `AnnotatedMethod` for `getCity()` with the one from the interface which has no annotation which replaces the one from `ContactMixin` The workaround for this issue is to explicitly add any parent mixins to the module i.e. ``` java module.setMixInAnnotation(Contact.class, ContactMixin.class); ```
protected void _addMethodMixIns(Class<?> targetClass, AnnotatedMethodMap methods, Class<?> mixInCls, AnnotatedMethodMap mixIns) { List<Class<?>> parents = new ArrayList<Class<?>>(); parents.add(mixInCls); ClassUtil.findSuperTypes(mixInCls, targetClass, parents); for (Class<?> mixin : parents) { for (Method m : mixin.getDeclaredMethods()) { if (!_isIncludableMemberMethod(m)) { continue; } AnnotatedMethod am = methods.find(m); /* Do we already have a method to augment (from sub-class * that will mask this mixIn)? If so, add if visible * without masking (no such annotation) */ if (am != null) { _addMixUnders(m, am); /* Otherwise will have precedence, but must wait * until we find the real method (mixIn methods are * just placeholder, can't be called) */ } else { // Well, or, as per [Issue#515], multi-level merge within mixins... mixIns.add(_constructMethod(m)); } } } }
protected void _addMethodMixIns ( Class < ? > targetClass , AnnotatedMethodMap methods , Class < ? > mixInCls , AnnotatedMethodMap mixIns ) { List < Class < ? > > parents = new ArrayList < Class < ? > > ( ) ; parents . add ( mixInCls ) ; ClassUtil . findSuperTypes ( mixInCls , targetClass , parents ) ; for ( Class < ? > mixin : parents ) { for ( Method m : mixin . getDeclaredMethods ( ) ) { if ( ! _isIncludableMemberMethod ( m ) ) { continue ; } AnnotatedMethod am = methods . find ( m ) ; if ( am != null ) { _addMixUnders ( m , am ) ; } else { mixIns . add ( _constructMethod ( m ) ) ; } } } }
protected void _addMethodMixIns(Class<?> targetClass, AnnotatedMethodMap methods, Class<?> mixInCls, AnnotatedMethodMap mixIns) { List<Class<?>> parents = new ArrayList<Class<?>>(); parents.add(mixInCls); ClassUtil.findSuperTypes(mixInCls, targetClass, parents); for (Class<?> mixin : parents) { for (Method m : mixin.getDeclaredMethods()) { if (!_isIncludableMemberMethod(m)) { continue; } AnnotatedMethod am = methods.find(m); /* Do we already have a method to augment (from sub-class * that will mask this mixIn)? If so, add if visible * without masking (no such annotation) */ if (am != null) { _addMixUnders(m, am); /* Otherwise will have precedence, but must wait * until we find the real method (mixIn methods are * just placeholder, can't be called) */ } else { // Well, or, as per [Issue#515], multi-level merge within mixins... am = mixIns.find(m); if (am != null) { _addMixUnders(m, am); } else { mixIns.add(_constructMethod(m)); } } } } }
protected void _addMethodMixIns ( Class < ? > targetClass , AnnotatedMethodMap methods , Class < ? > mixInCls , AnnotatedMethodMap mixIns ) { List < Class < ? > > parents = new ArrayList < Class < ? > > ( ) ; parents . add ( mixInCls ) ; ClassUtil . findSuperTypes ( mixInCls , targetClass , parents ) ; for ( Class < ? > mixin : parents ) { for ( Method m : mixin . getDeclaredMethods ( ) ) { if ( ! _isIncludableMemberMethod ( m ) ) { continue ; } AnnotatedMethod am = methods . find ( m ) ; if ( am != null ) { _addMixUnders ( m , am ) ; } else { am = mixIns . find ( m ) ; if ( am != null ) { _addMixUnders ( m , am ) ; } else { mixIns . add ( _constructMethod ( m ) ) ; } } } } }
Codec
18
src/main/java/org/apache/commons/codec/binary/StringUtils.java
71
82
StringUtils.equals(CharSequence cs1, CharSequence cs2) can fail with String Index OBE
StringUtils.equals(CharSequence cs1, CharSequence cs2) fails with String Index OBE if the two sequences are different lengths.
public static boolean equals(final CharSequence cs1, final CharSequence cs2) { if (cs1 == cs2) { return true; } if (cs1 == null || cs2 == null) { return false; } if (cs1 instanceof String && cs2 instanceof String) { return cs1.equals(cs2); } return CharSequenceUtils.regionMatches(cs1, false, 0, cs2, 0, Math.max(cs1.length(), cs2.length())); }
public static boolean equals ( final CharSequence cs1 , final CharSequence cs2 ) { if ( cs1 == cs2 ) { return true ; } if ( cs1 == null || cs2 == null ) { return false ; } if ( cs1 instanceof String && cs2 instanceof String ) { return cs1 . equals ( cs2 ) ; } return CharSequenceUtils . regionMatches ( cs1 , false , 0 , cs2 , 0 , Math . max ( cs1 . length ( ) , cs2 . length ( ) ) ) ; }
public static boolean equals(final CharSequence cs1, final CharSequence cs2) { if (cs1 == cs2) { return true; } if (cs1 == null || cs2 == null) { return false; } if (cs1 instanceof String && cs2 instanceof String) { return cs1.equals(cs2); } return cs1.length() == cs2.length() && CharSequenceUtils.regionMatches(cs1, false, 0, cs2, 0, cs1.length()); }
public static boolean equals ( final CharSequence cs1 , final CharSequence cs2 ) { if ( cs1 == cs2 ) { return true ; } if ( cs1 == null || cs2 == null ) { return false ; } if ( cs1 instanceof String && cs2 instanceof String ) { return cs1 . equals ( cs2 ) ; } return cs1 . length ( ) == cs2 . length ( ) && CharSequenceUtils . regionMatches ( cs1 , false , 0 , cs2 , 0 , cs1 . length ( ) ) ; }
JacksonDatabind
97
src/main/java/com/fasterxml/jackson/databind/node/POJONode.java
104
116
Context attributes are not passed/available to custom serializer if object is in POJO
Below is a test case where I create a custom serializer and use it to serialize an object 1) in a HashMap and 2) in an ObjectNode. In both cases I pass attribute to the serializer like this: `mapper.writer().withAttribute("myAttr", "Hello!")` Serializing HashMap works as expected, but during ObjectNode serialization the attribute is null . It seems that in both cases the custom serializer should get access to the passed attribute and so both lines in the output should contain "Hello!" Produced output from running testCase.test() ``` {"data":{"aStr":"The value is: Hello!"}} {"data":{"aStr":"The value is: NULL"}} ``` Test case: ``` import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.ser.std.StdSerializer; import java.io.IOException; import java.util.HashMap; import java.util.Map; public class TestCase { public final static ObjectMapper mapper = new ObjectMapper(); @JsonSerialize(using = TestCase.CustomSer.class) public static class Data { public String aStr; } public static class CustomSer extends StdSerializer<Data> { public CustomSer() { super(Data.class); } @Override public void serialize(Data value, JsonGenerator gen, SerializerProvider provider) throws IOException { String attrStr = (String) provider.getAttribute("myAttr"); gen.writeStartObject(); gen.writeObjectField("aStr", "The value is: " + (attrStr == null ? 
"NULL" : attrStr)); gen.writeEndObject(); } } public static void test() throws IOException { Data data = new Data(); data.aStr = "Hello"; Map<String, Object> mapTest = new HashMap<>(); mapTest.put("data", data); ObjectNode treeTest = mapper.createObjectNode(); treeTest.putPOJO("data", data); String mapOut = mapper.writer().withAttribute("myAttr", "Hello!").writeValueAsString(mapTest); System.out.println(mapOut); String treeOut = mapper.writer().withAttribute("myAttr", "Hello!").writeValueAsString(treeTest); System.out.println(treeOut); } } ```
@Override public final void serialize(JsonGenerator gen, SerializerProvider ctxt) throws IOException { if (_value == null) { ctxt.defaultSerializeNull(gen); } else if (_value instanceof JsonSerializable) { ((JsonSerializable) _value).serialize(gen, ctxt); } else { // 25-May-2018, tatu: [databind#1991] do not call via generator but through context; // this to preserve contextual information gen.writeObject(_value); } }
@ Override public final void serialize ( JsonGenerator gen , SerializerProvider ctxt ) throws IOException { if ( _value == null ) { ctxt . defaultSerializeNull ( gen ) ; } else if ( _value instanceof JsonSerializable ) { ( ( JsonSerializable ) _value ) . serialize ( gen , ctxt ) ; } else { gen . writeObject ( _value ) ; } }
@Override public final void serialize(JsonGenerator gen, SerializerProvider ctxt) throws IOException { if (_value == null) { ctxt.defaultSerializeNull(gen); } else if (_value instanceof JsonSerializable) { ((JsonSerializable) _value).serialize(gen, ctxt); } else { // 25-May-2018, tatu: [databind#1991] do not call via generator but through context; // this to preserve contextual information ctxt.defaultSerializeValue(_value, gen); } }
@ Override public final void serialize ( JsonGenerator gen , SerializerProvider ctxt ) throws IOException { if ( _value == null ) { ctxt . defaultSerializeNull ( gen ) ; } else if ( _value instanceof JsonSerializable ) { ( ( JsonSerializable ) _value ) . serialize ( gen , ctxt ) ; } else { ctxt . defaultSerializeValue ( _value , gen ) ; } }
Compress
45
src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
474
492
TarUtils.formatLongOctalOrBinaryBytes never uses result of formatLongBinary
if the length < 9, formatLongBinary is executed, then overwritten by the results of formatBigIntegerBinary. If the results are not ignored, a unit test would fail. Also, do the binary hacks need to support negative numbers?
public static int formatLongOctalOrBinaryBytes( final long value, final byte[] buf, final int offset, final int length) { // Check whether we are dealing with UID/GID or SIZE field final long maxAsOctalChar = length == TarConstants.UIDLEN ? TarConstants.MAXID : TarConstants.MAXSIZE; final boolean negative = value < 0; if (!negative && value <= maxAsOctalChar) { // OK to store as octal chars return formatLongOctalBytes(value, buf, offset, length); } if (length < 9) { formatLongBinary(value, buf, offset, length, negative); } formatBigIntegerBinary(value, buf, offset, length, negative); buf[offset] = (byte) (negative ? 0xff : 0x80); return offset + length; }
public static int formatLongOctalOrBinaryBytes ( final long value , final byte [ ] buf , final int offset , final int length ) { final long maxAsOctalChar = length == TarConstants . UIDLEN ? TarConstants . MAXID : TarConstants . MAXSIZE ; final boolean negative = value < 0 ; if ( ! negative && value <= maxAsOctalChar ) { return formatLongOctalBytes ( value , buf , offset , length ) ; } if ( length < 9 ) { formatLongBinary ( value , buf , offset , length , negative ) ; } formatBigIntegerBinary ( value , buf , offset , length , negative ) ; buf [ offset ] = ( byte ) ( negative ? 0xff : 0x80 ) ; return offset + length ; }
public static int formatLongOctalOrBinaryBytes( final long value, final byte[] buf, final int offset, final int length) { // Check whether we are dealing with UID/GID or SIZE field final long maxAsOctalChar = length == TarConstants.UIDLEN ? TarConstants.MAXID : TarConstants.MAXSIZE; final boolean negative = value < 0; if (!negative && value <= maxAsOctalChar) { // OK to store as octal chars return formatLongOctalBytes(value, buf, offset, length); } if (length < 9) { formatLongBinary(value, buf, offset, length, negative); } else { formatBigIntegerBinary(value, buf, offset, length, negative); } buf[offset] = (byte) (negative ? 0xff : 0x80); return offset + length; }
public static int formatLongOctalOrBinaryBytes ( final long value , final byte [ ] buf , final int offset , final int length ) { final long maxAsOctalChar = length == TarConstants . UIDLEN ? TarConstants . MAXID : TarConstants . MAXSIZE ; final boolean negative = value < 0 ; if ( ! negative && value <= maxAsOctalChar ) { return formatLongOctalBytes ( value , buf , offset , length ) ; } if ( length < 9 ) { formatLongBinary ( value , buf , offset , length , negative ) ; } else { formatBigIntegerBinary ( value , buf , offset , length , negative ) ; } buf [ offset ] = ( byte ) ( negative ? 0xff : 0x80 ) ; return offset + length ; }
JacksonDatabind
78
src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
110
158
Jackson Deserializer security vulnerability via default typing (CVE-2017-7525)
I have send email to info@fasterxml.com
@Override public JsonDeserializer<Object> createBeanDeserializer(DeserializationContext ctxt, JavaType type, BeanDescription beanDesc) throws JsonMappingException { final DeserializationConfig config = ctxt.getConfig(); // We may also have custom overrides: JsonDeserializer<Object> custom = _findCustomBeanDeserializer(type, config, beanDesc); if (custom != null) { return custom; } /* One more thing to check: do we have an exception type * (Throwable or its sub-classes)? If so, need slightly * different handling. */ if (type.isThrowable()) { return buildThrowableDeserializer(ctxt, type, beanDesc); } /* Or, for abstract types, may have alternate means for resolution * (defaulting, materialization) */ // 29-Nov-2015, tatu: Also, filter out calls to primitive types, they are // not something we could materialize anything for if (type.isAbstract() && !type.isPrimitive() && !type.isEnumType()) { // Let's make it possible to materialize abstract types. JavaType concreteType = materializeAbstractType(ctxt, type, beanDesc); if (concreteType != null) { /* important: introspect actual implementation (abstract class or * interface doesn't have constructors, for one) */ beanDesc = config.introspect(concreteType); return buildBeanDeserializer(ctxt, concreteType, beanDesc); } } // Otherwise, may want to check handlers for standard types, from superclass: @SuppressWarnings("unchecked") JsonDeserializer<Object> deser = (JsonDeserializer<Object>) findStdDeserializer(ctxt, type, beanDesc); if (deser != null) { return deser; } // Otherwise: could the class be a Bean class? If not, bail out if (!isPotentialBeanType(type.getRawClass())) { return null; } // For checks like [databind#1599] // Use generic bean introspection to build deserializer return buildBeanDeserializer(ctxt, type, beanDesc); }
@ Override public JsonDeserializer < Object > createBeanDeserializer ( DeserializationContext ctxt , JavaType type , BeanDescription beanDesc ) throws JsonMappingException { final DeserializationConfig config = ctxt . getConfig ( ) ; JsonDeserializer < Object > custom = _findCustomBeanDeserializer ( type , config , beanDesc ) ; if ( custom != null ) { return custom ; } if ( type . isThrowable ( ) ) { return buildThrowableDeserializer ( ctxt , type , beanDesc ) ; } if ( type . isAbstract ( ) && ! type . isPrimitive ( ) && ! type . isEnumType ( ) ) { JavaType concreteType = materializeAbstractType ( ctxt , type , beanDesc ) ; if ( concreteType != null ) { beanDesc = config . introspect ( concreteType ) ; return buildBeanDeserializer ( ctxt , concreteType , beanDesc ) ; } } @ SuppressWarnings ( "unchecked" ) JsonDeserializer < Object > deser = ( JsonDeserializer < Object > ) findStdDeserializer ( ctxt , type , beanDesc ) ; if ( deser != null ) { return deser ; } if ( ! isPotentialBeanType ( type . getRawClass ( ) ) ) { return null ; } return buildBeanDeserializer ( ctxt , type , beanDesc ) ; }
@Override public JsonDeserializer<Object> createBeanDeserializer(DeserializationContext ctxt, JavaType type, BeanDescription beanDesc) throws JsonMappingException { final DeserializationConfig config = ctxt.getConfig(); // We may also have custom overrides: JsonDeserializer<Object> custom = _findCustomBeanDeserializer(type, config, beanDesc); if (custom != null) { return custom; } /* One more thing to check: do we have an exception type * (Throwable or its sub-classes)? If so, need slightly * different handling. */ if (type.isThrowable()) { return buildThrowableDeserializer(ctxt, type, beanDesc); } /* Or, for abstract types, may have alternate means for resolution * (defaulting, materialization) */ // 29-Nov-2015, tatu: Also, filter out calls to primitive types, they are // not something we could materialize anything for if (type.isAbstract() && !type.isPrimitive() && !type.isEnumType()) { // Let's make it possible to materialize abstract types. JavaType concreteType = materializeAbstractType(ctxt, type, beanDesc); if (concreteType != null) { /* important: introspect actual implementation (abstract class or * interface doesn't have constructors, for one) */ beanDesc = config.introspect(concreteType); return buildBeanDeserializer(ctxt, concreteType, beanDesc); } } // Otherwise, may want to check handlers for standard types, from superclass: @SuppressWarnings("unchecked") JsonDeserializer<Object> deser = (JsonDeserializer<Object>) findStdDeserializer(ctxt, type, beanDesc); if (deser != null) { return deser; } // Otherwise: could the class be a Bean class? If not, bail out if (!isPotentialBeanType(type.getRawClass())) { return null; } // For checks like [databind#1599] checkIllegalTypes(ctxt, type, beanDesc); // Use generic bean introspection to build deserializer return buildBeanDeserializer(ctxt, type, beanDesc); }
@ Override public JsonDeserializer < Object > createBeanDeserializer ( DeserializationContext ctxt , JavaType type , BeanDescription beanDesc ) throws JsonMappingException { final DeserializationConfig config = ctxt . getConfig ( ) ; JsonDeserializer < Object > custom = _findCustomBeanDeserializer ( type , config , beanDesc ) ; if ( custom != null ) { return custom ; } if ( type . isThrowable ( ) ) { return buildThrowableDeserializer ( ctxt , type , beanDesc ) ; } if ( type . isAbstract ( ) && ! type . isPrimitive ( ) && ! type . isEnumType ( ) ) { JavaType concreteType = materializeAbstractType ( ctxt , type , beanDesc ) ; if ( concreteType != null ) { beanDesc = config . introspect ( concreteType ) ; return buildBeanDeserializer ( ctxt , concreteType , beanDesc ) ; } } @ SuppressWarnings ( "unchecked" ) JsonDeserializer < Object > deser = ( JsonDeserializer < Object > ) findStdDeserializer ( ctxt , type , beanDesc ) ; if ( deser != null ) { return deser ; } if ( ! isPotentialBeanType ( type . getRawClass ( ) ) ) { return null ; } checkIllegalTypes ( ctxt , type , beanDesc ) ; return buildBeanDeserializer ( ctxt , type , beanDesc ) ; }
Compress
12
src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
172
238
TarArchiveInputStream throws IllegalArgumentException instead of IOException
TarArchiveInputStream is throwing IllegalArgumentException instead of IOException on corrupt files, in direct contradiction to the Javadoc. Here is a stack-trace: {code} java.lang.IllegalArgumentException: Invalid byte -1 at offset 7 in '<some bytes>' len=8 at org.apache.commons.compress.archivers.tar.TarUtils.parseOctal(TarUtils.java:86) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:790) at org.apache.commons.compress.archivers.tar.TarArchiveEntry.<init>(TarArchiveEntry.java:308) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry(TarArchiveInputStream.java:198) at org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextEntry(TarArchiveInputStream.java:380) at de.schlichtherle.truezip.fs.archive.tar.TarInputShop.<init>(TarInputShop.java:91) at de.schlichtherle.truezip.fs.archive.tar.TarDriver.newTarInputShop(TarDriver.java:159) at de.schlichtherle.truezip.fs.archive.tar.TarGZipDriver.newTarInputShop(TarGZipDriver.java:82) at de.schlichtherle.truezip.fs.archive.tar.TarDriver.newInputShop(TarDriver.java:151) at de.schlichtherle.truezip.fs.archive.tar.TarDriver.newInputShop(TarDriver.java:47) at de.schlichtherle.truezip.fs.archive.FsDefaultArchiveController.mount(FsDefaultArchiveController.java:170) at de.schlichtherle.truezip.fs.archive.FsFileSystemArchiveController$ResetFileSystem.autoMount(FsFileSystemArchiveController.java:98) at de.schlichtherle.truezip.fs.archive.FsFileSystemArchiveController.autoMount(FsFileSystemArchiveController.java:47) at de.schlichtherle.truezip.fs.archive.FsArchiveController.autoMount(FsArchiveController.java:129) at de.schlichtherle.truezip.fs.archive.FsArchiveController.getEntry(FsArchiveController.java:160) at de.schlichtherle.truezip.fs.archive.FsContextController.getEntry(FsContextController.java:117) at de.schlichtherle.truezip.fs.FsDecoratingController.getEntry(FsDecoratingController.java:76) at 
de.schlichtherle.truezip.fs.FsDecoratingController.getEntry(FsDecoratingController.java:76) at de.schlichtherle.truezip.fs.FsConcurrentController.getEntry(FsConcurrentController.java:164) at de.schlichtherle.truezip.fs.FsSyncController.getEntry(FsSyncController.java:108) at de.schlichtherle.truezip.fs.FsFederatingController.getEntry(FsFederatingController.java:156) at de.schlichtherle.truezip.nio.file.TFileSystem.newDirectoryStream(TFileSystem.java:348) at de.schlichtherle.truezip.nio.file.TPath.newDirectoryStream(TPath.java:963) at de.schlichtherle.truezip.nio.file.TFileSystemProvider.newDirectoryStream(TFileSystemProvider.java:344) at java.nio.file.Files.newDirectoryStream(Files.java:400) at com.googlecode.boostmavenproject.GetSourcesMojo.convertToJar(GetSourcesMojo.java:248) at com.googlecode.boostmavenproject.GetSourcesMojo.download(GetSourcesMojo.java:221) at com.googlecode.boostmavenproject.GetSourcesMojo.execute(GetSourcesMojo.java:111) at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:101) ... 20 more {code} Expected behavior: TarArchiveInputStream should wrap the IllegalArgumentException in an IOException.
public TarArchiveEntry getNextTarEntry() throws IOException { if (hasHitEOF) { return null; } if (currEntry != null) { long numToSkip = entrySize - entryOffset; while (numToSkip > 0) { long skipped = skip(numToSkip); if (skipped <= 0) { throw new RuntimeException("failed to skip current tar entry"); } numToSkip -= skipped; } readBuf = null; } byte[] headerBuf = getRecord(); if (hasHitEOF) { currEntry = null; return null; } currEntry = new TarArchiveEntry(headerBuf); entryOffset = 0; entrySize = currEntry.getSize(); if (currEntry.isGNULongNameEntry()) { // read in the name StringBuffer longName = new StringBuffer(); byte[] buf = new byte[SMALL_BUFFER_SIZE]; int length = 0; while ((length = read(buf)) >= 0) { longName.append(new String(buf, 0, length)); } getNextEntry(); if (currEntry == null) { // Bugzilla: 40334 // Malformed tar file - long entry name not followed by entry return null; } // remove trailing null terminator if (longName.length() > 0 && longName.charAt(longName.length() - 1) == 0) { longName.deleteCharAt(longName.length() - 1); } currEntry.setName(longName.toString()); } if (currEntry.isPaxHeader()){ // Process Pax headers paxHeaders(); } if (currEntry.isGNUSparse()){ // Process sparse files readGNUSparse(); } // If the size of the next element in the archive has changed // due to a new size being reported in the posix header // information, we update entrySize here so that it contains // the correct value. entrySize = currEntry.getSize(); return currEntry; }
public TarArchiveEntry getNextTarEntry ( ) throws IOException { if ( hasHitEOF ) { return null ; } if ( currEntry != null ) { long numToSkip = entrySize - entryOffset ; while ( numToSkip > 0 ) { long skipped = skip ( numToSkip ) ; if ( skipped <= 0 ) { throw new RuntimeException ( "failed to skip current tar entry" ) ; } numToSkip -= skipped ; } readBuf = null ; } byte [ ] headerBuf = getRecord ( ) ; if ( hasHitEOF ) { currEntry = null ; return null ; } currEntry = new TarArchiveEntry ( headerBuf ) ; entryOffset = 0 ; entrySize = currEntry . getSize ( ) ; if ( currEntry . isGNULongNameEntry ( ) ) { StringBuffer longName = new StringBuffer ( ) ; byte [ ] buf = new byte [ SMALL_BUFFER_SIZE ] ; int length = 0 ; while ( ( length = read ( buf ) ) >= 0 ) { longName . append ( new String ( buf , 0 , length ) ) ; } getNextEntry ( ) ; if ( currEntry == null ) { return null ; } if ( longName . length ( ) > 0 && longName . charAt ( longName . length ( ) - 1 ) == 0 ) { longName . deleteCharAt ( longName . length ( ) - 1 ) ; } currEntry . setName ( longName . toString ( ) ) ; } if ( currEntry . isPaxHeader ( ) ) { paxHeaders ( ) ; } if ( currEntry . isGNUSparse ( ) ) { readGNUSparse ( ) ; } entrySize = currEntry . getSize ( ) ; return currEntry ; }
public TarArchiveEntry getNextTarEntry() throws IOException { if (hasHitEOF) { return null; } if (currEntry != null) { long numToSkip = entrySize - entryOffset; while (numToSkip > 0) { long skipped = skip(numToSkip); if (skipped <= 0) { throw new RuntimeException("failed to skip current tar entry"); } numToSkip -= skipped; } readBuf = null; } byte[] headerBuf = getRecord(); if (hasHitEOF) { currEntry = null; return null; } try { currEntry = new TarArchiveEntry(headerBuf); } catch (IllegalArgumentException e) { IOException ioe = new IOException("Error detected parsing the header"); ioe.initCause(e); throw ioe; } entryOffset = 0; entrySize = currEntry.getSize(); if (currEntry.isGNULongNameEntry()) { // read in the name StringBuffer longName = new StringBuffer(); byte[] buf = new byte[SMALL_BUFFER_SIZE]; int length = 0; while ((length = read(buf)) >= 0) { longName.append(new String(buf, 0, length)); } getNextEntry(); if (currEntry == null) { // Bugzilla: 40334 // Malformed tar file - long entry name not followed by entry return null; } // remove trailing null terminator if (longName.length() > 0 && longName.charAt(longName.length() - 1) == 0) { longName.deleteCharAt(longName.length() - 1); } currEntry.setName(longName.toString()); } if (currEntry.isPaxHeader()){ // Process Pax headers paxHeaders(); } if (currEntry.isGNUSparse()){ // Process sparse files readGNUSparse(); } // If the size of the next element in the archive has changed // due to a new size being reported in the posix header // information, we update entrySize here so that it contains // the correct value. entrySize = currEntry.getSize(); return currEntry; }
public TarArchiveEntry getNextTarEntry ( ) throws IOException { if ( hasHitEOF ) { return null ; } if ( currEntry != null ) { long numToSkip = entrySize - entryOffset ; while ( numToSkip > 0 ) { long skipped = skip ( numToSkip ) ; if ( skipped <= 0 ) { throw new RuntimeException ( "failed to skip current tar entry" ) ; } numToSkip -= skipped ; } readBuf = null ; } byte [ ] headerBuf = getRecord ( ) ; if ( hasHitEOF ) { currEntry = null ; return null ; } try { currEntry = new TarArchiveEntry ( headerBuf ) ; } catch ( IllegalArgumentException e ) { IOException ioe = new IOException ( "Error detected parsing the header" ) ; ioe . initCause ( e ) ; throw ioe ; } entryOffset = 0 ; entrySize = currEntry . getSize ( ) ; if ( currEntry . isGNULongNameEntry ( ) ) { StringBuffer longName = new StringBuffer ( ) ; byte [ ] buf = new byte [ SMALL_BUFFER_SIZE ] ; int length = 0 ; while ( ( length = read ( buf ) ) >= 0 ) { longName . append ( new String ( buf , 0 , length ) ) ; } getNextEntry ( ) ; if ( currEntry == null ) { return null ; } if ( longName . length ( ) > 0 && longName . charAt ( longName . length ( ) - 1 ) == 0 ) { longName . deleteCharAt ( longName . length ( ) - 1 ) ; } currEntry . setName ( longName . toString ( ) ) ; } if ( currEntry . isPaxHeader ( ) ) { paxHeaders ( ) ; } if ( currEntry . isGNUSparse ( ) ) { readGNUSparse ( ) ; } entrySize = currEntry . getSize ( ) ; return currEntry ; }
Compress
13
src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java
511
513
ArchiveInputStream#getNextEntry(): Problems with WinZip directories with Umlauts
There is a problem when handling a WinZip-created zip with Umlauts in directories. I'm accessing a zip file created with WinZip containing a directory with an umlaut ("ä") with ArchiveInputStream. When creating the zip file the unicode-flag of winzip had been active. The following problem occurs when accessing the entries of the zip: the ArchiveEntry for a directory containing an umlaut is not marked as a directory and the file names for the directory and all files contained in that directory contain backslashes instead of slashes (i.e. completely different to all other files in directories with no umlaut in their path). There is no difference when letting the ArchiveStreamFactory decide which ArchiveInputStream to create or when using the ZipArchiveInputStream constructor with the correct encoding (I've tried different encodings CP437, CP850, ISO-8859-15, but still the problem persisted). This problem does not occur when using the very same zip file but compressed by 7zip or the built-in Windows 7 zip functionality.
protected void setName(String name) { this.name = name; }
protected void setName ( String name ) { this . name = name ; }
protected void setName(String name) { if (name != null && getPlatform() == PLATFORM_FAT && name.indexOf("/") == -1) { name = name.replace('\\', '/'); } this.name = name; }
protected void setName ( String name ) { if ( name != null && getPlatform ( ) == PLATFORM_FAT && name . indexOf ( "/" ) == - 1 ) { name = name . replace ( '\\' , '/' ) ; } this . name = name ; }
JacksonDatabind
96
src/main/java/com/fasterxml/jackson/databind/deser/BasicDeserializerFactory.java
701
752
Implicit constructor property names are not renamed properly with `PropertyNamingStrategy`
(note: spin-off from https://github.com/FasterXML/jackson-modules-java8/issues/67) Looks like something with linking of creator properties (constructor arguments for annotated/discovered constructor) to "regular" properties does not work when using `PropertyNamingStrategy`. Apparently this was working better until 2.9.1, but broke with 2.9.2.
protected void _addExplicitAnyCreator(DeserializationContext ctxt, BeanDescription beanDesc, CreatorCollector creators, CreatorCandidate candidate) throws JsonMappingException { // Looks like there's bit of magic regarding 1-parameter creators; others simpler: if (1 != candidate.paramCount()) { // Ok: for delegates, we want one and exactly one parameter without // injection AND without name int oneNotInjected = candidate.findOnlyParamWithoutInjection(); if (oneNotInjected >= 0) { // getting close; but most not have name if (candidate.paramName(oneNotInjected) == null) { _addExplicitDelegatingCreator(ctxt, beanDesc, creators, candidate); return; } } _addExplicitPropertyCreator(ctxt, beanDesc, creators, candidate); return; } AnnotatedParameter param = candidate.parameter(0); JacksonInject.Value injectId = candidate.injection(0); PropertyName paramName = candidate.explicitParamName(0); BeanPropertyDefinition paramDef = candidate.propertyDef(0); // If there's injection or explicit name, should be properties-based boolean useProps = (paramName != null) || (injectId != null); if (!useProps && (paramDef != null)) { // One more thing: if implicit name matches property with a getter // or field, we'll consider it property-based as well // 25-May-2018, tatu: as per [databind#2051], looks like we have to get // not implicit name, but name with possible strategy-based-rename // paramName = candidate.findImplicitParamName(0); paramName = candidate.findImplicitParamName(0); useProps = (paramName != null) && paramDef.couldSerialize(); } if (useProps) { SettableBeanProperty[] properties = new SettableBeanProperty[] { constructCreatorProperty(ctxt, beanDesc, paramName, 0, param, injectId) }; creators.addPropertyCreator(candidate.creator(), true, properties); return; } _handleSingleArgumentCreator(creators, candidate.creator(), true, true); // one more thing: sever link to creator property, to avoid possible later // problems with "unresolved" constructor property if (paramDef != 
null) { ((POJOPropertyBuilder) paramDef).removeConstructors(); } }
protected void _addExplicitAnyCreator ( DeserializationContext ctxt , BeanDescription beanDesc , CreatorCollector creators , CreatorCandidate candidate ) throws JsonMappingException { if ( 1 != candidate . paramCount ( ) ) { int oneNotInjected = candidate . findOnlyParamWithoutInjection ( ) ; if ( oneNotInjected >= 0 ) { if ( candidate . paramName ( oneNotInjected ) == null ) { _addExplicitDelegatingCreator ( ctxt , beanDesc , creators , candidate ) ; return ; } } _addExplicitPropertyCreator ( ctxt , beanDesc , creators , candidate ) ; return ; } AnnotatedParameter param = candidate . parameter ( 0 ) ; JacksonInject . Value injectId = candidate . injection ( 0 ) ; PropertyName paramName = candidate . explicitParamName ( 0 ) ; BeanPropertyDefinition paramDef = candidate . propertyDef ( 0 ) ; boolean useProps = ( paramName != null ) || ( injectId != null ) ; if ( ! useProps && ( paramDef != null ) ) { paramName = candidate . findImplicitParamName ( 0 ) ; useProps = ( paramName != null ) && paramDef . couldSerialize ( ) ; } if ( useProps ) { SettableBeanProperty [ ] properties = new SettableBeanProperty [ ] { constructCreatorProperty ( ctxt , beanDesc , paramName , 0 , param , injectId ) } ; creators . addPropertyCreator ( candidate . creator ( ) , true , properties ) ; return ; } _handleSingleArgumentCreator ( creators , candidate . creator ( ) , true , true ) ; if ( paramDef != null ) { ( ( POJOPropertyBuilder ) paramDef ) . removeConstructors ( ) ; } }
protected void _addExplicitAnyCreator(DeserializationContext ctxt, BeanDescription beanDesc, CreatorCollector creators, CreatorCandidate candidate) throws JsonMappingException { // Looks like there's bit of magic regarding 1-parameter creators; others simpler: if (1 != candidate.paramCount()) { // Ok: for delegates, we want one and exactly one parameter without // injection AND without name int oneNotInjected = candidate.findOnlyParamWithoutInjection(); if (oneNotInjected >= 0) { // getting close; but most not have name if (candidate.paramName(oneNotInjected) == null) { _addExplicitDelegatingCreator(ctxt, beanDesc, creators, candidate); return; } } _addExplicitPropertyCreator(ctxt, beanDesc, creators, candidate); return; } AnnotatedParameter param = candidate.parameter(0); JacksonInject.Value injectId = candidate.injection(0); PropertyName paramName = candidate.explicitParamName(0); BeanPropertyDefinition paramDef = candidate.propertyDef(0); // If there's injection or explicit name, should be properties-based boolean useProps = (paramName != null) || (injectId != null); if (!useProps && (paramDef != null)) { // One more thing: if implicit name matches property with a getter // or field, we'll consider it property-based as well // 25-May-2018, tatu: as per [databind#2051], looks like we have to get // not implicit name, but name with possible strategy-based-rename // paramName = candidate.findImplicitParamName(0); paramName = candidate.paramName(0); useProps = (paramName != null) && paramDef.couldSerialize(); } if (useProps) { SettableBeanProperty[] properties = new SettableBeanProperty[] { constructCreatorProperty(ctxt, beanDesc, paramName, 0, param, injectId) }; creators.addPropertyCreator(candidate.creator(), true, properties); return; } _handleSingleArgumentCreator(creators, candidate.creator(), true, true); // one more thing: sever link to creator property, to avoid possible later // problems with "unresolved" constructor property if (paramDef != null) { 
((POJOPropertyBuilder) paramDef).removeConstructors(); } }
protected void _addExplicitAnyCreator ( DeserializationContext ctxt , BeanDescription beanDesc , CreatorCollector creators , CreatorCandidate candidate ) throws JsonMappingException { if ( 1 != candidate . paramCount ( ) ) { int oneNotInjected = candidate . findOnlyParamWithoutInjection ( ) ; if ( oneNotInjected >= 0 ) { if ( candidate . paramName ( oneNotInjected ) == null ) { _addExplicitDelegatingCreator ( ctxt , beanDesc , creators , candidate ) ; return ; } } _addExplicitPropertyCreator ( ctxt , beanDesc , creators , candidate ) ; return ; } AnnotatedParameter param = candidate . parameter ( 0 ) ; JacksonInject . Value injectId = candidate . injection ( 0 ) ; PropertyName paramName = candidate . explicitParamName ( 0 ) ; BeanPropertyDefinition paramDef = candidate . propertyDef ( 0 ) ; boolean useProps = ( paramName != null ) || ( injectId != null ) ; if ( ! useProps && ( paramDef != null ) ) { paramName = candidate . paramName ( 0 ) ; useProps = ( paramName != null ) && paramDef . couldSerialize ( ) ; } if ( useProps ) { SettableBeanProperty [ ] properties = new SettableBeanProperty [ ] { constructCreatorProperty ( ctxt , beanDesc , paramName , 0 , param , injectId ) } ; creators . addPropertyCreator ( candidate . creator ( ) , true , properties ) ; return ; } _handleSingleArgumentCreator ( creators , candidate . creator ( ) , true , true ) ; if ( paramDef != null ) { ( ( POJOPropertyBuilder ) paramDef ) . removeConstructors ( ) ; } }
Compress
44
src/main/java/org/apache/commons/compress/utils/ChecksumCalculatingInputStream.java
33
39
NullPointerException defect in ChecksumCalculatingInputStream#getValue()
NullPointerException defect in ChecksumCalculatingInputStream#getValue() detected as stated in pull request 33: https://github.com/apache/commons-compress/pull/33 Furthermore the following test describes the problem: {code:java} @Test(expected = NullPointerException.class) //I assume this behaviour to be a bug or at least a defect. public void testGetValueThrowsNullPointerException() { ChecksumCalculatingInputStream checksumCalculatingInputStream = new ChecksumCalculatingInputStream(null,null); checksumCalculatingInputStream.getValue(); } {code}
public ChecksumCalculatingInputStream(final Checksum checksum, final InputStream in) { this.checksum = checksum; this.in = in; }
public ChecksumCalculatingInputStream ( final Checksum checksum , final InputStream in ) { this . checksum = checksum ; this . in = in ; }
public ChecksumCalculatingInputStream(final Checksum checksum, final InputStream in) { if ( checksum == null ){ throw new NullPointerException("Parameter checksum must not be null"); } if ( in == null ){ throw new NullPointerException("Parameter in must not be null"); } this.checksum = checksum; this.in = in; }
public ChecksumCalculatingInputStream ( final Checksum checksum , final InputStream in ) { if ( checksum == null ) { throw new NullPointerException ( "Parameter checksum must not be null" ) ; } if ( in == null ) { throw new NullPointerException ( "Parameter in must not be null" ) ; } this . checksum = checksum ; this . in = in ; }
Math
7
src/main/java/org/apache/commons/math3/ode/AbstractIntegrator.java
296
405
event state not updated if an unrelated event triggers a RESET_STATE during ODE integration
When an ODE solver manages several different event types, there are some unwanted side effects. If one event handler asks for a RESET_STATE (for integration state) when its eventOccurred method is called, the other event handlers that did not trigger an event in the same step are not updated correctly, due to an early return. As a result, when the next step is processed with a reset integration state, the forgotten event still refer to the start date of the previous state. This implies that when these event handlers will be checked for In some cases, the function defining an event g(double t, double[] y) is called with state parameters y that are completely wrong. In one case when the y array should have contained values between -1 and +1, one function call got values up to 1.0e20. The attached file reproduces the problem.
protected double acceptStep(final AbstractStepInterpolator interpolator, final double[] y, final double[] yDot, final double tEnd) throws MaxCountExceededException, DimensionMismatchException, NoBracketingException { double previousT = interpolator.getGlobalPreviousTime(); final double currentT = interpolator.getGlobalCurrentTime(); // initialize the events states if needed if (! statesInitialized) { for (EventState state : eventsStates) { state.reinitializeBegin(interpolator); } statesInitialized = true; } // search for next events that may occur during the step final int orderingSign = interpolator.isForward() ? +1 : -1; SortedSet<EventState> occuringEvents = new TreeSet<EventState>(new Comparator<EventState>() { /** {@inheritDoc} */ public int compare(EventState es0, EventState es1) { return orderingSign * Double.compare(es0.getEventTime(), es1.getEventTime()); } }); for (final EventState state : eventsStates) { if (state.evaluateStep(interpolator)) { // the event occurs during the current step occuringEvents.add(state); } } while (!occuringEvents.isEmpty()) { // handle the chronologically first event final Iterator<EventState> iterator = occuringEvents.iterator(); final EventState currentEvent = iterator.next(); iterator.remove(); // restrict the interpolator to the first part of the step, up to the event final double eventT = currentEvent.getEventTime(); interpolator.setSoftPreviousTime(previousT); interpolator.setSoftCurrentTime(eventT); // get state at event time interpolator.setInterpolatedTime(eventT); final double[] eventY = interpolator.getInterpolatedState().clone(); // advance all event states to current time currentEvent.stepAccepted(eventT, eventY); isLastStep = currentEvent.stop(); // handle the first part of the step, up to the event for (final StepHandler handler : stepHandlers) { handler.handleStep(interpolator, isLastStep); } if (isLastStep) { // the event asked to stop integration System.arraycopy(eventY, 0, y, 0, y.length); for (final 
EventState remaining : occuringEvents) { remaining.stepAccepted(eventT, eventY); } return eventT; } boolean needReset = currentEvent.reset(eventT, eventY); if (needReset) { // some event handler has triggered changes that // invalidate the derivatives, we need to recompute them System.arraycopy(eventY, 0, y, 0, y.length); computeDerivatives(eventT, y, yDot); resetOccurred = true; for (final EventState remaining : occuringEvents) { remaining.stepAccepted(eventT, eventY); } return eventT; } // prepare handling of the remaining part of the step previousT = eventT; interpolator.setSoftPreviousTime(eventT); interpolator.setSoftCurrentTime(currentT); // check if the same event occurs again in the remaining part of the step if (currentEvent.evaluateStep(interpolator)) { // the event occurs during the current step occuringEvents.add(currentEvent); } } // last part of the step, after the last event interpolator.setInterpolatedTime(currentT); final double[] currentY = interpolator.getInterpolatedState(); for (final EventState state : eventsStates) { state.stepAccepted(currentT, currentY); isLastStep = isLastStep || state.stop(); } isLastStep = isLastStep || Precision.equals(currentT, tEnd, 1); // handle the remaining part of the step, after all events if any for (StepHandler handler : stepHandlers) { handler.handleStep(interpolator, isLastStep); } return currentT; }
protected double acceptStep ( final AbstractStepInterpolator interpolator , final double [ ] y , final double [ ] yDot , final double tEnd ) throws MaxCountExceededException , DimensionMismatchException , NoBracketingException { double previousT = interpolator . getGlobalPreviousTime ( ) ; final double currentT = interpolator . getGlobalCurrentTime ( ) ; if ( ! statesInitialized ) { for ( EventState state : eventsStates ) { state . reinitializeBegin ( interpolator ) ; } statesInitialized = true ; } final int orderingSign = interpolator . isForward ( ) ? + 1 : - 1 ; SortedSet < EventState > occuringEvents = new TreeSet < EventState > ( new Comparator < EventState > ( ) { public int compare ( EventState es0 , EventState es1 ) { return orderingSign * Double . compare ( es0 . getEventTime ( ) , es1 . getEventTime ( ) ) ; } } ) ; for ( final EventState state : eventsStates ) { if ( state . evaluateStep ( interpolator ) ) { occuringEvents . add ( state ) ; } } while ( ! occuringEvents . isEmpty ( ) ) { final Iterator < EventState > iterator = occuringEvents . iterator ( ) ; final EventState currentEvent = iterator . next ( ) ; iterator . remove ( ) ; final double eventT = currentEvent . getEventTime ( ) ; interpolator . setSoftPreviousTime ( previousT ) ; interpolator . setSoftCurrentTime ( eventT ) ; interpolator . setInterpolatedTime ( eventT ) ; final double [ ] eventY = interpolator . getInterpolatedState ( ) . clone ( ) ; currentEvent . stepAccepted ( eventT , eventY ) ; isLastStep = currentEvent . stop ( ) ; for ( final StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } if ( isLastStep ) { System . arraycopy ( eventY , 0 , y , 0 , y . length ) ; for ( final EventState remaining : occuringEvents ) { remaining . stepAccepted ( eventT , eventY ) ; } return eventT ; } boolean needReset = currentEvent . reset ( eventT , eventY ) ; if ( needReset ) { System . arraycopy ( eventY , 0 , y , 0 , y . 
length ) ; computeDerivatives ( eventT , y , yDot ) ; resetOccurred = true ; for ( final EventState remaining : occuringEvents ) { remaining . stepAccepted ( eventT , eventY ) ; } return eventT ; } previousT = eventT ; interpolator . setSoftPreviousTime ( eventT ) ; interpolator . setSoftCurrentTime ( currentT ) ; if ( currentEvent . evaluateStep ( interpolator ) ) { occuringEvents . add ( currentEvent ) ; } } interpolator . setInterpolatedTime ( currentT ) ; final double [ ] currentY = interpolator . getInterpolatedState ( ) ; for ( final EventState state : eventsStates ) { state . stepAccepted ( currentT , currentY ) ; isLastStep = isLastStep || state . stop ( ) ; } isLastStep = isLastStep || Precision . equals ( currentT , tEnd , 1 ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } return currentT ; }
protected double acceptStep(final AbstractStepInterpolator interpolator, final double[] y, final double[] yDot, final double tEnd) throws MaxCountExceededException, DimensionMismatchException, NoBracketingException { double previousT = interpolator.getGlobalPreviousTime(); final double currentT = interpolator.getGlobalCurrentTime(); // initialize the events states if needed if (! statesInitialized) { for (EventState state : eventsStates) { state.reinitializeBegin(interpolator); } statesInitialized = true; } // search for next events that may occur during the step final int orderingSign = interpolator.isForward() ? +1 : -1; SortedSet<EventState> occuringEvents = new TreeSet<EventState>(new Comparator<EventState>() { /** {@inheritDoc} */ public int compare(EventState es0, EventState es1) { return orderingSign * Double.compare(es0.getEventTime(), es1.getEventTime()); } }); for (final EventState state : eventsStates) { if (state.evaluateStep(interpolator)) { // the event occurs during the current step occuringEvents.add(state); } } while (!occuringEvents.isEmpty()) { // handle the chronologically first event final Iterator<EventState> iterator = occuringEvents.iterator(); final EventState currentEvent = iterator.next(); iterator.remove(); // restrict the interpolator to the first part of the step, up to the event final double eventT = currentEvent.getEventTime(); interpolator.setSoftPreviousTime(previousT); interpolator.setSoftCurrentTime(eventT); // get state at event time interpolator.setInterpolatedTime(eventT); final double[] eventY = interpolator.getInterpolatedState().clone(); // advance all event states to current time for (final EventState state : eventsStates) { state.stepAccepted(eventT, eventY); isLastStep = isLastStep || state.stop(); } // handle the first part of the step, up to the event for (final StepHandler handler : stepHandlers) { handler.handleStep(interpolator, isLastStep); } if (isLastStep) { // the event asked to stop integration 
System.arraycopy(eventY, 0, y, 0, y.length); return eventT; } boolean needReset = false; for (final EventState state : eventsStates) { needReset = needReset || state.reset(eventT, eventY); } if (needReset) { // some event handler has triggered changes that // invalidate the derivatives, we need to recompute them System.arraycopy(eventY, 0, y, 0, y.length); computeDerivatives(eventT, y, yDot); resetOccurred = true; return eventT; } // prepare handling of the remaining part of the step previousT = eventT; interpolator.setSoftPreviousTime(eventT); interpolator.setSoftCurrentTime(currentT); // check if the same event occurs again in the remaining part of the step if (currentEvent.evaluateStep(interpolator)) { // the event occurs during the current step occuringEvents.add(currentEvent); } } // last part of the step, after the last event interpolator.setInterpolatedTime(currentT); final double[] currentY = interpolator.getInterpolatedState(); for (final EventState state : eventsStates) { state.stepAccepted(currentT, currentY); isLastStep = isLastStep || state.stop(); } isLastStep = isLastStep || Precision.equals(currentT, tEnd, 1); // handle the remaining part of the step, after all events if any for (StepHandler handler : stepHandlers) { handler.handleStep(interpolator, isLastStep); } return currentT; }
protected double acceptStep ( final AbstractStepInterpolator interpolator , final double [ ] y , final double [ ] yDot , final double tEnd ) throws MaxCountExceededException , DimensionMismatchException , NoBracketingException { double previousT = interpolator . getGlobalPreviousTime ( ) ; final double currentT = interpolator . getGlobalCurrentTime ( ) ; if ( ! statesInitialized ) { for ( EventState state : eventsStates ) { state . reinitializeBegin ( interpolator ) ; } statesInitialized = true ; } final int orderingSign = interpolator . isForward ( ) ? + 1 : - 1 ; SortedSet < EventState > occuringEvents = new TreeSet < EventState > ( new Comparator < EventState > ( ) { public int compare ( EventState es0 , EventState es1 ) { return orderingSign * Double . compare ( es0 . getEventTime ( ) , es1 . getEventTime ( ) ) ; } } ) ; for ( final EventState state : eventsStates ) { if ( state . evaluateStep ( interpolator ) ) { occuringEvents . add ( state ) ; } } while ( ! occuringEvents . isEmpty ( ) ) { final Iterator < EventState > iterator = occuringEvents . iterator ( ) ; final EventState currentEvent = iterator . next ( ) ; iterator . remove ( ) ; final double eventT = currentEvent . getEventTime ( ) ; interpolator . setSoftPreviousTime ( previousT ) ; interpolator . setSoftCurrentTime ( eventT ) ; interpolator . setInterpolatedTime ( eventT ) ; final double [ ] eventY = interpolator . getInterpolatedState ( ) . clone ( ) ; for ( final EventState state : eventsStates ) { state . stepAccepted ( eventT , eventY ) ; isLastStep = isLastStep || state . stop ( ) ; } for ( final StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } if ( isLastStep ) { System . arraycopy ( eventY , 0 , y , 0 , y . length ) ; return eventT ; } boolean needReset = false ; for ( final EventState state : eventsStates ) { needReset = needReset || state . reset ( eventT , eventY ) ; } if ( needReset ) { System . arraycopy ( eventY , 0 , y , 0 , y . 
length ) ; computeDerivatives ( eventT , y , yDot ) ; resetOccurred = true ; return eventT ; } previousT = eventT ; interpolator . setSoftPreviousTime ( eventT ) ; interpolator . setSoftCurrentTime ( currentT ) ; if ( currentEvent . evaluateStep ( interpolator ) ) { occuringEvents . add ( currentEvent ) ; } } interpolator . setInterpolatedTime ( currentT ) ; final double [ ] currentY = interpolator . getInterpolatedState ( ) ; for ( final EventState state : eventsStates ) { state . stepAccepted ( currentT , currentY ) ; isLastStep = isLastStep || state . stop ( ) ; } isLastStep = isLastStep || Precision . equals ( currentT , tEnd , 1 ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } return currentT ; }
Mockito
15
src/org/mockito/internal/configuration/injection/FinalMockCandidateFilter.java
18
40
@InjectMocks should carry out their work by the method (and not by field) if available
@InjectMocks should carry out their work by the method (and not by field) if available. Now @InjectMocks work in properties and this affects CodeCoberage reports :)
public OngoingInjecter filterCandidate(final Collection<Object> mocks, final Field field, final Object fieldInstance) { if(mocks.size() == 1) { final Object matchingMock = mocks.iterator().next(); return new OngoingInjecter() { public boolean thenInject() { try { new FieldSetter(fieldInstance, field).set(matchingMock); } catch (Exception e) { throw new MockitoException("Problems injecting dependency in " + field.getName(), e); } return true; } }; } return new OngoingInjecter() { public boolean thenInject() { return false; } }; }
public OngoingInjecter filterCandidate ( final Collection < Object > mocks , final Field field , final Object fieldInstance ) { if ( mocks . size ( ) == 1 ) { final Object matchingMock = mocks . iterator ( ) . next ( ) ; return new OngoingInjecter ( ) { public boolean thenInject ( ) { try { new FieldSetter ( fieldInstance , field ) . set ( matchingMock ) ; } catch ( Exception e ) { throw new MockitoException ( "Problems injecting dependency in " + field . getName ( ) , e ) ; } return true ; } } ; } return new OngoingInjecter ( ) { public boolean thenInject ( ) { return false ; } } ; }
public OngoingInjecter filterCandidate(final Collection<Object> mocks, final Field field, final Object fieldInstance) { if(mocks.size() == 1) { final Object matchingMock = mocks.iterator().next(); return new OngoingInjecter() { public boolean thenInject() { try { if (!new BeanPropertySetter(fieldInstance, field).set(matchingMock)) { new FieldSetter(fieldInstance, field).set(matchingMock); } } catch (Exception e) { throw new MockitoException("Problems injecting dependency in " + field.getName(), e); } return true; } }; } return new OngoingInjecter() { public boolean thenInject() { return false; } }; }
public OngoingInjecter filterCandidate ( final Collection < Object > mocks , final Field field , final Object fieldInstance ) { if ( mocks . size ( ) == 1 ) { final Object matchingMock = mocks . iterator ( ) . next ( ) ; return new OngoingInjecter ( ) { public boolean thenInject ( ) { try { if ( ! new BeanPropertySetter ( fieldInstance , field ) . set ( matchingMock ) ) { new FieldSetter ( fieldInstance , field ) . set ( matchingMock ) ; } } catch ( Exception e ) { throw new MockitoException ( "Problems injecting dependency in " + field . getName ( ) , e ) ; } return true ; } } ; } return new OngoingInjecter ( ) { public boolean thenInject ( ) { return false ; } } ; }