Size Categories: n<1K
Dataset schema (column: feature type, observed range):

defects4j_project: stringclasses, 12 values
defects4j_bug_id: stringlengths, 1–3
file_path: stringlengths, 38–95
bug_start_line: stringlengths, 1–4
bug_end_line: stringlengths, 2–4
issue_title: stringlengths, 13–150
issue_description: stringlengths, 4–8.74k
original_src: stringlengths, 44–9k
original_src_wo_comments: stringlengths, 38–5.83k
fixed_src: stringlengths, 40–9.55k
fixed_src_wo_comments: stringlengths, 34–5.76k
defects4j_project: Math
defects4j_bug_id: 19
file_path: src/main/java/org/apache/commons/math3/optimization/direct/CMAESOptimizer.java
bug_start_line: 504
bug_end_line: 561
issue_title: Wide bounds to CMAESOptimizer result in NaN parameters passed to fitness function
If you give large values as lower/upper bounds (for example -Double.MAX_VALUE as a lower bound), the optimizer can call the fitness function with parameters set to NaN. My guess is this is due to FitnessFunction.encode/decode generating NaN when normalizing/denormalizing parameters. For example, if the difference between the lower and upper bound is greater than Double.MAX_VALUE, encode could divide infinity by infinity.
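A minimal sketch (plain Java, variable names mine) of the failure mode the reporter describes: when the distance between the bounds overflows to infinity, a decode-style denormalization can multiply zero by infinity, which yields NaN.

```java
public class BoundOverflowDemo {
    public static void main(String[] args) {
        double lo = -Double.MAX_VALUE;
        double hi = Double.MAX_VALUE;

        double diff = hi - lo;             // exceeds Double.MAX_VALUE, overflows to Infinity
        System.out.println(diff);          // Infinity

        // decode-style denormalization of a normalized parameter v: lo + v * diff
        double v = 0.0;
        System.out.println(lo + v * diff); // 0 * Infinity = NaN, so the whole sum is NaN
    }
}
```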
private void checkParameters() { final double[] init = getStartPoint(); final double[] lB = getLowerBound(); final double[] uB = getUpperBound(); // Checks whether there is at least one finite bound value. boolean hasFiniteBounds = false; for (int i = 0; i < lB.length; i++) { if (!Double.isInfinite(lB[i]) || !Double.isInfinite(uB[i])) { hasFiniteBounds = true; break; } } // Checks whether there is at least one infinite bound value. boolean hasInfiniteBounds = false; if (hasFiniteBounds) { for (int i = 0; i < lB.length; i++) { if (Double.isInfinite(lB[i]) || Double.isInfinite(uB[i])) { hasInfiniteBounds = true; break; } } if (hasInfiniteBounds) { // If there is at least one finite bound, none can be infinite, // because mixed cases are not supported by the current code. throw new MathUnsupportedOperationException(); } else { // Convert API to internal handling of boundaries. boundaries = new double[2][]; boundaries[0] = lB; boundaries[1] = uB; // Abort early if the normalization will overflow (cf. "encode" method). } } else { // Convert API to internal handling of boundaries. boundaries = null; } if (inputSigma != null) { if (inputSigma.length != init.length) { throw new DimensionMismatchException(inputSigma.length, init.length); } for (int i = 0; i < init.length; i++) { if (inputSigma[i] < 0) { throw new NotPositiveException(inputSigma[i]); } if (boundaries != null) { if (inputSigma[i] > boundaries[1][i] - boundaries[0][i]) { throw new OutOfRangeException(inputSigma[i], 0, boundaries[1][i] - boundaries[0][i]); } } } } }
private void checkParameters ( ) { final double [ ] init = getStartPoint ( ) ; final double [ ] lB = getLowerBound ( ) ; final double [ ] uB = getUpperBound ( ) ; boolean hasFiniteBounds = false ; for ( int i = 0 ; i < lB . length ; i ++ ) { if ( ! Double . isInfinite ( lB [ i ] ) || ! Double . isInfinite ( uB [ i ] ) ) { hasFiniteBounds = true ; break ; } } boolean hasInfiniteBounds = false ; if ( hasFiniteBounds ) { for ( int i = 0 ; i < lB . length ; i ++ ) { if ( Double . isInfinite ( lB [ i ] ) || Double . isInfinite ( uB [ i ] ) ) { hasInfiniteBounds = true ; break ; } } if ( hasInfiniteBounds ) { throw new MathUnsupportedOperationException ( ) ; } else { boundaries = new double [ 2 ] [ ] ; boundaries [ 0 ] = lB ; boundaries [ 1 ] = uB ; } } else { boundaries = null ; } if ( inputSigma != null ) { if ( inputSigma . length != init . length ) { throw new DimensionMismatchException ( inputSigma . length , init . length ) ; } for ( int i = 0 ; i < init . length ; i ++ ) { if ( inputSigma [ i ] < 0 ) { throw new NotPositiveException ( inputSigma [ i ] ) ; } if ( boundaries != null ) { if ( inputSigma [ i ] > boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) { throw new OutOfRangeException ( inputSigma [ i ] , 0 , boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) ; } } } } }
private void checkParameters() { final double[] init = getStartPoint(); final double[] lB = getLowerBound(); final double[] uB = getUpperBound(); // Checks whether there is at least one finite bound value. boolean hasFiniteBounds = false; for (int i = 0; i < lB.length; i++) { if (!Double.isInfinite(lB[i]) || !Double.isInfinite(uB[i])) { hasFiniteBounds = true; break; } } // Checks whether there is at least one infinite bound value. boolean hasInfiniteBounds = false; if (hasFiniteBounds) { for (int i = 0; i < lB.length; i++) { if (Double.isInfinite(lB[i]) || Double.isInfinite(uB[i])) { hasInfiniteBounds = true; break; } } if (hasInfiniteBounds) { // If there is at least one finite bound, none can be infinite, // because mixed cases are not supported by the current code. throw new MathUnsupportedOperationException(); } else { // Convert API to internal handling of boundaries. boundaries = new double[2][]; boundaries[0] = lB; boundaries[1] = uB; // Abort early if the normalization will overflow (cf. "encode" method). for (int i = 0; i < lB.length; i++) { if (Double.isInfinite(boundaries[1][i] - boundaries[0][i])) { final double max = Double.MAX_VALUE + boundaries[0][i]; final NumberIsTooLargeException e = new NumberIsTooLargeException(boundaries[1][i], max, true); e.getContext().addMessage(LocalizedFormats.OVERFLOW); e.getContext().addMessage(LocalizedFormats.INDEX, i); throw e; } } } } else { // Convert API to internal handling of boundaries. boundaries = null; } if (inputSigma != null) { if (inputSigma.length != init.length) { throw new DimensionMismatchException(inputSigma.length, init.length); } for (int i = 0; i < init.length; i++) { if (inputSigma[i] < 0) { throw new NotPositiveException(inputSigma[i]); } if (boundaries != null) { if (inputSigma[i] > boundaries[1][i] - boundaries[0][i]) { throw new OutOfRangeException(inputSigma[i], 0, boundaries[1][i] - boundaries[0][i]); } } } } }
private void checkParameters ( ) { final double [ ] init = getStartPoint ( ) ; final double [ ] lB = getLowerBound ( ) ; final double [ ] uB = getUpperBound ( ) ; boolean hasFiniteBounds = false ; for ( int i = 0 ; i < lB . length ; i ++ ) { if ( ! Double . isInfinite ( lB [ i ] ) || ! Double . isInfinite ( uB [ i ] ) ) { hasFiniteBounds = true ; break ; } } boolean hasInfiniteBounds = false ; if ( hasFiniteBounds ) { for ( int i = 0 ; i < lB . length ; i ++ ) { if ( Double . isInfinite ( lB [ i ] ) || Double . isInfinite ( uB [ i ] ) ) { hasInfiniteBounds = true ; break ; } } if ( hasInfiniteBounds ) { throw new MathUnsupportedOperationException ( ) ; } else { boundaries = new double [ 2 ] [ ] ; boundaries [ 0 ] = lB ; boundaries [ 1 ] = uB ; for ( int i = 0 ; i < lB . length ; i ++ ) { if ( Double . isInfinite ( boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) ) { final double max = Double . MAX_VALUE + boundaries [ 0 ] [ i ] ; final NumberIsTooLargeException e = new NumberIsTooLargeException ( boundaries [ 1 ] [ i ] , max , true ) ; e . getContext ( ) . addMessage ( LocalizedFormats . OVERFLOW ) ; e . getContext ( ) . addMessage ( LocalizedFormats . INDEX , i ) ; throw e ; } } } } else { boundaries = null ; } if ( inputSigma != null ) { if ( inputSigma . length != init . length ) { throw new DimensionMismatchException ( inputSigma . length , init . length ) ; } for ( int i = 0 ; i < init . length ; i ++ ) { if ( inputSigma [ i ] < 0 ) { throw new NotPositiveException ( inputSigma [ i ] ) ; } if ( boundaries != null ) { if ( inputSigma [ i ] > boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) { throw new OutOfRangeException ( inputSigma [ i ] , 0 , boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) ; } } } } }
defects4j_project: Compress
defects4j_bug_id: 16
file_path: src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java
bug_start_line: 197
bug_end_line: 258
issue_title: Too relaxed tar detection in ArchiveStreamFactory
The relaxed tar detection logic added in COMPRESS-117 unfortunately matches also some non-tar files like a [test AIFF file|https://svn.apache.org/repos/asf/tika/trunk/tika-parsers/src/test/resources/test-documents/testAIFF.aif] that Apache Tika uses. It would be good to improve the detection heuristics to still match files like the one in COMPRESS-117 but avoid false positives like the AIFF file in Tika.
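For context, a hedged usage sketch of the auto-detection entry point this issue concerns; createArchiveInputStream requires a mark/reset-capable stream, hence the BufferedInputStream wrapper (the file name is a placeholder):

```java
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class AutoDetectDemo {
    public static void main(String[] args) throws Exception {
        try (InputStream in = new BufferedInputStream(
                Files.newInputStream(Paths.get("some-archive.bin")))) {
            // signature-based detection; the relaxed tar heuristic discussed here runs last
            ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(in);
            System.out.println("Detected: " + ais.getClass().getSimpleName());
        }
    }
}
```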
public ArchiveInputStream createArchiveInputStream(final InputStream in) throws ArchiveException { if (in == null) { throw new IllegalArgumentException("Stream must not be null."); } if (!in.markSupported()) { throw new IllegalArgumentException("Mark is not supported."); } final byte[] signature = new byte[12]; in.mark(signature.length); try { int signatureLength = in.read(signature); in.reset(); if (ZipArchiveInputStream.matches(signature, signatureLength)) { return new ZipArchiveInputStream(in); } else if (JarArchiveInputStream.matches(signature, signatureLength)) { return new JarArchiveInputStream(in); } else if (ArArchiveInputStream.matches(signature, signatureLength)) { return new ArArchiveInputStream(in); } else if (CpioArchiveInputStream.matches(signature, signatureLength)) { return new CpioArchiveInputStream(in); } // Dump needs a bigger buffer to check the signature; final byte[] dumpsig = new byte[32]; in.mark(dumpsig.length); signatureLength = in.read(dumpsig); in.reset(); if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) { return new DumpArchiveInputStream(in); } // Tar needs an even bigger buffer to check the signature; read the first block final byte[] tarheader = new byte[512]; in.mark(tarheader.length); signatureLength = in.read(tarheader); in.reset(); if (TarArchiveInputStream.matches(tarheader, signatureLength)) { return new TarArchiveInputStream(in); } // COMPRESS-117 - improve auto-recognition if (signatureLength >= 512) { try { TarArchiveInputStream tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader)); // COMPRESS-191 - verify the header checksum tais.getNextEntry(); return new TarArchiveInputStream(in); } catch (Exception e) { // NOPMD // can generate IllegalArgumentException as well // as IOException // autodetection, simply not a TAR // ignored } } } catch (IOException e) { throw new ArchiveException("Could not use reset and mark operations.", e); } throw new ArchiveException("No Archiver found for the stream signature"); }
public ArchiveInputStream createArchiveInputStream ( final InputStream in ) throws ArchiveException { if ( in == null ) { throw new IllegalArgumentException ( "Stream must not be null." ) ; } if ( ! in . markSupported ( ) ) { throw new IllegalArgumentException ( "Mark is not supported." ) ; } final byte [ ] signature = new byte [ 12 ] ; in . mark ( signature . length ) ; try { int signatureLength = in . read ( signature ) ; in . reset ( ) ; if ( ZipArchiveInputStream . matches ( signature , signatureLength ) ) { return new ZipArchiveInputStream ( in ) ; } else if ( JarArchiveInputStream . matches ( signature , signatureLength ) ) { return new JarArchiveInputStream ( in ) ; } else if ( ArArchiveInputStream . matches ( signature , signatureLength ) ) { return new ArArchiveInputStream ( in ) ; } else if ( CpioArchiveInputStream . matches ( signature , signatureLength ) ) { return new CpioArchiveInputStream ( in ) ; } final byte [ ] dumpsig = new byte [ 32 ] ; in . mark ( dumpsig . length ) ; signatureLength = in . read ( dumpsig ) ; in . reset ( ) ; if ( DumpArchiveInputStream . matches ( dumpsig , signatureLength ) ) { return new DumpArchiveInputStream ( in ) ; } final byte [ ] tarheader = new byte [ 512 ] ; in . mark ( tarheader . length ) ; signatureLength = in . read ( tarheader ) ; in . reset ( ) ; if ( TarArchiveInputStream . matches ( tarheader , signatureLength ) ) { return new TarArchiveInputStream ( in ) ; } if ( signatureLength >= 512 ) { try { TarArchiveInputStream tais = new TarArchiveInputStream ( new ByteArrayInputStream ( tarheader ) ) ; tais . getNextEntry ( ) ; return new TarArchiveInputStream ( in ) ; } catch ( Exception e ) { } } } catch ( IOException e ) { throw new ArchiveException ( "Could not use reset and mark operations." , e ) ; } throw new ArchiveException ( "No Archiver found for the stream signature" ) ; }
public ArchiveInputStream createArchiveInputStream(final InputStream in) throws ArchiveException { if (in == null) { throw new IllegalArgumentException("Stream must not be null."); } if (!in.markSupported()) { throw new IllegalArgumentException("Mark is not supported."); } final byte[] signature = new byte[12]; in.mark(signature.length); try { int signatureLength = in.read(signature); in.reset(); if (ZipArchiveInputStream.matches(signature, signatureLength)) { return new ZipArchiveInputStream(in); } else if (JarArchiveInputStream.matches(signature, signatureLength)) { return new JarArchiveInputStream(in); } else if (ArArchiveInputStream.matches(signature, signatureLength)) { return new ArArchiveInputStream(in); } else if (CpioArchiveInputStream.matches(signature, signatureLength)) { return new CpioArchiveInputStream(in); } // Dump needs a bigger buffer to check the signature; final byte[] dumpsig = new byte[32]; in.mark(dumpsig.length); signatureLength = in.read(dumpsig); in.reset(); if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) { return new DumpArchiveInputStream(in); } // Tar needs an even bigger buffer to check the signature; read the first block final byte[] tarheader = new byte[512]; in.mark(tarheader.length); signatureLength = in.read(tarheader); in.reset(); if (TarArchiveInputStream.matches(tarheader, signatureLength)) { return new TarArchiveInputStream(in); } // COMPRESS-117 - improve auto-recognition if (signatureLength >= 512) { try { TarArchiveInputStream tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader)); // COMPRESS-191 - verify the header checksum if (tais.getNextTarEntry().isCheckSumOK()) { return new TarArchiveInputStream(in); } } catch (Exception e) { // NOPMD // can generate IllegalArgumentException as well // as IOException // autodetection, simply not a TAR // ignored } } } catch (IOException e) { throw new ArchiveException("Could not use reset and mark operations.", e); } throw new ArchiveException("No Archiver found for the stream signature"); }
public ArchiveInputStream createArchiveInputStream ( final InputStream in ) throws ArchiveException { if ( in == null ) { throw new IllegalArgumentException ( "Stream must not be null." ) ; } if ( ! in . markSupported ( ) ) { throw new IllegalArgumentException ( "Mark is not supported." ) ; } final byte [ ] signature = new byte [ 12 ] ; in . mark ( signature . length ) ; try { int signatureLength = in . read ( signature ) ; in . reset ( ) ; if ( ZipArchiveInputStream . matches ( signature , signatureLength ) ) { return new ZipArchiveInputStream ( in ) ; } else if ( JarArchiveInputStream . matches ( signature , signatureLength ) ) { return new JarArchiveInputStream ( in ) ; } else if ( ArArchiveInputStream . matches ( signature , signatureLength ) ) { return new ArArchiveInputStream ( in ) ; } else if ( CpioArchiveInputStream . matches ( signature , signatureLength ) ) { return new CpioArchiveInputStream ( in ) ; } final byte [ ] dumpsig = new byte [ 32 ] ; in . mark ( dumpsig . length ) ; signatureLength = in . read ( dumpsig ) ; in . reset ( ) ; if ( DumpArchiveInputStream . matches ( dumpsig , signatureLength ) ) { return new DumpArchiveInputStream ( in ) ; } final byte [ ] tarheader = new byte [ 512 ] ; in . mark ( tarheader . length ) ; signatureLength = in . read ( tarheader ) ; in . reset ( ) ; if ( TarArchiveInputStream . matches ( tarheader , signatureLength ) ) { return new TarArchiveInputStream ( in ) ; } if ( signatureLength >= 512 ) { try { TarArchiveInputStream tais = new TarArchiveInputStream ( new ByteArrayInputStream ( tarheader ) ) ; if ( tais . getNextTarEntry ( ) . isCheckSumOK ( ) ) { return new TarArchiveInputStream ( in ) ; } } catch ( Exception e ) { } } } catch ( IOException e ) { throw new ArchiveException ( "Could not use reset and mark operations." , e ) ; } throw new ArchiveException ( "No Archiver found for the stream signature" ) ; }
defects4j_project: Compress
defects4j_bug_id: 41
file_path: src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
bug_start_line: 219
bug_end_line: 324
issue_title: ZipArchiveInputStream.getNextZipEntry() should differentiate between "invalid entry encountered" and "no more entries"
ZipArchiveInputStream.getNextZipEntry() currently returns null if an invalid entry is encountered. Thus, it's not possible to differentiate between "no more entries" and "invalid entry encountered" conditions. Instead, it should throw an exception if an invalid entry is encountered. I've created a test case and fix. I will submit a pull request shortly.
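A small sketch of the contract after the fix (the archive path is a placeholder): null from getNextZipEntry() means only "no more entries", while an invalid local file header surfaces as a ZipException.

```java
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.zip.ZipException;

import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;

public class ZipIterationDemo {
    public static void main(String[] args) throws Exception {
        try (InputStream in = Files.newInputStream(Paths.get("sample.zip"));
             ZipArchiveInputStream zin = new ZipArchiveInputStream(in)) {
            try {
                ZipArchiveEntry entry;
                while ((entry = zin.getNextZipEntry()) != null) { // null: end of archive
                    System.out.println(entry.getName());
                }
            } catch (ZipException e) {
                // with the fix, a corrupt entry is reported here instead of
                // silently terminating the iteration with null
                System.err.println("Invalid entry: " + e.getMessage());
            }
        }
    }
}
```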
public ZipArchiveEntry getNextZipEntry() throws IOException { boolean firstEntry = true; if (closed || hitCentralDirectory) { return null; } if (current != null) { closeEntry(); firstEntry = false; } try { if (firstEntry) { // split archives have a special signature before the // first local file header - look for it and fail with // the appropriate error message if this is a split // archive. readFirstLocalFileHeader(LFH_BUF); } else { readFully(LFH_BUF); } } catch (final EOFException e) { return null; } final ZipLong sig = new ZipLong(LFH_BUF); if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG)) { hitCentralDirectory = true; skipRemainderOfArchive(); } if (!sig.equals(ZipLong.LFH_SIG)) { return null; } int off = WORD; current = new CurrentEntry(); final int versionMadeBy = ZipShort.getValue(LFH_BUF, off); off += SHORT; current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK); final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(LFH_BUF, off); final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames(); final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; current.hasDataDescriptor = gpFlag.usesDataDescriptor(); current.entry.setGeneralPurposeBit(gpFlag); off += SHORT; current.entry.setMethod(ZipShort.getValue(LFH_BUF, off)); off += SHORT; final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(LFH_BUF, off)); current.entry.setTime(time); off += WORD; ZipLong size = null, cSize = null; if (!current.hasDataDescriptor) { current.entry.setCrc(ZipLong.getValue(LFH_BUF, off)); off += WORD; cSize = new ZipLong(LFH_BUF, off); off += WORD; size = new ZipLong(LFH_BUF, off); off += WORD; } else { off += 3 * WORD; } final int fileNameLen = ZipShort.getValue(LFH_BUF, off); off += SHORT; final int extraLen = ZipShort.getValue(LFH_BUF, off); off += SHORT; final byte[] fileName = new byte[fileNameLen]; readFully(fileName); current.entry.setName(entryEncoding.decode(fileName), fileName); final byte[] extraData = new byte[extraLen]; readFully(extraData); current.entry.setExtra(extraData); if (!hasUTF8Flag && useUnicodeExtraFields) { ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null); } processZip64Extra(size, cSize); if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) { if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) { current.in = new UnshrinkingInputStream(new BoundedInputStream(in, current.entry.getCompressedSize())); } else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) { current.in = new ExplodingInputStream( current.entry.getGeneralPurposeBit().getSlidingDictionarySize(), current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(), new BoundedInputStream(in, current.entry.getCompressedSize())); } else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) { current.in = new BZip2CompressorInputStream(new BoundedInputStream(in, current.entry.getCompressedSize())); } } entriesRead++; return current.entry; }
public ZipArchiveEntry getNextZipEntry ( ) throws IOException { boolean firstEntry = true ; if ( closed || hitCentralDirectory ) { return null ; } if ( current != null ) { closeEntry ( ) ; firstEntry = false ; } try { if ( firstEntry ) { readFirstLocalFileHeader ( LFH_BUF ) ; } else { readFully ( LFH_BUF ) ; } } catch ( final EOFException e ) { return null ; } final ZipLong sig = new ZipLong ( LFH_BUF ) ; if ( sig . equals ( ZipLong . CFH_SIG ) || sig . equals ( ZipLong . AED_SIG ) ) { hitCentralDirectory = true ; skipRemainderOfArchive ( ) ; } if ( ! sig . equals ( ZipLong . LFH_SIG ) ) { return null ; } int off = WORD ; current = new CurrentEntry ( ) ; final int versionMadeBy = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; current . entry . setPlatform ( ( versionMadeBy >> ZipFile . BYTE_SHIFT ) & ZipFile . NIBLET_MASK ) ; final GeneralPurposeBit gpFlag = GeneralPurposeBit . parse ( LFH_BUF , off ) ; final boolean hasUTF8Flag = gpFlag . usesUTF8ForNames ( ) ; final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper . UTF8_ZIP_ENCODING : zipEncoding ; current . hasDataDescriptor = gpFlag . usesDataDescriptor ( ) ; current . entry . setGeneralPurposeBit ( gpFlag ) ; off += SHORT ; current . entry . setMethod ( ZipShort . getValue ( LFH_BUF , off ) ) ; off += SHORT ; final long time = ZipUtil . dosToJavaTime ( ZipLong . getValue ( LFH_BUF , off ) ) ; current . entry . setTime ( time ) ; off += WORD ; ZipLong size = null , cSize = null ; if ( ! current . hasDataDescriptor ) { current . entry . setCrc ( ZipLong . getValue ( LFH_BUF , off ) ) ; off += WORD ; cSize = new ZipLong ( LFH_BUF , off ) ; off += WORD ; size = new ZipLong ( LFH_BUF , off ) ; off += WORD ; } else { off += 3 * WORD ; } final int fileNameLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final int extraLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final byte [ ] fileName = new byte [ fileNameLen ] ; readFully ( fileName ) ; current . entry . setName ( entryEncoding . decode ( fileName ) , fileName ) ; final byte [ ] extraData = new byte [ extraLen ] ; readFully ( extraData ) ; current . entry . setExtra ( extraData ) ; if ( ! hasUTF8Flag && useUnicodeExtraFields ) { ZipUtil . setNameAndCommentFromExtraFields ( current . entry , fileName , null ) ; } processZip64Extra ( size , cSize ) ; if ( current . entry . getCompressedSize ( ) != ArchiveEntry . SIZE_UNKNOWN ) { if ( current . entry . getMethod ( ) == ZipMethod . UNSHRINKING . getCode ( ) ) { current . in = new UnshrinkingInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . IMPLODING . getCode ( ) ) { current . in = new ExplodingInputStream ( current . entry . getGeneralPurposeBit ( ) . getSlidingDictionarySize ( ) , current . entry . getGeneralPurposeBit ( ) . getNumberOfShannonFanoTrees ( ) , new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . BZIP2 . getCode ( ) ) { current . in = new BZip2CompressorInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } } entriesRead ++ ; return current . entry ; }
public ZipArchiveEntry getNextZipEntry() throws IOException { boolean firstEntry = true; if (closed || hitCentralDirectory) { return null; } if (current != null) { closeEntry(); firstEntry = false; } try { if (firstEntry) { // split archives have a special signature before the // first local file header - look for it and fail with // the appropriate error message if this is a split // archive. readFirstLocalFileHeader(LFH_BUF); } else { readFully(LFH_BUF); } } catch (final EOFException e) { return null; } final ZipLong sig = new ZipLong(LFH_BUF); if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG)) { hitCentralDirectory = true; skipRemainderOfArchive(); return null; } if (!sig.equals(ZipLong.LFH_SIG)) { throw new ZipException(String.format("Unexpected record signature: 0X%X", sig.getValue())); } int off = WORD; current = new CurrentEntry(); final int versionMadeBy = ZipShort.getValue(LFH_BUF, off); off += SHORT; current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK); final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(LFH_BUF, off); final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames(); final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; current.hasDataDescriptor = gpFlag.usesDataDescriptor(); current.entry.setGeneralPurposeBit(gpFlag); off += SHORT; current.entry.setMethod(ZipShort.getValue(LFH_BUF, off)); off += SHORT; final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(LFH_BUF, off)); current.entry.setTime(time); off += WORD; ZipLong size = null, cSize = null; if (!current.hasDataDescriptor) { current.entry.setCrc(ZipLong.getValue(LFH_BUF, off)); off += WORD; cSize = new ZipLong(LFH_BUF, off); off += WORD; size = new ZipLong(LFH_BUF, off); off += WORD; } else { off += 3 * WORD; } final int fileNameLen = ZipShort.getValue(LFH_BUF, off); off += SHORT; final int extraLen = ZipShort.getValue(LFH_BUF, off); off += SHORT; final byte[] fileName = new byte[fileNameLen]; readFully(fileName); current.entry.setName(entryEncoding.decode(fileName), fileName); final byte[] extraData = new byte[extraLen]; readFully(extraData); current.entry.setExtra(extraData); if (!hasUTF8Flag && useUnicodeExtraFields) { ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null); } processZip64Extra(size, cSize); if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) { if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) { current.in = new UnshrinkingInputStream(new BoundedInputStream(in, current.entry.getCompressedSize())); } else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) { current.in = new ExplodingInputStream( current.entry.getGeneralPurposeBit().getSlidingDictionarySize(), current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(), new BoundedInputStream(in, current.entry.getCompressedSize())); } else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) { current.in = new BZip2CompressorInputStream(new BoundedInputStream(in, current.entry.getCompressedSize())); } } entriesRead++; return current.entry; }
public ZipArchiveEntry getNextZipEntry ( ) throws IOException { boolean firstEntry = true ; if ( closed || hitCentralDirectory ) { return null ; } if ( current != null ) { closeEntry ( ) ; firstEntry = false ; } try { if ( firstEntry ) { readFirstLocalFileHeader ( LFH_BUF ) ; } else { readFully ( LFH_BUF ) ; } } catch ( final EOFException e ) { return null ; } final ZipLong sig = new ZipLong ( LFH_BUF ) ; if ( sig . equals ( ZipLong . CFH_SIG ) || sig . equals ( ZipLong . AED_SIG ) ) { hitCentralDirectory = true ; skipRemainderOfArchive ( ) ; return null ; } if ( ! sig . equals ( ZipLong . LFH_SIG ) ) { throw new ZipException ( String . format ( "Unexpected record signature: 0X%X" , sig . getValue ( ) ) ) ; } int off = WORD ; current = new CurrentEntry ( ) ; final int versionMadeBy = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; current . entry . setPlatform ( ( versionMadeBy >> ZipFile . BYTE_SHIFT ) & ZipFile . NIBLET_MASK ) ; final GeneralPurposeBit gpFlag = GeneralPurposeBit . parse ( LFH_BUF , off ) ; final boolean hasUTF8Flag = gpFlag . usesUTF8ForNames ( ) ; final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper . UTF8_ZIP_ENCODING : zipEncoding ; current . hasDataDescriptor = gpFlag . usesDataDescriptor ( ) ; current . entry . setGeneralPurposeBit ( gpFlag ) ; off += SHORT ; current . entry . setMethod ( ZipShort . getValue ( LFH_BUF , off ) ) ; off += SHORT ; final long time = ZipUtil . dosToJavaTime ( ZipLong . getValue ( LFH_BUF , off ) ) ; current . entry . setTime ( time ) ; off += WORD ; ZipLong size = null , cSize = null ; if ( ! current . hasDataDescriptor ) { current . entry . setCrc ( ZipLong . getValue ( LFH_BUF , off ) ) ; off += WORD ; cSize = new ZipLong ( LFH_BUF , off ) ; off += WORD ; size = new ZipLong ( LFH_BUF , off ) ; off += WORD ; } else { off += 3 * WORD ; } final int fileNameLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final int extraLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final byte [ ] fileName = new byte [ fileNameLen ] ; readFully ( fileName ) ; current . entry . setName ( entryEncoding . decode ( fileName ) , fileName ) ; final byte [ ] extraData = new byte [ extraLen ] ; readFully ( extraData ) ; current . entry . setExtra ( extraData ) ; if ( ! hasUTF8Flag && useUnicodeExtraFields ) { ZipUtil . setNameAndCommentFromExtraFields ( current . entry , fileName , null ) ; } processZip64Extra ( size , cSize ) ; if ( current . entry . getCompressedSize ( ) != ArchiveEntry . SIZE_UNKNOWN ) { if ( current . entry . getMethod ( ) == ZipMethod . UNSHRINKING . getCode ( ) ) { current . in = new UnshrinkingInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . IMPLODING . getCode ( ) ) { current . in = new ExplodingInputStream ( current . entry . getGeneralPurposeBit ( ) . getSlidingDictionarySize ( ) , current . entry . getGeneralPurposeBit ( ) . getNumberOfShannonFanoTrees ( ) , new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . BZIP2 . getCode ( ) ) { current . in = new BZip2CompressorInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } } entriesRead ++ ; return current . entry ; }
defects4j_project: JacksonDatabind
defects4j_bug_id: 93
file_path: src/main/java/com/fasterxml/jackson/databind/jsontype/impl/SubTypeValidator.java
bug_start_line: 67
bug_end_line: 99
issue_title: `NullPointerException` in `SubTypeValidator.validateSubType` when validating Spring interface
In jackson-databind-2.8.11 jackson-databind-2.9.3 and jackson-databind-2.9.4-SNAPSHOT `SubTypeValidator.validateSubType` fails with a `NullPointerException` if the `JavaType.getRawClass()` is an interface that starts with `org.springframework.` For example, the following will fail: ```java package org.springframework.security.core; import java.util.*; public class Authentication { private List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>(); public List<GrantedAuthority> getAuthorities() { return this.authorities; } public void setAuthorities(List<GrantedAuthority> authorities) { this.authorities = authorities; } } ``` ```java package org.springframework.security.core; public interface GrantedAuthority { String getAuthority(); } ``` ```java @Test public void validateSubTypeFailsWithNPE() throws Exception { ObjectMapper mapper = new ObjectMapper(); mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY); String json = "{\"@class\":\"org.springframework.security.core.Authentication\",\"authorities\":[\"java.util.ArrayList\",[]]}"; Authentication authentication = mapper.readValue(json, Authentication.class); } ``` with the following stacktrace: ``` java.lang.NullPointerException at com.fasterxml.jackson.databind.jsontype.impl.SubTypeValidator.validateSubType(SubTypeValidator.java:86) at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory._validateSubType(BeanDeserializerFactory.java:916) at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory.createBeanDeserializer(BeanDeserializerFactory.java:135) at com.fasterxml.jackson.databind.deser.DeserializerCache._createDeserializer2(DeserializerCache.java:411) at com.fasterxml.jackson.databind.deser.DeserializerCache._createDeserializer(DeserializerCache.java:349) at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:264) at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244) at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142) at com.fasterxml.jackson.databind.DeserializationContext.findContextualValueDeserializer(DeserializationContext.java:444) at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.createContextual(CollectionDeserializer.java:183) at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.createContextual(CollectionDeserializer.java:27) at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651) at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:471) at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293) at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244) at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142) at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477) at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4178) at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3997) at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2992) ``` In prior versions, the test works.
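The root cause can be shown in isolation: Class.getSuperclass() returns null for interfaces, so a superclass walk that only stops at Object.class runs past the end. A minimal sketch:

```java
public class SuperclassWalkDemo {
    public static void main(String[] args) {
        // interfaces have no superclass
        System.out.println(Runnable.class.getSuperclass()); // null

        // the loop shape used by validateSubType: "cls != Object.class" never
        // matches null, so the next cls.getSimpleName() dereferences null
        try {
            for (Class<?> cls = Runnable.class; cls != Object.class; cls = cls.getSuperclass()) {
                System.out.println(cls.getSimpleName());
            }
        } catch (NullPointerException e) {
            System.out.println("NPE, as seen at SubTypeValidator.validateSubType");
        }
    }
}
```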
public void validateSubType(DeserializationContext ctxt, JavaType type) throws JsonMappingException { // There are certain nasty classes that could cause problems, mostly // via default typing -- catch them here. final Class<?> raw = type.getRawClass(); String full = raw.getName(); main_check: do { if (_cfgIllegalClassNames.contains(full)) { break; } // 18-Dec-2017, tatu: As per [databind#1855], need bit more sophisticated handling // for some Spring framework types // 05-Jan-2017, tatu: ... also, only applies to classes, not interfaces if (full.startsWith(PREFIX_STRING)) { for (Class<?> cls = raw; cls != Object.class; cls = cls.getSuperclass()) { String name = cls.getSimpleName(); // looking for "AbstractBeanFactoryPointcutAdvisor" but no point to allow any is there? if ("AbstractPointcutAdvisor".equals(name) // ditto for "FileSystemXmlApplicationContext": block all ApplicationContexts || "AbstractApplicationContext".equals(name)) { break main_check; } } } return; } while (false); throw JsonMappingException.from(ctxt, String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); }
public void validateSubType ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final Class < ? > raw = type . getRawClass ( ) ; String full = raw . getName ( ) ; main_check : do { if ( _cfgIllegalClassNames . contains ( full ) ) { break ; } if ( full . startsWith ( PREFIX_STRING ) ) { for ( Class < ? > cls = raw ; cls != Object . class ; cls = cls . getSuperclass ( ) ) { String name = cls . getSimpleName ( ) ; if ( "AbstractPointcutAdvisor" . equals ( name ) || "AbstractApplicationContext" . equals ( name ) ) { break main_check ; } } } return ; } while ( false ) ; throw JsonMappingException . from ( ctxt , String . format ( "Illegal type (%s) to deserialize: prevented for security reasons" , full ) ) ; }
public void validateSubType(DeserializationContext ctxt, JavaType type) throws JsonMappingException { // There are certain nasty classes that could cause problems, mostly // via default typing -- catch them here. final Class<?> raw = type.getRawClass(); String full = raw.getName(); main_check: do { if (_cfgIllegalClassNames.contains(full)) { break; } // 18-Dec-2017, tatu: As per [databind#1855], need bit more sophisticated handling // for some Spring framework types // 05-Jan-2017, tatu: ... also, only applies to classes, not interfaces if (!raw.isInterface() && full.startsWith(PREFIX_STRING)) { for (Class<?> cls = raw; (cls != null) && (cls != Object.class); cls = cls.getSuperclass()) { String name = cls.getSimpleName(); // looking for "AbstractBeanFactoryPointcutAdvisor" but no point to allow any is there? if ("AbstractPointcutAdvisor".equals(name) // ditto for "FileSystemXmlApplicationContext": block all ApplicationContexts || "AbstractApplicationContext".equals(name)) { break main_check; } } } return; } while (false); throw JsonMappingException.from(ctxt, String.format("Illegal type (%s) to deserialize: prevented for security reasons", full)); }
public void validateSubType ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final Class < ? > raw = type . getRawClass ( ) ; String full = raw . getName ( ) ; main_check : do { if ( _cfgIllegalClassNames . contains ( full ) ) { break ; } if ( ! raw . isInterface ( ) && full . startsWith ( PREFIX_STRING ) ) { for ( Class < ? > cls = raw ; ( cls != null ) && ( cls != Object . class ) ; cls = cls . getSuperclass ( ) ) { String name = cls . getSimpleName ( ) ; if ( "AbstractPointcutAdvisor" . equals ( name ) || "AbstractApplicationContext" . equals ( name ) ) { break main_check ; } } } return ; } while ( false ) ; throw JsonMappingException . from ( ctxt , String . format ( "Illegal type (%s) to deserialize: prevented for security reasons" , full ) ) ; }
defects4j_project: Math
defects4j_bug_id: 2
file_path: src/main/java/org/apache/commons/math3/distribution/HypergeometricDistribution.java
bug_start_line: 267
bug_end_line: 269
issue_title: HypergeometricDistribution.sample suffers from integer overflow
Hi, I have an application which broke when ported from commons math 2.2 to 3.2. It looks like the HypergeometricDistribution.sample() method doesn't work as well as it used to with large integer values -- the example code below should return a sample between 0 and 50, but usually returns -50. {code} import org.apache.commons.math3.distribution.HypergeometricDistribution; public class Foo { public static void main(String[] args) { HypergeometricDistribution a = new HypergeometricDistribution( 43130568, 42976365, 50); System.out.printf("%d %d%n", a.getSupportLowerBound(), a.getSupportUpperBound()); // Prints "0 50" System.out.printf("%d%n",a.sample()); // Prints "-50" } } {code} In the debugger, I traced it as far as an integer overflow in HypergeometricDistribution.getNumericalMean() -- instead of doing {code} return (double) (getSampleSize() * getNumberOfSuccesses()) / (double) getPopulationSize(); {code} it could do: {code} return getSampleSize() * ((double) getNumberOfSuccesses() / (double) getPopulationSize()); {code} This seemed to fix it, based on a quick test.
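The overflow is easy to verify with the reporter's numbers: 50 * 43130568 = 2156528400, which exceeds Integer.MAX_VALUE (2147483647) and wraps negative in 32-bit arithmetic.

```java
public class MeanOverflowDemo {
    public static void main(String[] args) {
        int sampleSize = 50;
        int successes = 43130568;

        System.out.println(sampleSize * successes);          // -2138438896 (int overflow)
        System.out.println(sampleSize * (double) successes); // 2.1565284E9 (correct)
    }
}
```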
public double getNumericalMean() { return (double) (getSampleSize() * getNumberOfSuccesses()) / (double) getPopulationSize(); }
public double getNumericalMean ( ) { return ( double ) ( getSampleSize ( ) * getNumberOfSuccesses ( ) ) / ( double ) getPopulationSize ( ) ; }
public double getNumericalMean() { return getSampleSize() * (getNumberOfSuccesses() / (double) getPopulationSize()); }
public double getNumericalMean ( ) { return getSampleSize ( ) * ( getNumberOfSuccesses ( ) / ( double ) getPopulationSize ( ) ) ; }
defects4j_project: Math
defects4j_bug_id: 58
file_path: src/main/java/org/apache/commons/math/optimization/fitting/GaussianFitter.java
bug_start_line: 119
bug_end_line: 122
issue_title: GaussianFitter Unexpectedly Throws NotStrictlyPositiveException
Running the following: double[] observations = { 1.1143831578403364E-29, 4.95281403484594E-28, 1.1171347211930288E-26, 1.7044813962636277E-25, 1.9784716574832164E-24, 1.8630236407866774E-23, 1.4820532905097742E-22, 1.0241963854632831E-21, 6.275077366673128E-21, 3.461808994532493E-20, 1.7407124684715706E-19, 8.056687953553974E-19, 3.460193945992071E-18, 1.3883326374011525E-17, 5.233894983671116E-17, 1.8630791465263745E-16, 6.288759227922111E-16, 2.0204433920597856E-15, 6.198768938576155E-15, 1.821419346860626E-14, 5.139176445538471E-14, 1.3956427429045787E-13, 3.655705706448139E-13, 9.253753324779779E-13, 2.267636001476696E-12, 5.3880460095836855E-12, 1.2431632654852931E-11 }; GaussianFitter g = new GaussianFitter(new LevenbergMarquardtOptimizer()); for (int index = 0; index < 27; index++) { g.addObservedPoint(index, observations[index]); } g.fit(); Results in: org.apache.commons.math.exception.NotStrictlyPositiveException: -1.277 is smaller than, or equal to, the minimum (0) at org.apache.commons.math.analysis.function.Gaussian$Parametric.validateParameters(Gaussian.java:184) at org.apache.commons.math.analysis.function.Gaussian$Parametric.value(Gaussian.java:129) I'm guessing the initial guess for sigma is off.
public double[] fit() { final double[] guess = (new ParameterGuesser(getObservations())).guess(); return fit(new Gaussian.Parametric(), guess); }
public double [ ] fit ( ) { final double [ ] guess = ( new ParameterGuesser ( getObservations ( ) ) ) . guess ( ) ; return fit ( new Gaussian . Parametric ( ) , guess ) ; }
public double[] fit() { final double[] guess = (new ParameterGuesser(getObservations())).guess(); return fit(guess); }
public double [ ] fit ( ) { final double [ ] guess = ( new ParameterGuesser ( getObservations ( ) ) ) . guess ( ) ; return fit ( guess ) ; }
defects4j_project: JacksonDatabind
defects4j_bug_id: 85
file_path: src/main/java/com/fasterxml/jackson/databind/ser/std/DateTimeSerializerBase.java
bug_start_line: 48
bug_end_line: 95
issue_title: `DateTimeSerializerBase` ignores configured date format when creating contextual
`DateTimeSerializerBase#createContextual` creates a new serializer with `StdDateFormat.DATE_FORMAT_STR_ISO8601` format instead of re-using the actual format that may have been specified on the configuration. See the following code: ``` final String pattern = format.hasPattern() ? format.getPattern() : StdDateFormat.DATE_FORMAT_STR_ISO8601; ``` Using the `@JsonFormat` annotation on a field will therefore reset the format to Jackson's default even if the annotation doesn't specify any custom format. `DateBasedDeserializer#createContextual` behaves differently and tries to re-use the configured format: ``` DateFormat df = ctxt.getConfig().getDateFormat(); // one shortcut: with our custom format, can simplify handling a bit if (df.getClass() == StdDateFormat.class) { ... StdDateFormat std = (StdDateFormat) df; std = std.withTimeZone(tz); ... } else { // otherwise need to clone, re-set timezone: df = (DateFormat) df.clone(); df.setTimeZone(tz); } ``` Shouldn't the serializer follow the same approach ?
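A hedged reproduction sketch (bean and field names mine): with a mapper-level date format configured, a bare @JsonFormat annotation should not silently replace it with Jackson's ISO-8601 default.

```java
import java.text.SimpleDateFormat;
import java.util.Date;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DateFormatDemo {
    public static class Bean {
        @JsonFormat(shape = JsonFormat.Shape.STRING) // no pattern given
        public Date when = new Date(0L);
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd"));

        // buggy: the annotation resets the format to the ISO-8601 default;
        // fixed: the configured format is reused, so this should print {"when":"1970-01-01"}
        System.out.println(mapper.writeValueAsString(new Bean()));
    }
}
```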
@Override public JsonSerializer<?> createContextual(SerializerProvider serializers, BeanProperty property) throws JsonMappingException { if (property == null) { return this; } JsonFormat.Value format = findFormatOverrides(serializers, property, handledType()); if (format == null) { return this; } // Simple case first: serialize as numeric timestamp? JsonFormat.Shape shape = format.getShape(); if (shape.isNumeric()) { return withFormat(Boolean.TRUE, null); } // 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky.. // First: custom pattern will override things if ((shape == JsonFormat.Shape.STRING) || format.hasPattern() || format.hasLocale() || format.hasTimeZone()) { TimeZone tz = format.getTimeZone(); final String pattern = format.hasPattern() ? format.getPattern() : StdDateFormat.DATE_FORMAT_STR_ISO8601; final Locale loc = format.hasLocale() ? format.getLocale() : serializers.getLocale(); SimpleDateFormat df = new SimpleDateFormat(pattern, loc); if (tz == null) { tz = serializers.getTimeZone(); } df.setTimeZone(tz); return withFormat(Boolean.FALSE, df); } // Otherwise, need one of these changes: // Jackson's own `StdDateFormat` is quite easy to deal with... // 08-Jun-2017, tatu: Unfortunately there's no generally usable // mechanism for changing `DateFormat` instances (or even clone()ing) // So: require it be `SimpleDateFormat`; can't config other types // serializers.reportBadDefinition(handledType(), String.format( // Ugh. No way to change `Locale`, create copy; must re-crete completely: return this; }
@ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { if ( property == null ) { return this ; } JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( ( shape == JsonFormat . Shape . STRING ) || format . hasPattern ( ) || format . hasLocale ( ) || format . hasTimeZone ( ) ) { TimeZone tz = format . getTimeZone ( ) ; final String pattern = format . hasPattern ( ) ? format . getPattern ( ) : StdDateFormat . DATE_FORMAT_STR_ISO8601 ; final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( pattern , loc ) ; if ( tz == null ) { tz = serializers . getTimeZone ( ) ; } df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } return this ; }
@Override public JsonSerializer<?> createContextual(SerializerProvider serializers, BeanProperty property) throws JsonMappingException { if (property == null) { return this; } JsonFormat.Value format = findFormatOverrides(serializers, property, handledType()); if (format == null) { return this; } // Simple case first: serialize as numeric timestamp? JsonFormat.Shape shape = format.getShape(); if (shape.isNumeric()) { return withFormat(Boolean.TRUE, null); } // 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky.. // First: custom pattern will override things if (format.hasPattern()) { final Locale loc = format.hasLocale() ? format.getLocale() : serializers.getLocale(); SimpleDateFormat df = new SimpleDateFormat(format.getPattern(), loc); TimeZone tz = format.hasTimeZone() ? format.getTimeZone() : serializers.getTimeZone(); df.setTimeZone(tz); return withFormat(Boolean.FALSE, df); } // Otherwise, need one of these changes: final boolean hasLocale = format.hasLocale(); final boolean hasTZ = format.hasTimeZone(); final boolean asString = (shape == JsonFormat.Shape.STRING); if (!hasLocale && !hasTZ && !asString) { return this; } DateFormat df0 = serializers.getConfig().getDateFormat(); // Jackson's own `StdDateFormat` is quite easy to deal with... if (df0 instanceof StdDateFormat) { StdDateFormat std = (StdDateFormat) df0; if (format.hasLocale()) { std = std.withLocale(format.getLocale()); } if (format.hasTimeZone()) { std = std.withTimeZone(format.getTimeZone()); } return withFormat(Boolean.FALSE, std); } // 08-Jun-2017, tatu: Unfortunately there's no generally usable // mechanism for changing `DateFormat` instances (or even clone()ing) // So: require it be `SimpleDateFormat`; can't config other types if (!(df0 instanceof SimpleDateFormat)) { // serializers.reportBadDefinition(handledType(), String.format( serializers.reportMappingProblem( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; can not configure `Locale` or `TimeZone`", df0.getClass().getName()); } SimpleDateFormat df = (SimpleDateFormat) df0; if (hasLocale) { // Ugh. No way to change `Locale`, create copy; must re-crete completely: df = new SimpleDateFormat(df.toPattern(), format.getLocale()); } else { df = (SimpleDateFormat) df.clone(); } TimeZone newTz = format.getTimeZone(); boolean changeTZ = (newTz != null) && !newTz.equals(df.getTimeZone()); if (changeTZ) { df.setTimeZone(newTz); } return withFormat(Boolean.FALSE, df); }
@ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { if ( property == null ) { return this ; } JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( format . hasPattern ( ) ) { final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( format . getPattern ( ) , loc ) ; TimeZone tz = format . hasTimeZone ( ) ? format . getTimeZone ( ) : serializers . getTimeZone ( ) ; df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } final boolean hasLocale = format . hasLocale ( ) ; final boolean hasTZ = format . hasTimeZone ( ) ; final boolean asString = ( shape == JsonFormat . Shape . STRING ) ; if ( ! hasLocale && ! hasTZ && ! asString ) { return this ; } DateFormat df0 = serializers . getConfig ( ) . getDateFormat ( ) ; if ( df0 instanceof StdDateFormat ) { StdDateFormat std = ( StdDateFormat ) df0 ; if ( format . hasLocale ( ) ) { std = std . withLocale ( format . getLocale ( ) ) ; } if ( format . hasTimeZone ( ) ) { std = std . withTimeZone ( format . getTimeZone ( ) ) ; } return withFormat ( Boolean . FALSE , std ) ; } if ( ! ( df0 instanceof SimpleDateFormat ) ) { serializers . reportMappingProblem ( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; can not configure `Locale` or `TimeZone`" , df0 . getClass ( ) . getName ( ) ) ; } SimpleDateFormat df = ( SimpleDateFormat ) df0 ; if ( hasLocale ) { df = new SimpleDateFormat ( df . toPattern ( ) , format . getLocale ( ) ) ; } else { df = ( SimpleDateFormat ) df . clone ( ) ; } TimeZone newTz = format . getTimeZone ( ) ; boolean changeTZ = ( newTz != null ) && ! newTz . equals ( df . getTimeZone ( ) ) ; if ( changeTZ ) { df . setTimeZone ( newTz ) ; } return withFormat ( Boolean . FALSE , df ) ; }
defects4j_project: JacksonDatabind
defects4j_bug_id: 1
file_path: src/main/java/com/fasterxml/jackson/databind/ser/BeanPropertyWriter.java
bug_start_line: 582
bug_end_line: 624
issue_title: NULL values are duplicated when serializing as array [via @JsonFormat(shape = JsonFormat.Shape.ARRAY)]
Example: ``` java public class TestOuter { @JsonFormat(shape = JsonFormat.Shape.ARRAY) public ArrayList<TestInner> array; public TestOuter() { this.array = new ArrayList<TestInner>(); this.array.add(new TestInner(1, "one")); this.array.add(new TestInner(0, null)); } private class TestInner { public int i; public String mayBeNull; public TestInner(int i, String s) { this.i = i; this.mayBeNull = s; } } } ``` Serializing an instance of TestOuter will produce the following incorrect result (as of Jackson 2.2.1): ``` json "array": [[1, "one"], [0, null, null]] ``` where the null value is duplicated. The expected result would be: ``` json "array": [[1, "one"], [0, null]] ``` I tracked the issue down to: ``` java package com.fasterxml.jackson.databind.ser; // ... public class BeanPropertyWriter { // ... public void serializeAsColumn(Object bean, JsonGenerator jgen, SerializerProvider prov) throws Exception { Object value = get(bean); if (value == null) { // nulls need specialized handling if (_nullSerializer != null) { _nullSerializer.serialize(null, jgen, prov); } else { // can NOT suppress entries in tabular output jgen.writeNull(); } } // otherwise find serializer to use JsonSerializer<Object> ser = _serializer; // ... ... ``` where I suspect there is a missing "return", to exit the function once handling of the null value in the dedicated branch is done. As it is now, a null value is first serialized in the dedicated branch (jgen.writeNull()), and then execution continues on the "normal" (non-null) path and eventually the value is serialized once again.
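A self-contained variant of the reporter's example (class names mine, using static nested classes so it compiles standalone) that exercises serializeAsColumn through the array shape:

```java
import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;

public class NullColumnDemo {
    public static class Inner {
        public int i;
        public String mayBeNull;
        public Inner(int i, String s) { this.i = i; this.mayBeNull = s; }
    }

    public static class Outer {
        @JsonFormat(shape = JsonFormat.Shape.ARRAY)
        public List<Inner> array = Arrays.asList(new Inner(1, "one"), new Inner(0, null));
    }

    public static void main(String[] args) throws Exception {
        // buggy:  {"array":[[1,"one"],[0,null,null]]} - the null column is written twice
        // fixed:  {"array":[[1,"one"],[0,null]]}
        System.out.println(new ObjectMapper().writeValueAsString(new Outer()));
    }
}
```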
public void serializeAsColumn(Object bean, JsonGenerator jgen, SerializerProvider prov) throws Exception { Object value = get(bean); if (value == null) { // nulls need specialized handling if (_nullSerializer != null) { _nullSerializer.serialize(null, jgen, prov); } else { // can NOT suppress entries in tabular output jgen.writeNull(); } } // otherwise find serializer to use JsonSerializer<Object> ser = _serializer; if (ser == null) { Class<?> cls = value.getClass(); PropertySerializerMap map = _dynamicSerializers; ser = map.serializerFor(cls); if (ser == null) { ser = _findAndAddDynamic(map, cls, prov); } } // and then see if we must suppress certain values (default, empty) if (_suppressableValue != null) { if (MARKER_FOR_EMPTY == _suppressableValue) { if (ser.isEmpty(value)) { // can NOT suppress entries in tabular output serializeAsPlaceholder(bean, jgen, prov); return; } } else if (_suppressableValue.equals(value)) { // can NOT suppress entries in tabular output serializeAsPlaceholder(bean, jgen, prov); return; } } // For non-nulls: simple check for direct cycles if (value == bean) { _handleSelfReference(bean, ser); } if (_typeSerializer == null) { ser.serialize(value, jgen, prov); } else { ser.serializeWithType(value, jgen, prov, _typeSerializer); } }
public void serializeAsColumn ( Object bean , JsonGenerator jgen , SerializerProvider prov ) throws Exception { Object value = get ( bean ) ; if ( value == null ) { if ( _nullSerializer != null ) { _nullSerializer . serialize ( null , jgen , prov ) ; } else { jgen . writeNull ( ) ; } } JsonSerializer < Object > ser = _serializer ; if ( ser == null ) { Class < ? > cls = value . getClass ( ) ; PropertySerializerMap map = _dynamicSerializers ; ser = map . serializerFor ( cls ) ; if ( ser == null ) { ser = _findAndAddDynamic ( map , cls , prov ) ; } } if ( _suppressableValue != null ) { if ( MARKER_FOR_EMPTY == _suppressableValue ) { if ( ser . isEmpty ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } else if ( _suppressableValue . equals ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } if ( value == bean ) { _handleSelfReference ( bean , ser ) ; } if ( _typeSerializer == null ) { ser . serialize ( value , jgen , prov ) ; } else { ser . serializeWithType ( value , jgen , prov , _typeSerializer ) ; } }
public void serializeAsColumn(Object bean, JsonGenerator jgen, SerializerProvider prov) throws Exception { Object value = get(bean); if (value == null) { // nulls need specialized handling if (_nullSerializer != null) { _nullSerializer.serialize(null, jgen, prov); } else { // can NOT suppress entries in tabular output jgen.writeNull(); } return; } // otherwise find serializer to use JsonSerializer<Object> ser = _serializer; if (ser == null) { Class<?> cls = value.getClass(); PropertySerializerMap map = _dynamicSerializers; ser = map.serializerFor(cls); if (ser == null) { ser = _findAndAddDynamic(map, cls, prov); } } // and then see if we must suppress certain values (default, empty) if (_suppressableValue != null) { if (MARKER_FOR_EMPTY == _suppressableValue) { if (ser.isEmpty(value)) { // can NOT suppress entries in tabular output serializeAsPlaceholder(bean, jgen, prov); return; } } else if (_suppressableValue.equals(value)) { // can NOT suppress entries in tabular output serializeAsPlaceholder(bean, jgen, prov); return; } } // For non-nulls: simple check for direct cycles if (value == bean) { _handleSelfReference(bean, ser); } if (_typeSerializer == null) { ser.serialize(value, jgen, prov); } else { ser.serializeWithType(value, jgen, prov, _typeSerializer); } }
public void serializeAsColumn ( Object bean , JsonGenerator jgen , SerializerProvider prov ) throws Exception { Object value = get ( bean ) ; if ( value == null ) { if ( _nullSerializer != null ) { _nullSerializer . serialize ( null , jgen , prov ) ; } else { jgen . writeNull ( ) ; } return ; } JsonSerializer < Object > ser = _serializer ; if ( ser == null ) { Class < ? > cls = value . getClass ( ) ; PropertySerializerMap map = _dynamicSerializers ; ser = map . serializerFor ( cls ) ; if ( ser == null ) { ser = _findAndAddDynamic ( map , cls , prov ) ; } } if ( _suppressableValue != null ) { if ( MARKER_FOR_EMPTY == _suppressableValue ) { if ( ser . isEmpty ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } else if ( _suppressableValue . equals ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } if ( value == bean ) { _handleSelfReference ( bean , ser ) ; } if ( _typeSerializer == null ) { ser . serialize ( value , jgen , prov ) ; } else { ser . serializeWithType ( value , jgen , prov , _typeSerializer ) ; } }
defects4j_project: Math
defects4j_bug_id: 74
file_path: src/main/java/org/apache/commons/math/ode/nonstiff/EmbeddedRungeKuttaIntegrator.java
bug_start_line: 191
bug_end_line: 359
issue_title: Wrong parameter for first step size guess for Embedded Runge Kutta methods
In a space application using DOP853 I detected what seems to be a bad parameter in the call to the method initializeStep of class AdaptiveStepsizeIntegrator. Here, DormandPrince853Integrator is a subclass of EmbeddedRungeKuttaIntegrator, which performs the call to initializeStep at the beginning of its integrate(...) method. The problem comes from the array "scale" that is passed to initializeStep(..). Following the theory described by Hairer in his book "Solving Ordinary Differential Equations I: Nonstiff Problems", the scaling should be sc_i = atol_i + |y0_i| * rtol_i, whereas EmbeddedRungeKuttaIntegrator uses sc_i = atol_i. Note that the Gragg-Bulirsch-Stoer integrator uses the correct implementation, sc_i = atol_i + |y0_i| * rtol_i, when it calls the same method initializeStep(..). In the method initializeStep, the error leads to a wrong step size h used to perform an Euler step. Most of the time it is invisible to the user, but in my space application the Euler step with this wrong step size h (much bigger than it should be) makes an exception occur (my satellite hits the ground...). To fix the bug, one should use the same algorithm as in the rescale method of GraggBulirschStoerIntegrator. For example: final double[] scale = new double[y0.length]; if (vecAbsoluteTolerance == null) { for (int i = 0; i < scale.length; ++i) { final double yi = Math.abs(y0[i]); scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * yi; } } else { for (int i = 0; i < scale.length; ++i) { final double yi = Math.abs(y0[i]); scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * yi; } } hNew = initializeStep(equations, forward, getOrder(), scale, stepStart, y, yDotK[0], yTmp, yDotK[1]); Sorry for the length of this message; looking forward to hearing from you soon. Vincent Morand
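Restating the reporter's scaling formulas in LaTeX for clarity:

```latex
% error scale recommended by Hairer (and used by GraggBulirschStoerIntegrator):
sc_i = \mathrm{atol}_i + \mathrm{rtol}_i \, \lvert y_{0,i} \rvert
% scale actually used by EmbeddedRungeKuttaIntegrator before the fix:
sc_i = \mathrm{atol}_i
```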
@Override public double integrate(final FirstOrderDifferentialEquations equations, final double t0, final double[] y0, final double t, final double[] y) throws DerivativeException, IntegratorException { sanityChecks(equations, t0, y0, t, y); setEquations(equations); resetEvaluations(); final boolean forward = t > t0; // create some internal working arrays final int stages = c.length + 1; if (y != y0) { System.arraycopy(y0, 0, y, 0, y0.length); } final double[][] yDotK = new double[stages][y0.length]; final double[] yTmp = new double[y0.length]; // set up an interpolator sharing the integrator arrays AbstractStepInterpolator interpolator; if (requiresDenseOutput() || (! eventsHandlersManager.isEmpty())) { final RungeKuttaStepInterpolator rki = (RungeKuttaStepInterpolator) prototype.copy(); rki.reinitialize(this, yTmp, yDotK, forward); interpolator = rki; } else { interpolator = new DummyStepInterpolator(yTmp, forward); } interpolator.storeTime(t0); // set up integration control objects stepStart = t0; double hNew = 0; boolean firstTime = true; for (StepHandler handler : stepHandlers) { handler.reset(); } CombinedEventsManager manager = addEndTimeChecker(t0, t, eventsHandlersManager); boolean lastStep = false; // main integration loop while (!lastStep) { interpolator.shift(); double error = 0; for (boolean loop = true; loop;) { if (firstTime || !fsal) { // first stage computeDerivatives(stepStart, y, yDotK[0]); } if (firstTime) { final double[] scale; if (vecAbsoluteTolerance == null) { scale = new double[y0.length]; java.util.Arrays.fill(scale, scalAbsoluteTolerance); } else { scale = vecAbsoluteTolerance; } hNew = initializeStep(equations, forward, getOrder(), scale, stepStart, y, yDotK[0], yTmp, yDotK[1]); firstTime = false; } stepSize = hNew; // next stages for (int k = 1; k < stages; ++k) { for (int j = 0; j < y0.length; ++j) { double sum = a[k-1][0] * yDotK[0][j]; for (int l = 1; l < k; ++l) { sum += a[k-1][l] * yDotK[l][j]; } yTmp[j] = y[j] + stepSize * sum; } computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]); } // estimate the state at the end of the step for (int j = 0; j < y0.length; ++j) { double sum = b[0] * yDotK[0][j]; for (int l = 1; l < stages; ++l) { sum += b[l] * yDotK[l][j]; } yTmp[j] = y[j] + stepSize * sum; } // estimate the error at the end of the step error = estimateError(yDotK, y, yTmp, stepSize); if (error <= 1.0) { // discrete events handling interpolator.storeTime(stepStart + stepSize); if (manager.evaluateStep(interpolator)) { final double dt = manager.getEventTime() - stepStart; if (Math.abs(dt) <= Math.ulp(stepStart)) { // rejecting the step would lead to a too small next step, we accept it loop = false; } else { // reject the step to match exactly the next switch time hNew = dt; } } else { // accept the step loop = false; } } else { // reject the step and attempt to reduce error by stepsize control final double factor = Math.min(maxGrowth, Math.max(minReduction, safety * Math.pow(error, exp))); hNew = filterStep(stepSize * factor, forward, false); } } // the step has been accepted final double nextStep = stepStart + stepSize; System.arraycopy(yTmp, 0, y, 0, y0.length); manager.stepAccepted(nextStep, y); lastStep = manager.stop(); // provide the step data to the step handler interpolator.storeTime(nextStep); for (StepHandler handler : stepHandlers) { handler.handleStep(interpolator, lastStep); } stepStart = nextStep; if (fsal) { // save the last evaluation for the next step System.arraycopy(yDotK[stages - 1], 0, yDotK[0], 0, y0.length); } if 
(manager.reset(stepStart, y) && ! lastStep) { // some event handler has triggered changes that // invalidate the derivatives, we need to recompute them computeDerivatives(stepStart, y, yDotK[0]); } if (! lastStep) { // in some rare cases we may get here with stepSize = 0, for example // when an event occurs at integration start, reducing the first step // to zero; we have to reset the step to some safe non zero value stepSize = filterStep(stepSize, forward, true); // stepsize control for next step final double factor = Math.min(maxGrowth, Math.max(minReduction, safety * Math.pow(error, exp))); final double scaledH = stepSize * factor; final double nextT = stepStart + scaledH; final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t); hNew = filterStep(scaledH, forward, nextIsLast); } } final double stopTime = stepStart; resetInternalState(); return stopTime; }
@ Override public double integrate ( final FirstOrderDifferentialEquations equations , final double t0 , final double [ ] y0 , final double t , final double [ ] y ) throws DerivativeException , IntegratorException { sanityChecks ( equations , t0 , y0 , t , y ) ; setEquations ( equations ) ; resetEvaluations ( ) ; final boolean forward = t > t0 ; final int stages = c . length + 1 ; if ( y != y0 ) { System . arraycopy ( y0 , 0 , y , 0 , y0 . length ) ; } final double [ ] [ ] yDotK = new double [ stages ] [ y0 . length ] ; final double [ ] yTmp = new double [ y0 . length ] ; AbstractStepInterpolator interpolator ; if ( requiresDenseOutput ( ) || ( ! eventsHandlersManager . isEmpty ( ) ) ) { final RungeKuttaStepInterpolator rki = ( RungeKuttaStepInterpolator ) prototype . copy ( ) ; rki . reinitialize ( this , yTmp , yDotK , forward ) ; interpolator = rki ; } else { interpolator = new DummyStepInterpolator ( yTmp , forward ) ; } interpolator . storeTime ( t0 ) ; stepStart = t0 ; double hNew = 0 ; boolean firstTime = true ; for ( StepHandler handler : stepHandlers ) { handler . reset ( ) ; } CombinedEventsManager manager = addEndTimeChecker ( t0 , t , eventsHandlersManager ) ; boolean lastStep = false ; while ( ! lastStep ) { interpolator . shift ( ) ; double error = 0 ; for ( boolean loop = true ; loop ; ) { if ( firstTime || ! fsal ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( firstTime ) { final double [ ] scale ; if ( vecAbsoluteTolerance == null ) { scale = new double [ y0 . length ] ; java . util . Arrays . fill ( scale , scalAbsoluteTolerance ) ; } else { scale = vecAbsoluteTolerance ; } hNew = initializeStep ( equations , forward , getOrder ( ) , scale , stepStart , y , yDotK [ 0 ] , yTmp , yDotK [ 1 ] ) ; firstTime = false ; } stepSize = hNew ; for ( int k = 1 ; k < stages ; ++ k ) { for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = a [ k - 1 ] [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < k ; ++ l ) { sum += a [ k - 1 ] [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } computeDerivatives ( stepStart + c [ k - 1 ] * stepSize , yTmp , yDotK [ k ] ) ; } for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = b [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < stages ; ++ l ) { sum += b [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } error = estimateError ( yDotK , y , yTmp , stepSize ) ; if ( error <= 1.0 ) { interpolator . storeTime ( stepStart + stepSize ) ; if ( manager . evaluateStep ( interpolator ) ) { final double dt = manager . getEventTime ( ) - stepStart ; if ( Math . abs ( dt ) <= Math . ulp ( stepStart ) ) { loop = false ; } else { hNew = dt ; } } else { loop = false ; } } else { final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; hNew = filterStep ( stepSize * factor , forward , false ) ; } } final double nextStep = stepStart + stepSize ; System . arraycopy ( yTmp , 0 , y , 0 , y0 . length ) ; manager . stepAccepted ( nextStep , y ) ; lastStep = manager . stop ( ) ; interpolator . storeTime ( nextStep ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , lastStep ) ; } stepStart = nextStep ; if ( fsal ) { System . arraycopy ( yDotK [ stages - 1 ] , 0 , yDotK [ 0 ] , 0 , y0 . length ) ; } if ( manager . reset ( stepStart , y ) && ! lastStep ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( ! lastStep ) { stepSize = filterStep ( stepSize , forward , true ) ; final double factor = Math . 
min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; final double scaledH = stepSize * factor ; final double nextT = stepStart + scaledH ; final boolean nextIsLast = forward ? ( nextT >= t ) : ( nextT <= t ) ; hNew = filterStep ( scaledH , forward , nextIsLast ) ; } } final double stopTime = stepStart ; resetInternalState ( ) ; return stopTime ; }
@Override public double integrate(final FirstOrderDifferentialEquations equations, final double t0, final double[] y0, final double t, final double[] y) throws DerivativeException, IntegratorException { sanityChecks(equations, t0, y0, t, y); setEquations(equations); resetEvaluations(); final boolean forward = t > t0; // create some internal working arrays final int stages = c.length + 1; if (y != y0) { System.arraycopy(y0, 0, y, 0, y0.length); } final double[][] yDotK = new double[stages][y0.length]; final double[] yTmp = new double[y0.length]; // set up an interpolator sharing the integrator arrays AbstractStepInterpolator interpolator; if (requiresDenseOutput() || (! eventsHandlersManager.isEmpty())) { final RungeKuttaStepInterpolator rki = (RungeKuttaStepInterpolator) prototype.copy(); rki.reinitialize(this, yTmp, yDotK, forward); interpolator = rki; } else { interpolator = new DummyStepInterpolator(yTmp, forward); } interpolator.storeTime(t0); // set up integration control objects stepStart = t0; double hNew = 0; boolean firstTime = true; for (StepHandler handler : stepHandlers) { handler.reset(); } CombinedEventsManager manager = addEndTimeChecker(t0, t, eventsHandlersManager); boolean lastStep = false; // main integration loop while (!lastStep) { interpolator.shift(); double error = 0; for (boolean loop = true; loop;) { if (firstTime || !fsal) { // first stage computeDerivatives(stepStart, y, yDotK[0]); } if (firstTime) { final double[] scale = new double[y0.length]; if (vecAbsoluteTolerance == null) { for (int i = 0; i < scale.length; ++i) { scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * Math.abs(y[i]); } } else { for (int i = 0; i < scale.length; ++i) { scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * Math.abs(y[i]); } } hNew = initializeStep(equations, forward, getOrder(), scale, stepStart, y, yDotK[0], yTmp, yDotK[1]); firstTime = false; } stepSize = hNew; // next stages for (int k = 1; k < stages; ++k) { for (int j = 0; j < y0.length; ++j) { double sum = a[k-1][0] * yDotK[0][j]; for (int l = 1; l < k; ++l) { sum += a[k-1][l] * yDotK[l][j]; } yTmp[j] = y[j] + stepSize * sum; } computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]); } // estimate the state at the end of the step for (int j = 0; j < y0.length; ++j) { double sum = b[0] * yDotK[0][j]; for (int l = 1; l < stages; ++l) { sum += b[l] * yDotK[l][j]; } yTmp[j] = y[j] + stepSize * sum; } // estimate the error at the end of the step error = estimateError(yDotK, y, yTmp, stepSize); if (error <= 1.0) { // discrete events handling interpolator.storeTime(stepStart + stepSize); if (manager.evaluateStep(interpolator)) { final double dt = manager.getEventTime() - stepStart; if (Math.abs(dt) <= Math.ulp(stepStart)) { // rejecting the step would lead to a too small next step, we accept it loop = false; } else { // reject the step to match exactly the next switch time hNew = dt; } } else { // accept the step loop = false; } } else { // reject the step and attempt to reduce error by stepsize control final double factor = Math.min(maxGrowth, Math.max(minReduction, safety * Math.pow(error, exp))); hNew = filterStep(stepSize * factor, forward, false); } } // the step has been accepted final double nextStep = stepStart + stepSize; System.arraycopy(yTmp, 0, y, 0, y0.length); manager.stepAccepted(nextStep, y); lastStep = manager.stop(); // provide the step data to the step handler interpolator.storeTime(nextStep); for (StepHandler handler : stepHandlers) { handler.handleStep(interpolator, lastStep); } 
stepStart = nextStep; if (fsal) { // save the last evaluation for the next step System.arraycopy(yDotK[stages - 1], 0, yDotK[0], 0, y0.length); } if (manager.reset(stepStart, y) && ! lastStep) { // some event handler has triggered changes that // invalidate the derivatives, we need to recompute them computeDerivatives(stepStart, y, yDotK[0]); } if (! lastStep) { // in some rare cases we may get here with stepSize = 0, for example // when an event occurs at integration start, reducing the first step // to zero; we have to reset the step to some safe non zero value stepSize = filterStep(stepSize, forward, true); // stepsize control for next step final double factor = Math.min(maxGrowth, Math.max(minReduction, safety * Math.pow(error, exp))); final double scaledH = stepSize * factor; final double nextT = stepStart + scaledH; final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t); hNew = filterStep(scaledH, forward, nextIsLast); } } final double stopTime = stepStart; resetInternalState(); return stopTime; }
@ Override public double integrate ( final FirstOrderDifferentialEquations equations , final double t0 , final double [ ] y0 , final double t , final double [ ] y ) throws DerivativeException , IntegratorException { sanityChecks ( equations , t0 , y0 , t , y ) ; setEquations ( equations ) ; resetEvaluations ( ) ; final boolean forward = t > t0 ; final int stages = c . length + 1 ; if ( y != y0 ) { System . arraycopy ( y0 , 0 , y , 0 , y0 . length ) ; } final double [ ] [ ] yDotK = new double [ stages ] [ y0 . length ] ; final double [ ] yTmp = new double [ y0 . length ] ; AbstractStepInterpolator interpolator ; if ( requiresDenseOutput ( ) || ( ! eventsHandlersManager . isEmpty ( ) ) ) { final RungeKuttaStepInterpolator rki = ( RungeKuttaStepInterpolator ) prototype . copy ( ) ; rki . reinitialize ( this , yTmp , yDotK , forward ) ; interpolator = rki ; } else { interpolator = new DummyStepInterpolator ( yTmp , forward ) ; } interpolator . storeTime ( t0 ) ; stepStart = t0 ; double hNew = 0 ; boolean firstTime = true ; for ( StepHandler handler : stepHandlers ) { handler . reset ( ) ; } CombinedEventsManager manager = addEndTimeChecker ( t0 , t , eventsHandlersManager ) ; boolean lastStep = false ; while ( ! lastStep ) { interpolator . shift ( ) ; double error = 0 ; for ( boolean loop = true ; loop ; ) { if ( firstTime || ! fsal ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( firstTime ) { final double [ ] scale = new double [ y0 . length ] ; if ( vecAbsoluteTolerance == null ) { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = scalAbsoluteTolerance + scalRelativeTolerance * Math . abs ( y [ i ] ) ; } } else { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = vecAbsoluteTolerance [ i ] + vecRelativeTolerance [ i ] * Math . abs ( y [ i ] ) ; } } hNew = initializeStep ( equations , forward , getOrder ( ) , scale , stepStart , y , yDotK [ 0 ] , yTmp , yDotK [ 1 ] ) ; firstTime = false ; } stepSize = hNew ; for ( int k = 1 ; k < stages ; ++ k ) { for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = a [ k - 1 ] [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < k ; ++ l ) { sum += a [ k - 1 ] [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } computeDerivatives ( stepStart + c [ k - 1 ] * stepSize , yTmp , yDotK [ k ] ) ; } for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = b [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < stages ; ++ l ) { sum += b [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } error = estimateError ( yDotK , y , yTmp , stepSize ) ; if ( error <= 1.0 ) { interpolator . storeTime ( stepStart + stepSize ) ; if ( manager . evaluateStep ( interpolator ) ) { final double dt = manager . getEventTime ( ) - stepStart ; if ( Math . abs ( dt ) <= Math . ulp ( stepStart ) ) { loop = false ; } else { hNew = dt ; } } else { loop = false ; } } else { final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; hNew = filterStep ( stepSize * factor , forward , false ) ; } } final double nextStep = stepStart + stepSize ; System . arraycopy ( yTmp , 0 , y , 0 , y0 . length ) ; manager . stepAccepted ( nextStep , y ) ; lastStep = manager . stop ( ) ; interpolator . storeTime ( nextStep ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , lastStep ) ; } stepStart = nextStep ; if ( fsal ) { System . arraycopy ( yDotK [ stages - 1 ] , 0 , yDotK [ 0 ] , 0 , y0 . length ) ; } if ( manager . 
reset ( stepStart , y ) && ! lastStep ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( ! lastStep ) { stepSize = filterStep ( stepSize , forward , true ) ; final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; final double scaledH = stepSize * factor ; final double nextT = stepStart + scaledH ; final boolean nextIsLast = forward ? ( nextT >= t ) : ( nextT <= t ) ; hNew = filterStep ( scaledH , forward , nextIsLast ) ; } } final double stopTime = stepStart ; resetInternalState ( ) ; return stopTime ; }
JacksonCore
6
src/main/java/com/fasterxml/jackson/core/JsonPointer.java
185
206
`JsonPointer` should not consider "00" to be a valid index
Although `00` can be parsed as `0` in some cases, it is not a valid JSON number, and it is also not a legal numeric index for JSON Pointer. As such, the `JsonPointer` class should ensure that "00" can only match a property name, never an array index.
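A minimal standalone sketch of the leading-zero rule (a hypothetical helper, not the library's internal API): only "0" itself, or a digit string not starting with '0', qualifies as an array index.

```java
// Hypothetical check mirroring RFC 6901's array-index grammar:
// "0", or a non-empty digit sequence without a leading zero.
public final class PointerIndex {
    private PointerIndex() {}

    public static boolean isValidIndex(String s) {
        if (s.isEmpty()) {
            return false;
        }
        if (s.charAt(0) == '0') {
            return s.length() == 1; // "0" is fine; "00" and "01" are not
        }
        for (int i = 0; i < s.length(); ++i) {
            char c = s.charAt(i);
            if (c < '0' || c > '9') {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(isValidIndex("0"));  // true
        System.out.println(isValidIndex("00")); // false -> property name
        System.out.println(isValidIndex("10")); // true
    }
}
```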
private final static int _parseIndex(String str) { final int len = str.length(); // [core#133]: beware of super long indexes; assume we never // have arrays over 2 billion entries so ints are fine. if (len == 0 || len > 10) { return -1; } // [core#176]: no leading zeroes allowed for (int i = 0; i < len; ++i) { char c = str.charAt(i); if (c > '9' || c < '0') { return -1; } } if (len == 10) { long l = NumberInput.parseLong(str); if (l > Integer.MAX_VALUE) { return -1; } } return NumberInput.parseInt(str); }
private final static int _parseIndex ( String str ) { final int len = str . length ( ) ; if ( len == 0 || len > 10 ) { return - 1 ; } for ( int i = 0 ; i < len ; ++ i ) { char c = str . charAt ( i ) ; if ( c > '9' || c < '0' ) { return - 1 ; } } if ( len == 10 ) { long l = NumberInput . parseLong ( str ) ; if ( l > Integer . MAX_VALUE ) { return - 1 ; } } return NumberInput . parseInt ( str ) ; }
private final static int _parseIndex(String str) { final int len = str.length(); // [core#133]: beware of super long indexes; assume we never // have arrays over 2 billion entries so ints are fine. if (len == 0 || len > 10) { return -1; } // [core#176]: no leading zeroes allowed char c = str.charAt(0); if (c <= '0') { return (len == 1 && c == '0') ? 0 : -1; } if (c > '9') { return -1; } for (int i = 1; i < len; ++i) { c = str.charAt(i); if (c > '9' || c < '0') { return -1; } } if (len == 10) { long l = NumberInput.parseLong(str); if (l > Integer.MAX_VALUE) { return -1; } } return NumberInput.parseInt(str); }
private final static int _parseIndex ( String str ) { final int len = str . length ( ) ; if ( len == 0 || len > 10 ) { return - 1 ; } char c = str . charAt ( 0 ) ; if ( c <= '0' ) { return ( len == 1 && c == '0' ) ? 0 : - 1 ; } if ( c > '9' ) { return - 1 ; } for ( int i = 1 ; i < len ; ++ i ) { c = str . charAt ( i ) ; if ( c > '9' || c < '0' ) { return - 1 ; } } if ( len == 10 ) { long l = NumberInput . parseLong ( str ) ; if ( l > Integer . MAX_VALUE ) { return - 1 ; } } return NumberInput . parseInt ( str ) ; }
JacksonDatabind
46
src/main/java/com/fasterxml/jackson/databind/type/ReferenceType.java
150
158
Incorrect signature for generic type via `JavaType.getGenericSignature`
(see https://github.com/FasterXML/jackson-modules-base/issues/8 for background) It looks like generic signature generation is missing one closing `>` character, producing ``` ()Ljava/util/concurrent/atomic/AtomicReference<Ljava/lang/String;; ``` instead of the expected ``` ()Ljava/util/concurrent/atomic/AtomicReference<Ljava/lang/String;>; ``` that is, the closing '>' is missing.
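A minimal sketch of how such a signature is assembled, showing where the closing '>' belongs (the helper is hypothetical; only the signature grammar comes from the JVM specification):

```java
// A parameterized type signature has the shape Lraw<Targ;>; so the
// '>' must be appended before the final ';' -- exactly the character
// the buggy version dropped by appending only ';'.
public final class SignatureDemo {
    private SignatureDemo() {}

    static String referenceSignature(String rawInternalName, String argSignature) {
        StringBuilder sb = new StringBuilder();
        sb.append('L').append(rawInternalName).append('<');
        sb.append(argSignature);
        sb.append(">;"); // the fix: ">;" instead of ';'
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(referenceSignature(
                "java/util/concurrent/atomic/AtomicReference",
                "Ljava/lang/String;"));
        // prints: Ljava/util/concurrent/atomic/AtomicReference<Ljava/lang/String;>;
    }
}
```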
@Override public StringBuilder getGenericSignature(StringBuilder sb) { _classSignature(_class, sb, false); sb.append('<'); sb = _referencedType.getGenericSignature(sb); sb.append(';'); return sb; }
@ Override public StringBuilder getGenericSignature ( StringBuilder sb ) { _classSignature ( _class , sb , false ) ; sb . append ( '<' ) ; sb = _referencedType . getGenericSignature ( sb ) ; sb . append ( ';' ) ; return sb ; }
@Override public StringBuilder getGenericSignature(StringBuilder sb) { _classSignature(_class, sb, false); sb.append('<'); sb = _referencedType.getGenericSignature(sb); sb.append(">;"); return sb; }
@ Override public StringBuilder getGenericSignature ( StringBuilder sb ) { _classSignature ( _class , sb , false ) ; sb . append ( '<' ) ; sb = _referencedType . getGenericSignature ( sb ) ; sb . append ( ">;" ) ; return sb ; }
Math
23
src/main/java/org/apache/commons/math3/optimization/univariate/BrentOptimizer.java
114
281
"BrentOptimizer" not always reporting the best point
{{BrentOptimizer}} (package "o.a.c.m.optimization.univariate") does not check that the point it is going to return is indeed the best one it has encountered. Indeed, the last evaluated point might be slightly worse than the one before last.
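A minimal sketch of the pattern the fix introduces: carry a running best across iterations and return it, instead of trusting the most recent evaluation (the skeleton below is hypothetical, not the Commons Math API):

```java
import java.util.function.DoubleUnaryOperator;

// Hypothetical iterative minimizer skeleton: the returned point is the
// best one encountered, not necessarily the last one evaluated.
public final class RunningBest {
    private RunningBest() {}

    public static double minimize(DoubleUnaryOperator f, double[] candidates) {
        double bestX = candidates[0];
        double bestF = f.applyAsDouble(bestX);
        for (int i = 1; i < candidates.length; ++i) {
            double fx = f.applyAsDouble(candidates[i]);
            if (fx < bestF) { // keep only strict improvements
                bestF = fx;
                bestX = candidates[i];
            }
        }
        return bestX; // the last candidate may be slightly worse
    }

    public static void main(String[] args) {
        // For f(x) = x^2 the final candidate (1.1) is worse than 1.0,
        // yet 1.0 is what gets returned.
        double x = minimize(v -> v * v, new double[] {3.0, 1.0, 1.1});
        System.out.println(x); // 1.0
    }
}
```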
@Override protected UnivariatePointValuePair doOptimize() { final boolean isMinim = getGoalType() == GoalType.MINIMIZE; final double lo = getMin(); final double mid = getStartValue(); final double hi = getMax(); // Optional additional convergence criteria. final ConvergenceChecker<UnivariatePointValuePair> checker = getConvergenceChecker(); double a; double b; if (lo < hi) { a = lo; b = hi; } else { a = hi; b = lo; } double x = mid; double v = x; double w = x; double d = 0; double e = 0; double fx = computeObjectiveValue(x); if (!isMinim) { fx = -fx; } double fv = fx; double fw = fx; UnivariatePointValuePair previous = null; UnivariatePointValuePair current = new UnivariatePointValuePair(x, isMinim ? fx : -fx); // Best point encountered so far (which is the initial guess). int iter = 0; while (true) { final double m = 0.5 * (a + b); final double tol1 = relativeThreshold * FastMath.abs(x) + absoluteThreshold; final double tol2 = 2 * tol1; // Default stopping criterion. final boolean stop = FastMath.abs(x - m) <= tol2 - 0.5 * (b - a); if (!stop) { double p = 0; double q = 0; double r = 0; double u = 0; if (FastMath.abs(e) > tol1) { // Fit parabola. r = (x - w) * (fx - fv); q = (x - v) * (fx - fw); p = (x - v) * q - (x - w) * r; q = 2 * (q - r); if (q > 0) { p = -p; } else { q = -q; } r = e; e = d; if (p > q * (a - x) && p < q * (b - x) && FastMath.abs(p) < FastMath.abs(0.5 * q * r)) { // Parabolic interpolation step. d = p / q; u = x + d; // f must not be evaluated too close to a or b. if (u - a < tol2 || b - u < tol2) { if (x <= m) { d = tol1; } else { d = -tol1; } } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } // Update by at least "tol1". if (FastMath.abs(d) < tol1) { if (d >= 0) { u = x + tol1; } else { u = x - tol1; } } else { u = x + d; } double fu = computeObjectiveValue(u); if (!isMinim) { fu = -fu; } // User-defined convergence checker. previous = current; current = new UnivariatePointValuePair(u, isMinim ? fu : -fu); if (checker != null) { if (checker.converged(iter, previous, current)) { return best(current, previous, isMinim); } } // Update a, b, v, w and x. if (fu <= fx) { if (u < x) { b = x; } else { a = x; } v = w; fv = fw; w = x; fw = fx; x = u; fx = fu; } else { if (u < x) { a = u; } else { b = u; } if (fu <= fw || Precision.equals(w, x)) { v = w; fv = fw; w = u; fw = fu; } else if (fu <= fv || Precision.equals(v, x) || Precision.equals(v, w)) { v = u; fv = fu; } } } else { // Default termination (Brent's criterion). return best(current, previous, isMinim); } ++iter; } }
@ Override protected UnivariatePointValuePair doOptimize ( ) { final boolean isMinim = getGoalType ( ) == GoalType . MINIMIZE ; final double lo = getMin ( ) ; final double mid = getStartValue ( ) ; final double hi = getMax ( ) ; final ConvergenceChecker < UnivariatePointValuePair > checker = getConvergenceChecker ( ) ; double a ; double b ; if ( lo < hi ) { a = lo ; b = hi ; } else { a = hi ; b = lo ; } double x = mid ; double v = x ; double w = x ; double d = 0 ; double e = 0 ; double fx = computeObjectiveValue ( x ) ; if ( ! isMinim ) { fx = - fx ; } double fv = fx ; double fw = fx ; UnivariatePointValuePair previous = null ; UnivariatePointValuePair current = new UnivariatePointValuePair ( x , isMinim ? fx : - fx ) ; int iter = 0 ; while ( true ) { final double m = 0.5 * ( a + b ) ; final double tol1 = relativeThreshold * FastMath . abs ( x ) + absoluteThreshold ; final double tol2 = 2 * tol1 ; final boolean stop = FastMath . abs ( x - m ) <= tol2 - 0.5 * ( b - a ) ; if ( ! stop ) { double p = 0 ; double q = 0 ; double r = 0 ; double u = 0 ; if ( FastMath . abs ( e ) > tol1 ) { r = ( x - w ) * ( fx - fv ) ; q = ( x - v ) * ( fx - fw ) ; p = ( x - v ) * q - ( x - w ) * r ; q = 2 * ( q - r ) ; if ( q > 0 ) { p = - p ; } else { q = - q ; } r = e ; e = d ; if ( p > q * ( a - x ) && p < q * ( b - x ) && FastMath . abs ( p ) < FastMath . abs ( 0.5 * q * r ) ) { d = p / q ; u = x + d ; if ( u - a < tol2 || b - u < tol2 ) { if ( x <= m ) { d = tol1 ; } else { d = - tol1 ; } } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } if ( FastMath . abs ( d ) < tol1 ) { if ( d >= 0 ) { u = x + tol1 ; } else { u = x - tol1 ; } } else { u = x + d ; } double fu = computeObjectiveValue ( u ) ; if ( ! isMinim ) { fu = - fu ; } previous = current ; current = new UnivariatePointValuePair ( u , isMinim ? fu : - fu ) ; if ( checker != null ) { if ( checker . converged ( iter , previous , current ) ) { return best ( current , previous , isMinim ) ; } } if ( fu <= fx ) { if ( u < x ) { b = x ; } else { a = x ; } v = w ; fv = fw ; w = x ; fw = fx ; x = u ; fx = fu ; } else { if ( u < x ) { a = u ; } else { b = u ; } if ( fu <= fw || Precision . equals ( w , x ) ) { v = w ; fv = fw ; w = u ; fw = fu ; } else if ( fu <= fv || Precision . equals ( v , x ) || Precision . equals ( v , w ) ) { v = u ; fv = fu ; } } } else { return best ( current , previous , isMinim ) ; } ++ iter ; } }
@Override protected UnivariatePointValuePair doOptimize() { final boolean isMinim = getGoalType() == GoalType.MINIMIZE; final double lo = getMin(); final double mid = getStartValue(); final double hi = getMax(); // Optional additional convergence criteria. final ConvergenceChecker<UnivariatePointValuePair> checker = getConvergenceChecker(); double a; double b; if (lo < hi) { a = lo; b = hi; } else { a = hi; b = lo; } double x = mid; double v = x; double w = x; double d = 0; double e = 0; double fx = computeObjectiveValue(x); if (!isMinim) { fx = -fx; } double fv = fx; double fw = fx; UnivariatePointValuePair previous = null; UnivariatePointValuePair current = new UnivariatePointValuePair(x, isMinim ? fx : -fx); // Best point encountered so far (which is the initial guess). UnivariatePointValuePair best = current; int iter = 0; while (true) { final double m = 0.5 * (a + b); final double tol1 = relativeThreshold * FastMath.abs(x) + absoluteThreshold; final double tol2 = 2 * tol1; // Default stopping criterion. final boolean stop = FastMath.abs(x - m) <= tol2 - 0.5 * (b - a); if (!stop) { double p = 0; double q = 0; double r = 0; double u = 0; if (FastMath.abs(e) > tol1) { // Fit parabola. r = (x - w) * (fx - fv); q = (x - v) * (fx - fw); p = (x - v) * q - (x - w) * r; q = 2 * (q - r); if (q > 0) { p = -p; } else { q = -q; } r = e; e = d; if (p > q * (a - x) && p < q * (b - x) && FastMath.abs(p) < FastMath.abs(0.5 * q * r)) { // Parabolic interpolation step. d = p / q; u = x + d; // f must not be evaluated too close to a or b. if (u - a < tol2 || b - u < tol2) { if (x <= m) { d = tol1; } else { d = -tol1; } } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } } else { // Golden section step. if (x < m) { e = b - x; } else { e = a - x; } d = GOLDEN_SECTION * e; } // Update by at least "tol1". if (FastMath.abs(d) < tol1) { if (d >= 0) { u = x + tol1; } else { u = x - tol1; } } else { u = x + d; } double fu = computeObjectiveValue(u); if (!isMinim) { fu = -fu; } // User-defined convergence checker. previous = current; current = new UnivariatePointValuePair(u, isMinim ? fu : -fu); best = best(best, best(current, previous, isMinim), isMinim); if (checker != null) { if (checker.converged(iter, previous, current)) { return best; } } // Update a, b, v, w and x. if (fu <= fx) { if (u < x) { b = x; } else { a = x; } v = w; fv = fw; w = x; fw = fx; x = u; fx = fu; } else { if (u < x) { a = u; } else { b = u; } if (fu <= fw || Precision.equals(w, x)) { v = w; fv = fw; w = u; fw = fu; } else if (fu <= fv || Precision.equals(v, x) || Precision.equals(v, w)) { v = u; fv = fu; } } } else { // Default termination (Brent's criterion). return best(best, best(current, previous, isMinim), isMinim); } ++iter; } }
@ Override protected UnivariatePointValuePair doOptimize ( ) { final boolean isMinim = getGoalType ( ) == GoalType . MINIMIZE ; final double lo = getMin ( ) ; final double mid = getStartValue ( ) ; final double hi = getMax ( ) ; final ConvergenceChecker < UnivariatePointValuePair > checker = getConvergenceChecker ( ) ; double a ; double b ; if ( lo < hi ) { a = lo ; b = hi ; } else { a = hi ; b = lo ; } double x = mid ; double v = x ; double w = x ; double d = 0 ; double e = 0 ; double fx = computeObjectiveValue ( x ) ; if ( ! isMinim ) { fx = - fx ; } double fv = fx ; double fw = fx ; UnivariatePointValuePair previous = null ; UnivariatePointValuePair current = new UnivariatePointValuePair ( x , isMinim ? fx : - fx ) ; UnivariatePointValuePair best = current ; int iter = 0 ; while ( true ) { final double m = 0.5 * ( a + b ) ; final double tol1 = relativeThreshold * FastMath . abs ( x ) + absoluteThreshold ; final double tol2 = 2 * tol1 ; final boolean stop = FastMath . abs ( x - m ) <= tol2 - 0.5 * ( b - a ) ; if ( ! stop ) { double p = 0 ; double q = 0 ; double r = 0 ; double u = 0 ; if ( FastMath . abs ( e ) > tol1 ) { r = ( x - w ) * ( fx - fv ) ; q = ( x - v ) * ( fx - fw ) ; p = ( x - v ) * q - ( x - w ) * r ; q = 2 * ( q - r ) ; if ( q > 0 ) { p = - p ; } else { q = - q ; } r = e ; e = d ; if ( p > q * ( a - x ) && p < q * ( b - x ) && FastMath . abs ( p ) < FastMath . abs ( 0.5 * q * r ) ) { d = p / q ; u = x + d ; if ( u - a < tol2 || b - u < tol2 ) { if ( x <= m ) { d = tol1 ; } else { d = - tol1 ; } } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } if ( FastMath . abs ( d ) < tol1 ) { if ( d >= 0 ) { u = x + tol1 ; } else { u = x - tol1 ; } } else { u = x + d ; } double fu = computeObjectiveValue ( u ) ; if ( ! isMinim ) { fu = - fu ; } previous = current ; current = new UnivariatePointValuePair ( u , isMinim ? fu : - fu ) ; best = best ( best , best ( current , previous , isMinim ) , isMinim ) ; if ( checker != null ) { if ( checker . converged ( iter , previous , current ) ) { return best ; } } if ( fu <= fx ) { if ( u < x ) { b = x ; } else { a = x ; } v = w ; fv = fw ; w = x ; fw = fx ; x = u ; fx = fu ; } else { if ( u < x ) { a = u ; } else { b = u ; } if ( fu <= fw || Precision . equals ( w , x ) ) { v = w ; fv = fw ; w = u ; fw = fu ; } else if ( fu <= fv || Precision . equals ( v , x ) || Precision . equals ( v , w ) ) { v = u ; fv = fu ; } } } else { return best ( best , best ( current , previous , isMinim ) , isMinim ) ; } ++ iter ; } }
JacksonDatabind
102
src/main/java/com/fasterxml/jackson/databind/ser/std/DateTimeSerializerBase.java
61
136
Cannot set custom format for `SqlDateSerializer` globally
Version: 2.9.5 After https://github.com/FasterXML/jackson-databind/issues/219 was fixed, the default format for `java.sql.Date` serialization switched from string to numeric, following the default value of `WRITE_DATES_AS_TIMESTAMPS`. In order to prevent breaks, I want `java.sql.Date` to serialize as a string, without changing behavior for `java.util.Date` (which has always serialized as a number by default). According to https://github.com/FasterXML/jackson-databind/issues/219#issuecomment-370690333, I should be able to revert the behavior for `java.sql.Date` only with ``` final ObjectMapper mapper = new ObjectMapper(); mapper.configOverride(java.sql.Date.class).setFormat(JsonFormat.Value.forPattern("yyyy-MM-dd")); ``` This doesn't seem to do anything, though. Looking at the code, it looks like it's because the custom format isn't actually added to `SqlDateSerializer` except in the `createContextual` method (https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/ser/std/DateTimeSerializerBase.java#L59). For now, I've reverted this behavior with ``` mapper.registerModule(new SimpleModule() { { addSerializer( java.sql.Date.class, new SqlDateSerializer().withFormat(false, new SimpleDateFormat("yyyy-MM-dd")) ); } }); ``` but it seems pretty hacky so I'd prefer the other method if possible.
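For reference, once the early return on a null property is removed, the global override from the report takes effect; a minimal usage sketch against the Jackson 2.9.x API (output shape assumed from the issue discussion):

```java
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;

// With the fix, config overrides reach SqlDateSerializer even when no
// BeanProperty is available (e.g. for root values), so this should
// print a yyyy-MM-dd string rather than a numeric timestamp.
public class SqlDateFormatDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configOverride(java.sql.Date.class)
              .setFormat(JsonFormat.Value.forPattern("yyyy-MM-dd"));
        System.out.println(mapper.writeValueAsString(new java.sql.Date(0L)));
    }
}
```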
@Override public JsonSerializer<?> createContextual(SerializerProvider serializers, BeanProperty property) throws JsonMappingException { // Note! Should not skip if `property` null since that'd skip check // for config overrides, in case of root value if (property == null) { return this; } JsonFormat.Value format = findFormatOverrides(serializers, property, handledType()); if (format == null) { return this; } // Simple case first: serialize as numeric timestamp? JsonFormat.Shape shape = format.getShape(); if (shape.isNumeric()) { return withFormat(Boolean.TRUE, null); } // 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky.. // First: custom pattern will override things if (format.hasPattern()) { final Locale loc = format.hasLocale() ? format.getLocale() : serializers.getLocale(); SimpleDateFormat df = new SimpleDateFormat(format.getPattern(), loc); TimeZone tz = format.hasTimeZone() ? format.getTimeZone() : serializers.getTimeZone(); df.setTimeZone(tz); return withFormat(Boolean.FALSE, df); } // Otherwise, need one of these changes: final boolean hasLocale = format.hasLocale(); final boolean hasTZ = format.hasTimeZone(); final boolean asString = (shape == JsonFormat.Shape.STRING); if (!hasLocale && !hasTZ && !asString) { return this; } DateFormat df0 = serializers.getConfig().getDateFormat(); // Jackson's own `StdDateFormat` is quite easy to deal with... if (df0 instanceof StdDateFormat) { StdDateFormat std = (StdDateFormat) df0; if (format.hasLocale()) { std = std.withLocale(format.getLocale()); } if (format.hasTimeZone()) { std = std.withTimeZone(format.getTimeZone()); } return withFormat(Boolean.FALSE, std); } // 08-Jun-2017, tatu: Unfortunately there's no generally usable // mechanism for changing `DateFormat` instances (or even clone()ing) // So: require it be `SimpleDateFormat`; can't config other types if (!(df0 instanceof SimpleDateFormat)) { serializers.reportBadDefinition(handledType(), String.format( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`", df0.getClass().getName())); } SimpleDateFormat df = (SimpleDateFormat) df0; if (hasLocale) { // Ugh. No way to change `Locale`, create copy; must re-crete completely: df = new SimpleDateFormat(df.toPattern(), format.getLocale()); } else { df = (SimpleDateFormat) df.clone(); } TimeZone newTz = format.getTimeZone(); boolean changeTZ = (newTz != null) && !newTz.equals(df.getTimeZone()); if (changeTZ) { df.setTimeZone(newTz); } return withFormat(Boolean.FALSE, df); }
@ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { if ( property == null ) { return this ; } JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( format . hasPattern ( ) ) { final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( format . getPattern ( ) , loc ) ; TimeZone tz = format . hasTimeZone ( ) ? format . getTimeZone ( ) : serializers . getTimeZone ( ) ; df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } final boolean hasLocale = format . hasLocale ( ) ; final boolean hasTZ = format . hasTimeZone ( ) ; final boolean asString = ( shape == JsonFormat . Shape . STRING ) ; if ( ! hasLocale && ! hasTZ && ! asString ) { return this ; } DateFormat df0 = serializers . getConfig ( ) . getDateFormat ( ) ; if ( df0 instanceof StdDateFormat ) { StdDateFormat std = ( StdDateFormat ) df0 ; if ( format . hasLocale ( ) ) { std = std . withLocale ( format . getLocale ( ) ) ; } if ( format . hasTimeZone ( ) ) { std = std . withTimeZone ( format . getTimeZone ( ) ) ; } return withFormat ( Boolean . FALSE , std ) ; } if ( ! ( df0 instanceof SimpleDateFormat ) ) { serializers . reportBadDefinition ( handledType ( ) , String . format ( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`" , df0 . getClass ( ) . getName ( ) ) ) ; } SimpleDateFormat df = ( SimpleDateFormat ) df0 ; if ( hasLocale ) { df = new SimpleDateFormat ( df . toPattern ( ) , format . getLocale ( ) ) ; } else { df = ( SimpleDateFormat ) df . clone ( ) ; } TimeZone newTz = format . getTimeZone ( ) ; boolean changeTZ = ( newTz != null ) && ! newTz . equals ( df . getTimeZone ( ) ) ; if ( changeTZ ) { df . setTimeZone ( newTz ) ; } return withFormat ( Boolean . FALSE , df ) ; }
@Override public JsonSerializer<?> createContextual(SerializerProvider serializers, BeanProperty property) throws JsonMappingException { // Note! Should not skip if `property` null since that'd skip check // for config overrides, in case of root value JsonFormat.Value format = findFormatOverrides(serializers, property, handledType()); if (format == null) { return this; } // Simple case first: serialize as numeric timestamp? JsonFormat.Shape shape = format.getShape(); if (shape.isNumeric()) { return withFormat(Boolean.TRUE, null); } // 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky.. // First: custom pattern will override things if (format.hasPattern()) { final Locale loc = format.hasLocale() ? format.getLocale() : serializers.getLocale(); SimpleDateFormat df = new SimpleDateFormat(format.getPattern(), loc); TimeZone tz = format.hasTimeZone() ? format.getTimeZone() : serializers.getTimeZone(); df.setTimeZone(tz); return withFormat(Boolean.FALSE, df); } // Otherwise, need one of these changes: final boolean hasLocale = format.hasLocale(); final boolean hasTZ = format.hasTimeZone(); final boolean asString = (shape == JsonFormat.Shape.STRING); if (!hasLocale && !hasTZ && !asString) { return this; } DateFormat df0 = serializers.getConfig().getDateFormat(); // Jackson's own `StdDateFormat` is quite easy to deal with... if (df0 instanceof StdDateFormat) { StdDateFormat std = (StdDateFormat) df0; if (format.hasLocale()) { std = std.withLocale(format.getLocale()); } if (format.hasTimeZone()) { std = std.withTimeZone(format.getTimeZone()); } return withFormat(Boolean.FALSE, std); } // 08-Jun-2017, tatu: Unfortunately there's no generally usable // mechanism for changing `DateFormat` instances (or even clone()ing) // So: require it be `SimpleDateFormat`; can't config other types if (!(df0 instanceof SimpleDateFormat)) { serializers.reportBadDefinition(handledType(), String.format( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`", df0.getClass().getName())); } SimpleDateFormat df = (SimpleDateFormat) df0; if (hasLocale) { // Ugh. No way to change `Locale`, create copy; must re-crete completely: df = new SimpleDateFormat(df.toPattern(), format.getLocale()); } else { df = (SimpleDateFormat) df.clone(); } TimeZone newTz = format.getTimeZone(); boolean changeTZ = (newTz != null) && !newTz.equals(df.getTimeZone()); if (changeTZ) { df.setTimeZone(newTz); } return withFormat(Boolean.FALSE, df); }
@ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( format . hasPattern ( ) ) { final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( format . getPattern ( ) , loc ) ; TimeZone tz = format . hasTimeZone ( ) ? format . getTimeZone ( ) : serializers . getTimeZone ( ) ; df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } final boolean hasLocale = format . hasLocale ( ) ; final boolean hasTZ = format . hasTimeZone ( ) ; final boolean asString = ( shape == JsonFormat . Shape . STRING ) ; if ( ! hasLocale && ! hasTZ && ! asString ) { return this ; } DateFormat df0 = serializers . getConfig ( ) . getDateFormat ( ) ; if ( df0 instanceof StdDateFormat ) { StdDateFormat std = ( StdDateFormat ) df0 ; if ( format . hasLocale ( ) ) { std = std . withLocale ( format . getLocale ( ) ) ; } if ( format . hasTimeZone ( ) ) { std = std . withTimeZone ( format . getTimeZone ( ) ) ; } return withFormat ( Boolean . FALSE , std ) ; } if ( ! ( df0 instanceof SimpleDateFormat ) ) { serializers . reportBadDefinition ( handledType ( ) , String . format ( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`" , df0 . getClass ( ) . getName ( ) ) ) ; } SimpleDateFormat df = ( SimpleDateFormat ) df0 ; if ( hasLocale ) { df = new SimpleDateFormat ( df . toPattern ( ) , format . getLocale ( ) ) ; } else { df = ( SimpleDateFormat ) df . clone ( ) ; } TimeZone newTz = format . getTimeZone ( ) ; boolean changeTZ = ( newTz != null ) && ! newTz . equals ( df . getTimeZone ( ) ) ; if ( changeTZ ) { df . setTimeZone ( newTz ) ; } return withFormat ( Boolean . FALSE , df ) ; }
JacksonDatabind
11
src/main/java/com/fasterxml/jackson/databind/type/TypeFactory.java
889
930
Problem resolving locally declared generic type
(reported by Hal H) A case like: ``` java class Something { public <T extends Ruleform> T getEntity() public <T extends Ruleform> void setEntity(T entity) } ``` appears to fail on deserialization.
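A small plain-JDK reflection sketch of the fallback the resolver relies on: even when a method-level type variable cannot be resolved from context, its declared upper bound is still available via getBounds(), and the first bound is what gets used:

```java
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;

// Shows that a locally declared type variable <T extends Number> keeps
// its bound in reflection metadata, which a type resolver can fall
// back to instead of failing.
public final class BoundsDemo {
    static class Holder {
        public <T extends Number> T getValue() { return null; }
    }

    public static void main(String[] args) throws Exception {
        Method m = Holder.class.getMethod("getValue");
        TypeVariable<?> tv = (TypeVariable<?>) m.getGenericReturnType();
        Type[] bounds = tv.getBounds(); // implicit Object if none declared
        System.out.println(tv.getName() + " extends " + bounds[0]);
        // prints: T extends class java.lang.Number
    }
}
```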
protected JavaType _fromVariable(TypeVariable<?> type, TypeBindings context) { final String name = type.getName(); // 19-Mar-2015: Without context, all we can check are bounds. if (context == null) { // And to prevent infinite loops, now need this: return _unknownType(); } else { // Ok: here's where context might come in handy! /* 19-Mar-2015, tatu: As per [databind#609], may need to allow * unresolved type variables to handle some cases where bounds * are enough. Let's hope it does not hide real fail cases. */ JavaType actualType = context.findType(name); if (actualType != null) { return actualType; } } /* 29-Jan-2010, tatu: We used to throw exception here, if type was * bound: but the problem is that this can occur for generic "base" * method, overridden by sub-class. If so, we will want to ignore * current type (for method) since it will be masked. */ Type[] bounds = type.getBounds(); // With type variables we must use bound information. // Theoretically this gets tricky, as there may be multiple // bounds ("... extends A & B"); and optimally we might // want to choose the best match. Also, bounds are optional; // but here we are lucky in that implicit "Object" is // added as bounds if so. // Either way let's just use the first bound, for now, and // worry about better match later on if there is need. /* 29-Jan-2010, tatu: One more problem are recursive types * (T extends Comparable<T>). Need to add "placeholder" * for resolution to catch those. */ context._addPlaceholder(name); return _constructType(bounds[0], context); }
protected JavaType _fromVariable ( TypeVariable < ? > type , TypeBindings context ) { final String name = type . getName ( ) ; if ( context == null ) { return _unknownType ( ) ; } else { JavaType actualType = context . findType ( name ) ; if ( actualType != null ) { return actualType ; } } Type [ ] bounds = type . getBounds ( ) ; context . _addPlaceholder ( name ) ; return _constructType ( bounds [ 0 ] , context ) ; }
protected JavaType _fromVariable(TypeVariable<?> type, TypeBindings context) { final String name = type.getName(); // 19-Mar-2015: Without context, all we can check are bounds. if (context == null) { // And to prevent infinite loops, now need this: context = new TypeBindings(this, (Class<?>) null); } else { // Ok: here's where context might come in handy! /* 19-Mar-2015, tatu: As per [databind#609], may need to allow * unresolved type variables to handle some cases where bounds * are enough. Let's hope it does not hide real fail cases. */ JavaType actualType = context.findType(name, false); if (actualType != null) { return actualType; } } /* 29-Jan-2010, tatu: We used to throw exception here, if type was * bound: but the problem is that this can occur for generic "base" * method, overridden by sub-class. If so, we will want to ignore * current type (for method) since it will be masked. */ Type[] bounds = type.getBounds(); // With type variables we must use bound information. // Theoretically this gets tricky, as there may be multiple // bounds ("... extends A & B"); and optimally we might // want to choose the best match. Also, bounds are optional; // but here we are lucky in that implicit "Object" is // added as bounds if so. // Either way let's just use the first bound, for now, and // worry about better match later on if there is need. /* 29-Jan-2010, tatu: One more problem are recursive types * (T extends Comparable<T>). Need to add "placeholder" * for resolution to catch those. */ context._addPlaceholder(name); return _constructType(bounds[0], context); }
protected JavaType _fromVariable ( TypeVariable < ? > type , TypeBindings context ) { final String name = type . getName ( ) ; if ( context == null ) { context = new TypeBindings ( this , ( Class < ? > ) null ) ; } else { JavaType actualType = context . findType ( name , false ) ; if ( actualType != null ) { return actualType ; } } Type [ ] bounds = type . getBounds ( ) ; context . _addPlaceholder ( name ) ; return _constructType ( bounds [ 0 ] , context ) ; }
Cli
4
src/java/org/apache/commons/cli/Parser.java
290
309
PosixParser interprets "-target opt" as "-t arget opt"
This was posted on the Commons-Developer list and confirmed as a bug. > Is this a bug? Or am I using this incorrectly? > I have an option with short and long values. Given code that is essentially what is below, with a PosixParser I see results as follows: > A command line with just "-t" prints out the results of the catch block (OK) > A command line with just "-target" prints out the results of the catch block (OK) > A command line with just "-t foobar.com" prints out "processing selected target: foobar.com" (OK) > A command line with just "-target foobar.com" prints out "processing selected target: arget" (ERROR?) > ============================================================================= > private static final String OPTION_TARGET = "t"; > private static final String OPTION_TARGET_LONG = "target"; > // ... > Option generateTarget = new Option(OPTION_TARGET, OPTION_TARGET_LONG, true, "Generate files for the specified target machine"); > // ... > try { parsedLine = parser.parse(cmdLineOpts, args); } catch (ParseException pe) { System.out.println("Invalid command: " + pe.getMessage() + "\n"); HelpFormatter hf = new HelpFormatter(); hf.printHelp(USAGE, cmdLineOpts); System.exit(-1); } > if (parsedLine.hasOption(OPTION_TARGET)) { System.out.println("processing selected target: " + parsedLine.getOptionValue(OPTION_TARGET)); } It is a bug, but it is due to well-defined behaviour (so that makes me feel a little better about myself ;). To support *special* options (well, I call them special anyway) like -Dsystem.property=value we need to be able to examine the first character of an option. If the first character is itself defined as an Option then the remainder of the token is used as the value, e.g. 'D' is the token; it is an option, so 'system.property=value' is the argument value for that option. This is the behaviour that we are seeing for your example: 't' is the token, it is an option, so 'arget' is the argument value. I suppose a solution to this could be to have a way to specify properties for parsers, in this case 'posix.special.option == true' for turning on *special* options. I'll have a look into this and let you know. Just to keep track of this, and to get you used to how we operate, can you log a bug in Bugzilla for this? Thanks, -John K
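A minimal sketch reproducing the report against the Commons CLI 1.x API (assumes commons-cli on the classpath; the demo class itself is hypothetical):

```java
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

// Demonstrates the burst-token confusion from the report: affected
// versions split "-target foobar.com" into "-t" plus the value "arget".
public class TargetOptionDemo {
    public static void main(String[] args) throws ParseException {
        Options opts = new Options();
        opts.addOption(new Option("t", "target", true,
                "Generate files for the specified target machine"));
        CommandLine line = new PosixParser()
                .parse(opts, new String[] {"-target", "foobar.com"});
        // Expected: foobar.com; buggy versions printed "arget".
        System.out.println("processing selected target: "
                + line.getOptionValue("t"));
    }
}
```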
private void checkRequiredOptions() throws MissingOptionException { // if there are required options that have not been // processsed if (requiredOptions.size() > 0) { Iterator iter = requiredOptions.iterator(); StringBuffer buff = new StringBuffer(); // loop through the required options while (iter.hasNext()) { buff.append(iter.next()); } throw new MissingOptionException(buff.toString()); } }
private void checkRequiredOptions ( ) throws MissingOptionException { if ( requiredOptions . size ( ) > 0 ) { Iterator iter = requiredOptions . iterator ( ) ; StringBuffer buff = new StringBuffer ( ) ; while ( iter . hasNext ( ) ) { buff . append ( iter . next ( ) ) ; } throw new MissingOptionException ( buff . toString ( ) ) ; } }
private void checkRequiredOptions() throws MissingOptionException { // if there are required options that have not been // processsed if (requiredOptions.size() > 0) { Iterator iter = requiredOptions.iterator(); StringBuffer buff = new StringBuffer("Missing required option"); buff.append(requiredOptions.size() == 1 ? "" : "s"); buff.append(": "); // loop through the required options while (iter.hasNext()) { buff.append(iter.next()); } throw new MissingOptionException(buff.toString()); } }
private void checkRequiredOptions ( ) throws MissingOptionException { if ( requiredOptions . size ( ) > 0 ) { Iterator iter = requiredOptions . iterator ( ) ; StringBuffer buff = new StringBuffer ( "Missing required option" ) ; buff . append ( requiredOptions . size ( ) == 1 ? "" : "s" ) ; buff . append ( ": " ) ; while ( iter . hasNext ( ) ) { buff . append ( iter . next ( ) ) ; } throw new MissingOptionException ( buff . toString ( ) ) ; } }
Codec
9
src/java/org/apache/commons/codec/binary/Base64.java
822
837
Base64.encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) throws IAE for valid maxResultSize if isChunked is false
If isChunked is false, Base64.encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) throws an IllegalArgumentException for a valid maxResultSize, because the length estimate unconditionally includes MIME chunking overhead. Test case and fix will be applied shortly.
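A minimal sketch of the failing call (the concrete sizes are illustrative; the arithmetic follows the standard Base64 expansion of 4 output bytes per 3 input bytes):

```java
import org.apache.commons.codec.binary.Base64;

// Unchunked Base64 of n bytes needs 4 * ceil(n / 3) bytes. The buggy
// length check added MIME chunk separators to the estimate even when
// isChunked was false, so a maxResultSize that is perfectly valid for
// unchunked output could be rejected with an IllegalArgumentException.
public class EncodeSizeDemo {
    public static void main(String[] args) {
        byte[] data = new byte[76];
        int unchunkedLen = 4 * ((data.length + 2) / 3); // 104 bytes
        // Before the fix this threw IAE (the estimate included chunk
        // separators); after the fix it succeeds.
        byte[] out = Base64.encodeBase64(data, false, false, unchunkedLen);
        System.out.println(out.length); // 104
    }
}
```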
public static byte[] encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) { if (binaryData == null || binaryData.length == 0) { return binaryData; } long len = getEncodeLength(binaryData, MIME_CHUNK_SIZE, CHUNK_SEPARATOR); if (len > maxResultSize) { throw new IllegalArgumentException("Input array too big, the output array would be bigger (" + len + ") than the specified maxium size of " + maxResultSize); } Base64 b64 = isChunked ? new Base64(urlSafe) : new Base64(0, CHUNK_SEPARATOR, urlSafe); return b64.encode(binaryData); }
public static byte [ ] encodeBase64 ( byte [ ] binaryData , boolean isChunked , boolean urlSafe , int maxResultSize ) { if ( binaryData == null || binaryData . length == 0 ) { return binaryData ; } long len = getEncodeLength ( binaryData , MIME_CHUNK_SIZE , CHUNK_SEPARATOR ) ; if ( len > maxResultSize ) { throw new IllegalArgumentException ( "Input array too big, the output array would be bigger (" + len + ") than the specified maxium size of " + maxResultSize ) ; } Base64 b64 = isChunked ? new Base64 ( urlSafe ) : new Base64 ( 0 , CHUNK_SEPARATOR , urlSafe ) ; return b64 . encode ( binaryData ) ; }
public static byte[] encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) { if (binaryData == null || binaryData.length == 0) { return binaryData; } long len = getEncodeLength(binaryData, isChunked ? MIME_CHUNK_SIZE : 0, CHUNK_SEPARATOR); if (len > maxResultSize) { throw new IllegalArgumentException("Input array too big, the output array would be bigger (" + len + ") than the specified maxium size of " + maxResultSize); } Base64 b64 = isChunked ? new Base64(urlSafe) : new Base64(0, CHUNK_SEPARATOR, urlSafe); return b64.encode(binaryData); }
public static byte [ ] encodeBase64 ( byte [ ] binaryData , boolean isChunked , boolean urlSafe , int maxResultSize ) { if ( binaryData == null || binaryData . length == 0 ) { return binaryData ; } long len = getEncodeLength ( binaryData , isChunked ? MIME_CHUNK_SIZE : 0 , CHUNK_SEPARATOR ) ; if ( len > maxResultSize ) { throw new IllegalArgumentException ( "Input array too big, the output array would be bigger (" + len + ") than the specified maxium size of " + maxResultSize ) ; } Base64 b64 = isChunked ? new Base64 ( urlSafe ) : new Base64 ( 0 , CHUNK_SEPARATOR , urlSafe ) ; return b64 . encode ( binaryData ) ; }
JacksonCore
26
src/main/java/com/fasterxml/jackson/core/json/async/NonBlockingJsonParser.java
87
112
Non-blocking parser reports incorrect locations when fed with non-zero offset
When feeding a non-blocking parser, the input array offset leaks into the offsets reported by `getCurrentLocation()` and `getTokenLocation()`. For example, feeding with an offset of 7 yields tokens whose reported locations are 7 greater than they should be. Likewise the current location reported by the parser is 7 greater than the correct location. It's not possible for a user to work around this issue by subtracting 7 from the reported locations, because the token location may have been established by an earlier feeding with a different offset. Jackson version: 2.9.8 Unit test: ```java import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.core.async.ByteArrayFeeder; import org.junit.Test; import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.assertEquals; public class FeedingOffsetTest { @Test public void inputOffsetShouldNotAffectLocations() throws Exception { JsonFactory jsonFactory = new JsonFactory(); JsonParser parser = jsonFactory.createNonBlockingByteArrayParser(); ByteArrayFeeder feeder = (ByteArrayFeeder) parser.getNonBlockingInputFeeder(); byte[] input = "[[[".getBytes(UTF_8); feeder.feedInput(input, 2, 3); assertEquals(JsonToken.START_ARRAY, parser.nextToken()); assertEquals(1, parser.getCurrentLocation().getByteOffset()); // ACTUAL = 3 assertEquals(1, parser.getTokenLocation().getByteOffset()); // ACTUAL = 3 feeder.feedInput(input, 0, 1); assertEquals(JsonToken.START_ARRAY, parser.nextToken()); assertEquals(2, parser.getCurrentLocation().getByteOffset()); assertEquals(2, parser.getTokenLocation().getByteOffset()); } } ```
@Override public void feedInput(byte[] buf, int start, int end) throws IOException { // Must not have remaining input if (_inputPtr < _inputEnd) { _reportError("Still have %d undecoded bytes, should not call 'feedInput'", _inputEnd - _inputPtr); } if (end < start) { _reportError("Input end (%d) may not be before start (%d)", end, start); } // and shouldn't have been marked as end-of-input if (_endOfInput) { _reportError("Already closed, can not feed more input"); } // Time to update pointers first _currInputProcessed += _origBufferLen; // Also need to adjust row start, to work as if it extended into the past wrt new buffer _currInputRowStart = start - (_inputEnd - _currInputRowStart); // And then update buffer settings _inputBuffer = buf; _inputPtr = start; _inputEnd = end; _origBufferLen = end - start; }
@ Override public void feedInput ( byte [ ] buf , int start , int end ) throws IOException { if ( _inputPtr < _inputEnd ) { _reportError ( "Still have %d undecoded bytes, should not call 'feedInput'" , _inputEnd - _inputPtr ) ; } if ( end < start ) { _reportError ( "Input end (%d) may not be before start (%d)" , end , start ) ; } if ( _endOfInput ) { _reportError ( "Already closed, can not feed more input" ) ; } _currInputProcessed += _origBufferLen ; _currInputRowStart = start - ( _inputEnd - _currInputRowStart ) ; _inputBuffer = buf ; _inputPtr = start ; _inputEnd = end ; _origBufferLen = end - start ; }
@Override public void feedInput(byte[] buf, int start, int end) throws IOException { // Must not have remaining input if (_inputPtr < _inputEnd) { _reportError("Still have %d undecoded bytes, should not call 'feedInput'", _inputEnd - _inputPtr); } if (end < start) { _reportError("Input end (%d) may not be before start (%d)", end, start); } // and shouldn't have been marked as end-of-input if (_endOfInput) { _reportError("Already closed, can not feed more input"); } // Time to update pointers first _currInputProcessed += _origBufferLen; // Also need to adjust row start, to work as if it extended into the past wrt new buffer _currInputRowStart = start - (_inputEnd - _currInputRowStart); // And then update buffer settings _currBufferStart = start; _inputBuffer = buf; _inputPtr = start; _inputEnd = end; _origBufferLen = end - start; }
@ Override public void feedInput ( byte [ ] buf , int start , int end ) throws IOException { if ( _inputPtr < _inputEnd ) { _reportError ( "Still have %d undecoded bytes, should not call 'feedInput'" , _inputEnd - _inputPtr ) ; } if ( end < start ) { _reportError ( "Input end (%d) may not be before start (%d)" , end , start ) ; } if ( _endOfInput ) { _reportError ( "Already closed, can not feed more input" ) ; } _currInputProcessed += _origBufferLen ; _currInputRowStart = start - ( _inputEnd - _currInputRowStart ) ; _currBufferStart = start ; _inputBuffer = buf ; _inputPtr = start ; _inputEnd = end ; _origBufferLen = end - start ; }
Mockito
5
src/org/mockito/internal/verification/VerificationOverTimeImpl.java
75
99
Mockito 1.10.x timeout verification needs JUnit classes (VerifyError, NoClassDefFoundError)
If JUnit is not on the classpath and Mockito is version 1.10.x (as of now 1.10.1 up to 1.10.19) and the code uses the timeout verification, which is not supposed to be related to JUnit, then the JVM may fail with a `VerifyError` or a `NoClassDefFoundError`. This issue has been reported on the [mailing list](https://groups.google.com/forum/#!topic/mockito/A6D7myKiD5k) and on [StackOverflow](http://stackoverflow.com/questions/27721621/java-lang-verifyerror-with-mockito-1-10-17). A simple test like the following with **TestNG** (and no JUnit on the classpath, of course) exposes the issue: ``` import org.testng.annotations.Test; import java.util.Observable; import static org.mockito.Mockito.*; public class VerifyErrorOnVerificationWithTimeoutTest { @Test public void should_not_throw_VerifyError() { verify(mock(Observable.class), timeout(500)).countObservers(); } } ``` With TestNG 5.13.1, the stack trace is: ``` java.lang.VerifyError: (class: org/mockito/internal/verification/VerificationOverTimeImpl, method: verify signature: (Lorg/mockito/internal/verification/api/VerificationData;)V) Incompatible argument to function at org.mockito.verification.Timeout.<init>(Timeout.java:32) at org.mockito.verification.Timeout.<init>(Timeout.java:25) at org.mockito.Mockito.timeout(Mockito.java:2103) at com.example.UserServiceImplTest.test(UserServiceImplTest.java:26) ``` TestNG includes a dependency on JUnit 3.8.1, which has `junit.framework.ComparisonFailure`, but the JVM cannot perform the linking at runtime (`VerifyError` extends `LinkageError`), probably because of incompatible changes to this class between versions 3.x and 4.x. Note that Mockito is compiled against JUnit 4.x. This also reveals that Mockito is no longer compatible with JUnit 3.x. With TestNG 6.8.13, the stack trace is: ``` java.lang.NoClassDefFoundError: junit/framework/ComparisonFailure at java.lang.ClassLoader.defineClass1(Native Method) at java.lang.ClassLoader.defineClassCond(ClassLoader.java:637) at java.lang.ClassLoader.defineClass(ClassLoader.java:621) at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141) at java.net.URLClassLoader.defineClass(URLClassLoader.java:283) at java.net.URLClassLoader.access$000(URLClassLoader.java:58) at java.net.URLClassLoader$1.run(URLClassLoader.java:197) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) at org.mockito.verification.Timeout.<init>(Timeout.java:32) at org.mockito.verification.Timeout.<init>(Timeout.java:25) at org.mockito.Mockito.timeout(Mockito.java:2103) at com.example.UserServiceImplTest.test(UserServiceImplTest.java:26) Caused by: java.lang.ClassNotFoundException: junit.framework.ComparisonFailure at java.net.URLClassLoader$1.run(URLClassLoader.java:202) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) ... 49 more ``` Indeed, JUnit is no longer a dependency of TestNG.
In this specific case the issue is that the `Timeout` class wraps a `VerificationOverTimeImpl` whose try/catch block catches the exception `org.mockito.exceptions.verification.junit.ArgumentsAreDifferent`, which extends `junit.framework.ComparisonFailure`. At this time it seems to be the only place where JUnit is needed; this affects the following public API: ```java Mockito.timeout(...) Mockito.after(...) ```
public void verify(VerificationData data) { AssertionError error = null; timer.start(); while (timer.isCounting()) { try { delegate.verify(data); if (returnOnSuccess) { return; } else { error = null; } } catch (MockitoAssertionError e) { error = handleVerifyException(e); } catch (org.mockito.exceptions.verification.junit.ArgumentsAreDifferent e) { error = handleVerifyException(e); } } if (error != null) { throw error; } }
public void verify ( VerificationData data ) { AssertionError error = null ; timer . start ( ) ; while ( timer . isCounting ( ) ) { try { delegate . verify ( data ) ; if ( returnOnSuccess ) { return ; } else { error = null ; } } catch ( MockitoAssertionError e ) { error = handleVerifyException ( e ) ; } catch ( org . mockito . exceptions . verification . junit . ArgumentsAreDifferent e ) { error = handleVerifyException ( e ) ; } } if ( error != null ) { throw error ; } }
public void verify(VerificationData data) { AssertionError error = null; timer.start(); while (timer.isCounting()) { try { delegate.verify(data); if (returnOnSuccess) { return; } else { error = null; } } catch (MockitoAssertionError e) { error = handleVerifyException(e); } catch (AssertionError e) { error = handleVerifyException(e); } } if (error != null) { throw error; } }
public void verify ( VerificationData data ) { AssertionError error = null ; timer . start ( ) ; while ( timer . isCounting ( ) ) { try { delegate . verify ( data ) ; if ( returnOnSuccess ) { return ; } else { error = null ; } } catch ( MockitoAssertionError e ) { error = handleVerifyException ( e ) ; } catch ( AssertionError e ) { error = handleVerifyException ( e ) ; } } if ( error != null ) { throw error ; } }
JacksonDatabind
50
src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializer.java
376
474
`@JsonIdentityInfo` deserialization fails with combination of forward references, `@JsonCreator`
As a follow-up to bug #1255, the patch I provided exposes related deserialization problems. I have attached a small project ('jackson-test.zip') to demonstrate these issues. When run with both patches from #1255, the output is provided in the attached 'both.txt'. When run with just the first patch from #1255, the output is provided in the attached 'first.txt'. Important points: 1. When the object expressed as an id is contained within a collection or map (List in this example), deserialization works correctly. When it is a field of an object, deserialization is broken. 2. This particular example doesn't have forward references, but it does have cycles. Nevertheless, I have seen situations where non-cyclical forward-references also do not deserialize properly, with the same caveat as in 1. [jackson-test.zip](https://github.com/FasterXML/jackson-databind/files/301884/jackson-test.zip) [both.txt](https://github.com/FasterXML/jackson-databind/files/301885/both.txt) [first.txt](https://github.com/FasterXML/jackson-databind/files/301886/first.txt)
@Override @SuppressWarnings("resource") protected Object _deserializeUsingPropertyBased(final JsonParser p, final DeserializationContext ctxt) throws IOException { final PropertyBasedCreator creator = _propertyBasedCreator; PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader); TokenBuffer unknown = null; JsonToken t = p.getCurrentToken(); for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) { String propName = p.getCurrentName(); p.nextToken(); // to point to value // creator property? SettableBeanProperty creatorProp = creator.findCreatorProperty(propName); if (creatorProp != null) { // Last creator property to set? if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) { p.nextToken(); // to move to following FIELD_NAME/END_OBJECT Object bean; try { bean = creator.build(ctxt, buffer); } catch (Exception e) { bean = wrapInstantiationProblem(e, ctxt); } if (bean == null) { return ctxt.handleInstantiationProblem(handledType(), null, _creatorReturnedNullException()); } // [databind#631]: Assign current value, to be accessible by custom serializers p.setCurrentValue(bean); // polymorphic? if (bean.getClass() != _beanType.getRawClass()) { return handlePolymorphic(p, ctxt, bean, unknown); } if (unknown != null) { // nope, just extra unknown stuff... bean = handleUnknownProperties(ctxt, bean, unknown); } // or just clean? return deserialize(p, ctxt, bean); } continue; } // Object Id property? if (buffer.readIdProperty(propName)) { continue; } // regular property? needs buffering SettableBeanProperty prop = _beanProperties.find(propName); if (prop != null) { buffer.bufferProperty(prop, _deserializeWithErrorWrapping(p, ctxt, prop)); // 14-Jun-2016, tatu: As per [databind#1261], looks like we need additional // handling of forward references here. Not exactly sure why existing // facilities did not cover, but this does appear to solve the problem continue; } // Things marked as ignorable should not be passed to any setter if (_ignorableProps != null && _ignorableProps.contains(propName)) { handleIgnoredProperty(p, ctxt, handledType(), propName); continue; } // "any property"? if (_anySetter != null) { try { buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt)); } catch (Exception e) { wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt); } continue; } // Ok then, let's collect the whole field; name and value if (unknown == null) { unknown = new TokenBuffer(p, ctxt); } unknown.writeFieldName(propName); unknown.copyCurrentStructure(p); } // We hit END_OBJECT, so: Object bean; try { bean = creator.build(ctxt, buffer); } catch (Exception e) { wrapInstantiationProblem(e, ctxt); bean = null; // never gets here } if (unknown != null) { // polymorphic? if (bean.getClass() != _beanType.getRawClass()) { return handlePolymorphic(null, ctxt, bean, unknown); } // no, just some extra unknown properties return handleUnknownProperties(ctxt, bean, unknown); } return bean; }
@ Override @ SuppressWarnings ( "resource" ) protected Object _deserializeUsingPropertyBased ( final JsonParser p , final DeserializationContext ctxt ) throws IOException { final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer unknown = null ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { bean = wrapInstantiationProblem ( e , ctxt ) ; } if ( bean == null ) { return ctxt . handleInstantiationProblem ( handledType ( ) , null , _creatorReturnedNullException ( ) ) ; } p . setCurrentValue ( bean ) ; if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( p , ctxt , bean , unknown ) ; } if ( unknown != null ) { bean = handleUnknownProperties ( ctxt , bean , unknown ) ; } return deserialize ( p , ctxt , bean ) ; } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , _deserializeWithErrorWrapping ( p , ctxt , prop ) ) ; continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { try { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; } continue ; } if ( unknown == null ) { unknown = new TokenBuffer ( p , ctxt ) ; } unknown . writeFieldName ( propName ) ; unknown . copyCurrentStructure ( p ) ; } Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; bean = null ; } if ( unknown != null ) { if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( null , ctxt , bean , unknown ) ; } return handleUnknownProperties ( ctxt , bean , unknown ) ; } return bean ; }
@Override @SuppressWarnings("resource") protected Object _deserializeUsingPropertyBased(final JsonParser p, final DeserializationContext ctxt) throws IOException { final PropertyBasedCreator creator = _propertyBasedCreator; PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader); TokenBuffer unknown = null; JsonToken t = p.getCurrentToken(); List<BeanReferring> referrings = null; for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) { String propName = p.getCurrentName(); p.nextToken(); // to point to value // creator property? SettableBeanProperty creatorProp = creator.findCreatorProperty(propName); if (creatorProp != null) { // Last creator property to set? if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) { p.nextToken(); // to move to following FIELD_NAME/END_OBJECT Object bean; try { bean = creator.build(ctxt, buffer); } catch (Exception e) { bean = wrapInstantiationProblem(e, ctxt); } if (bean == null) { return ctxt.handleInstantiationProblem(handledType(), null, _creatorReturnedNullException()); } // [databind#631]: Assign current value, to be accessible by custom serializers p.setCurrentValue(bean); // polymorphic? if (bean.getClass() != _beanType.getRawClass()) { return handlePolymorphic(p, ctxt, bean, unknown); } if (unknown != null) { // nope, just extra unknown stuff... bean = handleUnknownProperties(ctxt, bean, unknown); } // or just clean? return deserialize(p, ctxt, bean); } continue; } // Object Id property? if (buffer.readIdProperty(propName)) { continue; } // regular property? needs buffering SettableBeanProperty prop = _beanProperties.find(propName); if (prop != null) { try { buffer.bufferProperty(prop, _deserializeWithErrorWrapping(p, ctxt, prop)); } catch (UnresolvedForwardReference reference) { // 14-Jun-2016, tatu: As per [databind#1261], looks like we need additional // handling of forward references here. Not exactly sure why existing // facilities did not cover, but this does appear to solve the problem BeanReferring referring = handleUnresolvedReference(p, prop, buffer, reference); if (referrings == null) { referrings = new ArrayList<BeanReferring>(); } referrings.add(referring); } continue; } // Things marked as ignorable should not be passed to any setter if (_ignorableProps != null && _ignorableProps.contains(propName)) { handleIgnoredProperty(p, ctxt, handledType(), propName); continue; } // "any property"? if (_anySetter != null) { try { buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt)); } catch (Exception e) { wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt); } continue; } // Ok then, let's collect the whole field; name and value if (unknown == null) { unknown = new TokenBuffer(p, ctxt); } unknown.writeFieldName(propName); unknown.copyCurrentStructure(p); } // We hit END_OBJECT, so: Object bean; try { bean = creator.build(ctxt, buffer); } catch (Exception e) { wrapInstantiationProblem(e, ctxt); bean = null; // never gets here } if (referrings != null) { for (BeanReferring referring : referrings) { referring.setBean(bean); } } if (unknown != null) { // polymorphic? if (bean.getClass() != _beanType.getRawClass()) { return handlePolymorphic(null, ctxt, bean, unknown); } // no, just some extra unknown properties return handleUnknownProperties(ctxt, bean, unknown); } return bean; }
@ Override @ SuppressWarnings ( "resource" ) protected Object _deserializeUsingPropertyBased ( final JsonParser p , final DeserializationContext ctxt ) throws IOException { final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer unknown = null ; JsonToken t = p . getCurrentToken ( ) ; List < BeanReferring > referrings = null ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { bean = wrapInstantiationProblem ( e , ctxt ) ; } if ( bean == null ) { return ctxt . handleInstantiationProblem ( handledType ( ) , null , _creatorReturnedNullException ( ) ) ; } p . setCurrentValue ( bean ) ; if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( p , ctxt , bean , unknown ) ; } if ( unknown != null ) { bean = handleUnknownProperties ( ctxt , bean , unknown ) ; } return deserialize ( p , ctxt , bean ) ; } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { try { buffer . bufferProperty ( prop , _deserializeWithErrorWrapping ( p , ctxt , prop ) ) ; } catch ( UnresolvedForwardReference reference ) { BeanReferring referring = handleUnresolvedReference ( p , prop , buffer , reference ) ; if ( referrings == null ) { referrings = new ArrayList < BeanReferring > ( ) ; } referrings . add ( referring ) ; } continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { try { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; } continue ; } if ( unknown == null ) { unknown = new TokenBuffer ( p , ctxt ) ; } unknown . writeFieldName ( propName ) ; unknown . copyCurrentStructure ( p ) ; } Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; bean = null ; } if ( referrings != null ) { for ( BeanReferring referring : referrings ) { referring . setBean ( bean ) ; } } if ( unknown != null ) { if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( null , ctxt , bean , unknown ) ; } return handleUnknownProperties ( ctxt , bean , unknown ) ; } return bean ; }
JacksonDatabind
27
src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializer.java
773
857
Problem deserializing External Type Id if type id comes before POJO
(note: seems to be similar or related to https://github.com/FasterXML/jackson-module-afterburner/issues/58) With 2.6, it looks like handling of External Type Id is broken in some rare (?) cases; existing unit tests did not catch this. At this point I am speculating this is due to some refactoring, or to the change to use the more efficient 'nextFieldName()' method.
@SuppressWarnings("resource") protected Object deserializeUsingPropertyBasedWithExternalTypeId(JsonParser p, DeserializationContext ctxt) throws IOException { final ExternalTypeHandler ext = _externalTypeIdHandler.start(); final PropertyBasedCreator creator = _propertyBasedCreator; PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader); TokenBuffer tokens = new TokenBuffer(p); tokens.writeStartObject(); JsonToken t = p.getCurrentToken(); for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) { String propName = p.getCurrentName(); p.nextToken(); // to point to value // creator property? SettableBeanProperty creatorProp = creator.findCreatorProperty(propName); if (creatorProp != null) { // first: let's check to see if this might be part of value with external type id: // 11-Sep-2015, tatu: Important; do NOT pass buffer as last arg, but null, // since it is not the bean if (ext.handlePropertyValue(p, ctxt, propName, buffer)) { ; } else { // Last creator property to set? if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) { t = p.nextToken(); // to move to following FIELD_NAME/END_OBJECT Object bean; try { bean = creator.build(ctxt, buffer); } catch (Exception e) { wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt); continue; // never gets here } // if so, need to copy all remaining tokens into buffer while (t == JsonToken.FIELD_NAME) { p.nextToken(); // to skip name tokens.copyCurrentStructure(p); t = p.nextToken(); } if (bean.getClass() != _beanType.getRawClass()) { // !!! 08-Jul-2011, tatu: Could theoretically support; but for now // it's too complicated, so bail out throw ctxt.mappingException("Can not create polymorphic instances with unwrapped values"); } return ext.complete(p, ctxt, bean); } } continue; } // Object Id property? if (buffer.readIdProperty(propName)) { continue; } // regular property? needs buffering SettableBeanProperty prop = _beanProperties.find(propName); if (prop != null) { buffer.bufferProperty(prop, prop.deserialize(p, ctxt)); continue; } // external type id (or property that depends on it)? if (ext.handlePropertyValue(p, ctxt, propName, null)) { continue; } /* As per [JACKSON-313], things marked as ignorable should not be * passed to any setter */ if (_ignorableProps != null && _ignorableProps.contains(propName)) { handleIgnoredProperty(p, ctxt, handledType(), propName); continue; } // "any property"? if (_anySetter != null) { buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt)); } } // We hit END_OBJECT; resolve the pieces: try { return ext.complete(p, ctxt, buffer, creator); } catch (Exception e) { wrapInstantiationProblem(e, ctxt); return null; // never gets here } }
@ SuppressWarnings ( "resource" ) protected Object deserializeUsingPropertyBasedWithExternalTypeId ( JsonParser p , DeserializationContext ctxt ) throws IOException { final ExternalTypeHandler ext = _externalTypeIdHandler . start ( ) ; final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer tokens = new TokenBuffer ( p ) ; tokens . writeStartObject ( ) ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( ext . handlePropertyValue ( p , ctxt , propName , buffer ) ) { ; } else { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { t = p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; continue ; } while ( t == JsonToken . FIELD_NAME ) { p . nextToken ( ) ; tokens . copyCurrentStructure ( p ) ; t = p . nextToken ( ) ; } if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { throw ctxt . mappingException ( "Can not create polymorphic instances with unwrapped values" ) ; } return ext . complete ( p , ctxt , bean ) ; } } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , prop . deserialize ( p , ctxt ) ) ; continue ; } if ( ext . handlePropertyValue ( p , ctxt , propName , null ) ) { continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } } try { return ext . complete ( p , ctxt , buffer , creator ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; return null ; } }
@SuppressWarnings("resource") protected Object deserializeUsingPropertyBasedWithExternalTypeId(JsonParser p, DeserializationContext ctxt) throws IOException { final ExternalTypeHandler ext = _externalTypeIdHandler.start(); final PropertyBasedCreator creator = _propertyBasedCreator; PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader); TokenBuffer tokens = new TokenBuffer(p); tokens.writeStartObject(); JsonToken t = p.getCurrentToken(); for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) { String propName = p.getCurrentName(); p.nextToken(); // to point to value // creator property? SettableBeanProperty creatorProp = creator.findCreatorProperty(propName); if (creatorProp != null) { // first: let's check to see if this might be part of value with external type id: // 11-Sep-2015, tatu: Important; do NOT pass buffer as last arg, but null, // since it is not the bean if (ext.handlePropertyValue(p, ctxt, propName, null)) { ; } else { // Last creator property to set? if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) { t = p.nextToken(); // to move to following FIELD_NAME/END_OBJECT Object bean; try { bean = creator.build(ctxt, buffer); } catch (Exception e) { wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt); continue; // never gets here } // if so, need to copy all remaining tokens into buffer while (t == JsonToken.FIELD_NAME) { p.nextToken(); // to skip name tokens.copyCurrentStructure(p); t = p.nextToken(); } if (bean.getClass() != _beanType.getRawClass()) { // !!! 08-Jul-2011, tatu: Could theoretically support; but for now // it's too complicated, so bail out throw ctxt.mappingException("Can not create polymorphic instances with unwrapped values"); } return ext.complete(p, ctxt, bean); } } continue; } // Object Id property? if (buffer.readIdProperty(propName)) { continue; } // regular property? needs buffering SettableBeanProperty prop = _beanProperties.find(propName); if (prop != null) { buffer.bufferProperty(prop, prop.deserialize(p, ctxt)); continue; } // external type id (or property that depends on it)? if (ext.handlePropertyValue(p, ctxt, propName, null)) { continue; } /* As per [JACKSON-313], things marked as ignorable should not be * passed to any setter */ if (_ignorableProps != null && _ignorableProps.contains(propName)) { handleIgnoredProperty(p, ctxt, handledType(), propName); continue; } // "any property"? if (_anySetter != null) { buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt)); } } // We hit END_OBJECT; resolve the pieces: try { return ext.complete(p, ctxt, buffer, creator); } catch (Exception e) { wrapInstantiationProblem(e, ctxt); return null; // never gets here } }
@ SuppressWarnings ( "resource" ) protected Object deserializeUsingPropertyBasedWithExternalTypeId ( JsonParser p , DeserializationContext ctxt ) throws IOException { final ExternalTypeHandler ext = _externalTypeIdHandler . start ( ) ; final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer tokens = new TokenBuffer ( p ) ; tokens . writeStartObject ( ) ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( ext . handlePropertyValue ( p , ctxt , propName , null ) ) { ; } else { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { t = p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; continue ; } while ( t == JsonToken . FIELD_NAME ) { p . nextToken ( ) ; tokens . copyCurrentStructure ( p ) ; t = p . nextToken ( ) ; } if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { throw ctxt . mappingException ( "Can not create polymorphic instances with unwrapped values" ) ; } return ext . complete ( p , ctxt , bean ) ; } } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , prop . deserialize ( p , ctxt ) ) ; continue ; } if ( ext . handlePropertyValue ( p , ctxt , propName , null ) ) { continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } } try { return ext . complete ( p , ctxt , buffer , creator ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; return null ; } }
Cli
14
src/java/org/apache/commons/cli2/option/GroupImpl.java
237
282
adding a FileValidator results in ClassCastException in parser.parseAndHelp(args)
When I add a FileValidator.getExistingFileInstance() to an Argument, I get a ClassCastException when I parse args. Below is a test case; invoke it with java -classpath commons-cli-2.0-SNAPSHOT.jar org.apache.commons.cli2.issues.CLI2Sample --file-name path-to-an-existing-file Run it and you get: Exception in thread "main" java.lang.ClassCastException: java.io.File cannot be cast to java.lang.String at org.apache.commons.cli2.validation.FileValidator.validate(FileValidator.java:122) at org.apache.commons.cli2.option.ArgumentImpl.validate(ArgumentImpl.java:250) at org.apache.commons.cli2.option.ParentImpl.validate(ParentImpl.java:123) at org.apache.commons.cli2.option.DefaultOption.validate(DefaultOption.java:175) at org.apache.commons.cli2.option.GroupImpl.validate(GroupImpl.java:264) at org.apache.commons.cli2.commandline.Parser.parse(Parser.java:105) at org.apache.commons.cli2.commandline.Parser.parseAndHelp(Parser.java:125) at org.apache.commons.cli2.issues.CLI2Sample.main(CLI2Sample.java:38) Comment out the withValidator call and it runs with no exception. I also get a similar ClassCastException if I add a .withValidator(NumberValidator.getIntegerInstance()) to another option/argument. Here is the source: package org.apache.commons.cli2.issues; import java.io.File; import org.apache.commons.cli2.CommandLine; import org.apache.commons.cli2.Group; import org.apache.commons.cli2.builder.ArgumentBuilder; import org.apache.commons.cli2.builder.DefaultOptionBuilder; import org.apache.commons.cli2.builder.GroupBuilder; import org.apache.commons.cli2.commandline.Parser; import org.apache.commons.cli2.option.DefaultOption; import org.apache.commons.cli2.validation.FileValidator; public class CLI2Sample { public static void main(String[] args) { final DefaultOptionBuilder obuilder = new DefaultOptionBuilder(); final ArgumentBuilder abuilder = new ArgumentBuilder(); final GroupBuilder gbuilder = new GroupBuilder(); DefaultOption fileNameOption = obuilder .withShortName("f") .withLongName("file-name") .withRequired(true) .withDescription("name of an existing file") .withArgument(abuilder .withName("file-name") .withValidator(FileValidator.getExistingFileInstance()) .create()) .create(); Group options = gbuilder .withName("options") .withOption(fileNameOption) .create(); Parser parser = new Parser(); parser.setHelpTrigger("--help"); parser.setGroup(options); CommandLine cl = parser.parseAndHelp(args); } }
public void validate(final WriteableCommandLine commandLine) throws OptionException { // number of options found int present = 0; // reference to first unexpected option Option unexpected = null; for (final Iterator i = options.iterator(); i.hasNext();) { final Option option = (Option) i.next(); // needs validation? boolean validate = option.isRequired() || option instanceof Group; if (validate) { option.validate(commandLine); } // if the child option is present then validate it if (commandLine.hasOption(option)) { if (++present > maximum) { unexpected = option; break; } option.validate(commandLine); } } // too many options if (unexpected != null) { throw new OptionException(this, ResourceConstants.UNEXPECTED_TOKEN, unexpected.getPreferredName()); } // too few option if (present < minimum) { throw new OptionException(this, ResourceConstants.MISSING_OPTION); } // validate each anonymous argument for (final Iterator i = anonymous.iterator(); i.hasNext();) { final Option option = (Option) i.next(); option.validate(commandLine); } }
public void validate ( final WriteableCommandLine commandLine ) throws OptionException { int present = 0 ; Option unexpected = null ; for ( final Iterator i = options . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; boolean validate = option . isRequired ( ) || option instanceof Group ; if ( validate ) { option . validate ( commandLine ) ; } if ( commandLine . hasOption ( option ) ) { if ( ++ present > maximum ) { unexpected = option ; break ; } option . validate ( commandLine ) ; } } if ( unexpected != null ) { throw new OptionException ( this , ResourceConstants . UNEXPECTED_TOKEN , unexpected . getPreferredName ( ) ) ; } if ( present < minimum ) { throw new OptionException ( this , ResourceConstants . MISSING_OPTION ) ; } for ( final Iterator i = anonymous . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; option . validate ( commandLine ) ; } }
public void validate(final WriteableCommandLine commandLine) throws OptionException { // number of options found int present = 0; // reference to first unexpected option Option unexpected = null; for (final Iterator i = options.iterator(); i.hasNext();) { final Option option = (Option) i.next(); // needs validation? boolean validate = option.isRequired() || option instanceof Group; // if the child option is present then validate it if (commandLine.hasOption(option)) { if (++present > maximum) { unexpected = option; break; } validate = true; } if (validate) { option.validate(commandLine); } } // too many options if (unexpected != null) { throw new OptionException(this, ResourceConstants.UNEXPECTED_TOKEN, unexpected.getPreferredName()); } // too few option if (present < minimum) { throw new OptionException(this, ResourceConstants.MISSING_OPTION); } // validate each anonymous argument for (final Iterator i = anonymous.iterator(); i.hasNext();) { final Option option = (Option) i.next(); option.validate(commandLine); } }
public void validate ( final WriteableCommandLine commandLine ) throws OptionException { int present = 0 ; Option unexpected = null ; for ( final Iterator i = options . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; boolean validate = option . isRequired ( ) || option instanceof Group ; if ( commandLine . hasOption ( option ) ) { if ( ++ present > maximum ) { unexpected = option ; break ; } validate = true ; } if ( validate ) { option . validate ( commandLine ) ; } } if ( unexpected != null ) { throw new OptionException ( this , ResourceConstants . UNEXPECTED_TOKEN , unexpected . getPreferredName ( ) ) ; } if ( present < minimum ) { throw new OptionException ( this , ResourceConstants . MISSING_OPTION ) ; } for ( final Iterator i = anonymous . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; option . validate ( commandLine ) ; } }

Dataset Card for NL2Fix

Dataset Details

NL2Fix pairs real bugs from the Defects4J benchmark with the natural-language issue reports that describe them. Each record gives the buggy Java method and the developer's fix (each with and without comments), the file path and line span of the defect, and the title and body of the linked issue, supporting the task of generating functionally correct code edits from issue descriptions.

Cite the Dataset

  @article{fakhoury2023towards,
    title={Towards generating functionally correct code edits from natural language issue descriptions},
    author={Fakhoury, Sarah and Chakraborty, Saikat and Musuvathi, Madan and Lahiri, Shuvendu K},
    journal={arXiv preprint arXiv:2304.03816},
    year={2023}
  }

Dataset Structure

  • name: defects4j_project
    dtype: string
  • name: defects4j_bug_id
    dtype: int
  • name: file_path
    dtype: string
  • name: bug_start_line
    dtype: int
  • name: bug_end_line
    dtype: int
  • name: issue_title
    dtype: string
  • name: issue_description
    dtype: string
  • name: original_src
    dtype: string
  • name: original_src_wo_comments
    dtype: string
  • name: fixed_src
    dtype: string
  • name: fixed_src_wo_comments
    dtype: string
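
For a quick look at the records, here is a minimal loading sketch using the Hugging Face `datasets` library. The repository id and split name below are placeholders, not part of this card; substitute this dataset's actual hub path.

```python
import difflib
from datasets import load_dataset

# Hypothetical repo id and split -- replace with this dataset's actual hub path.
ds = load_dataset("<org>/nl2fix", split="train")

record = ds[0]
print(record["defects4j_project"], record["defects4j_bug_id"])
print(record["issue_title"])

# Reconstruct a unified diff of the developer fix from the comment-free sources.
diff = difflib.unified_diff(
    record["original_src_wo_comments"].splitlines(),
    record["fixed_src_wo_comments"].splitlines(),
    fromfile=record["file_path"] + " (buggy)",
    tofile=record["file_path"] + " (fixed)",
    lineterm="",
)
print("\n".join(diff))
```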


Source Data

Defects4J: https://github.com/rjust/defects4j/tree/master
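
Each record corresponds to a reproducible bug in Defects4J, so the buggy and fixed revisions can be checked out with the standard `defects4j checkout` command. The sketch below assumes a local Defects4J installation with `defects4j` on the PATH and uses the Mockito bug 5 record shown above; the working directories are arbitrary.

```python
import subprocess

# Check out the buggy ("b") and fixed ("f") revisions of one record's bug.
project, bug_id = "Mockito", "5"
for rev, workdir in [("b", "/tmp/mockito_5_buggy"), ("f", "/tmp/mockito_5_fixed")]:
    subprocess.run(
        ["defects4j", "checkout",
         "-p", project,           # Defects4J project name
         "-v", f"{bug_id}{rev}",  # bug id plus revision suffix: b = buggy, f = fixed
         "-w", workdir],          # target working directory
        check=True,
    )
```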
