idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
2,700
/**
 * If this entry represents a directory on disk, returns a TarEntry for each
 * of its children; otherwise returns an empty array.
 *
 * @return entries for the directory's children, never null
 * @throws InvalidHeaderException propagated from the TarEntry constructor
 */
public TarEntry[] getDirectoryEntries() throws InvalidHeaderException {
    if (this.file == null || !this.file.isDirectory()) {
        return new TarEntry[0];
    }
    // BUG FIX: File.list() returns null on I/O error or if the directory
    // vanished; the original dereferenced it unconditionally (NPE).
    String[] list = this.file.list();
    if (list == null) {
        return new TarEntry[0];
    }
    TarEntry[] result = new TarEntry[list.length];
    for (int i = 0; i < list.length; ++i) {
        result[i] = new TarEntry(new File(this.file, list[i]));
    }
    return result;
}
If this entry represents a file and the file is a directory return an array of TarEntries for this entry s children .
2,701
/**
 * Computes the checksum of a tar entry header: the sum of all bytes,
 * each treated as unsigned.
 */
public long computeCheckSum(byte[] buf) {
    long total = 0;
    for (final byte b : buf) {
        total += b & 0xff;
    }
    return total;
}
Compute the checksum of a tar entry header .
2,702
/**
 * Writes this entry's header information into the supplied header buffer,
 * field by field in the order mandated by the tar format, then computes
 * and stamps the checksum.
 *
 * @param outbuf the header record buffer to fill (padded with NULs)
 * @throws InvalidHeaderException if a plain UNIX-format name exceeds 100 chars
 */
public void writeEntryHeader(byte[] outbuf) throws InvalidHeaderException {
    int offset = 0;
    // Old-style UNIX tar has no prefix field, so names are capped at 100 chars.
    if (this.isUnixTarFormat()) {
        if (this.header.name.length() > 100) {
            throw new InvalidHeaderException("file path is greater than 100 characters, " + this.header.name);
        }
    }
    offset = TarHeader.getFileNameBytes(this.header.name.toString(), outbuf);
    offset = TarHeader.getOctalBytes(this.header.mode, outbuf, offset, TarHeader.MODELEN);
    offset = TarHeader.getOctalBytes(this.header.userId, outbuf, offset, TarHeader.UIDLEN);
    offset = TarHeader.getOctalBytes(this.header.groupId, outbuf, offset, TarHeader.GIDLEN);
    long size = this.header.size;
    offset = TarHeader.getLongOctalBytes(size, outbuf, offset, TarHeader.SIZELEN);
    offset = TarHeader.getLongOctalBytes(this.header.modTime, outbuf, offset, TarHeader.MODTIMELEN);
    // The checksum field is written as all spaces first; the real value is
    // computed over the completed header afterwards, per the tar spec.
    int csOffset = offset;
    for (int c = 0; c < TarHeader.CHKSUMLEN; ++c) {
        outbuf[offset++] = (byte) ' ';
    }
    outbuf[offset++] = this.header.linkFlag;
    offset = TarHeader.getNameBytes(this.header.linkName, outbuf, offset, TarHeader.NAMELEN);
    if (this.unixFormat) {
        // Plain UNIX format: the magic field is all zeros.
        for (int i = 0; i < TarHeader.MAGICLEN; ++i) {
            outbuf[offset++] = 0;
        }
    } else {
        offset = TarHeader.getNameBytes(this.header.magic, outbuf, offset, TarHeader.MAGICLEN);
    }
    offset = TarHeader.getNameBytes(this.header.userName, outbuf, offset, TarHeader.UNAMELEN);
    offset = TarHeader.getNameBytes(this.header.groupName, outbuf, offset, TarHeader.GNAMELEN);
    offset = TarHeader.getOctalBytes(this.header.devMajor, outbuf, offset, TarHeader.DEVLEN);
    offset = TarHeader.getOctalBytes(this.header.devMinor, outbuf, offset, TarHeader.DEVLEN);
    // Pad the remainder of the record with NULs.
    for (; offset < outbuf.length;) {
        outbuf[offset++] = 0;
    }
    long checkSum = this.computeCheckSum(outbuf);
    TarHeader.getCheckSumOctalBytes(checkSum, outbuf, csOffset, TarHeader.CHKSUMLEN);
}
Write an entry s header information to a header buffer . This method can throw an InvalidHeaderException
2,703
/**
 * Fills in a TarHeader given only the entry's name: ustar format, default
 * permissions (040755 for directories, 0100644 for files), zero ids/size,
 * and the current time as modification time.
 *
 * Removes the original's duplicate assignments (checkSum, devMajor and
 * devMinor were each set twice).
 *
 * @param hdr  the header to populate
 * @param name the entry name; a trailing '/' marks a directory
 */
public void nameTarHeader(TarHeader hdr, String name) {
    boolean isDir = name.endsWith("/");
    this.gnuFormat = false;
    this.ustarFormat = true;
    this.unixFormat = false;
    hdr.name = new StringBuffer(name);
    hdr.mode = isDir ? 040755 : 0100644;
    hdr.userId = 0;
    hdr.groupId = 0;
    hdr.size = 0;
    hdr.checkSum = 0;
    // tar stores mod time in seconds since the epoch
    hdr.modTime = (new java.util.Date()).getTime() / 1000;
    hdr.linkFlag = isDir ? TarHeader.LF_DIR : TarHeader.LF_NORMAL;
    hdr.linkName = new StringBuffer("");
    hdr.userName = new StringBuffer("");
    hdr.groupName = new StringBuffer("");
    hdr.devMajor = 0;
    hdr.devMinor = 0;
}
Fill in a TarHeader given only the entry s name .
2,704
/**
 * Performs one buffered copy step from currentNodeStream to outputStream.
 * When the source is exhausted (IOUtil.copy returns -1), closes and clears
 * the node stream and ends the current asset entry.
 */
private void doCopy() throws IOException {
    int copied = IOUtil.copy(currentNodeStream, outputStream, BUFFER_LENGTH);
    // -1 signals EOF; finish this asset and reset state for the next one.
    if (copied == -1) {
        currentNodeStream.close();
        currentNodeStream = null;
        endAsset();
    }
}
Performs copy operation between currentNodeStream and outputStream using buffer length .
2,705
/**
 * Starts a new entry in the output stream for the given path and asset
 * by delegating to putNextEntry.
 */
private void startAsset(final String path, final Asset asset) throws IOException {
    putNextEntry(outputStream, path, asset);
}
Start entry in stream .
2,706
/**
 * Looks up the value mapped to the given key in the properties map.
 *
 * @throws RuntimeException if no value is mapped to the key
 */
String getProperty(String key) {
    final String found = properties.get(key);
    if (found != null) {
        return found;
    }
    throw new RuntimeException("No property value found for key " + key);
}
Gets the value for the given key in the properties map . If the property is not found an exception is thrown .
2,707
/**
 * Writes the bzip2 stream header: the magic bytes 'B' and 'Z', the format
 * marker 'h' (Huffman), and a digit encoding blockSize100k, then sets up
 * working state and initialises the first block.
 */
private void init() throws IOException {
    bsPutUByte('B');
    bsPutUByte('Z');
    this.data = new Data(this.blockSize100k);
    this.blockSorter = new BlockSort(this.data);
    bsPutUByte('h');
    // '0' + blockSize100k yields the ASCII digit for the block size
    bsPutUByte('0' + this.blockSize100k);
    this.combinedCRC = 0;
    initBlock();
}
Writes the magic bytes BZ at the start of the stream, a byte indicating the file format (h for Huffman-compressed), followed by a digit indicating blockSize100k .
2,708
/**
 * Tracks the last byte written and performs run-length encoding, the first
 * step of the bzip2 algorithm. A run is flushed once it would exceed 254
 * repetitions or when a different byte arrives.
 */
private void write0(int b) throws IOException {
    if (this.currentChar != -1) {
        b &= 0xff;
        if (this.currentChar == b) {
            // Same byte as the current run: extend, flushing at the cap.
            if (++this.runLength > 254) {
                writeRun();
                this.currentChar = -1;
                this.runLength = 0;
            }
        } else {
            // Different byte: flush the previous run and start a new one.
            writeRun();
            this.runLength = 1;
            this.currentChar = b;
        }
    } else {
        // No run in progress (-1 sentinel): start one with this byte.
        this.currentChar = b & 0xff;
        this.runLength++;
    }
}
Keeps track of the last bytes written and implicitly performs run - length encoding as the first step of the bzip2 algorithm .
2,709
/**
 * Removes, if present, the leading slash of the specified path and returns
 * the adjusted result.
 */
public static String optionallyRemovePrecedingSlash(final String path) {
    assertSpecified(path);
    return isFirstCharSlash(path) ? path.substring(1) : path;
}
Removes if present the absolute slash preceding the specified path and returns the adjusted result .
2,710
/**
 * Removes, if present, the trailing slash of the specified path and returns
 * the adjusted result.
 */
public static String optionallyRemoveFollowingSlash(final String path) {
    assertSpecified(path);
    return isLastCharSlash(path) ? path.substring(0, path.length() - 1) : path;
}
Removes if present the absolute slash following the specified path and returns the adjusted result .
2,711
/**
 * Appends the archive path separator to the specified path if it is not
 * already present, and returns the adjusted result.
 */
public static String optionallyAppendSlash(final String path) {
    assertSpecified(path);
    return isLastCharSlash(path) ? path : path + ArchivePath.SEPARATOR;
}
Adds if not already present the absolute slash following the specified path and returns the adjusted result .
2,712
/**
 * Prepends the archive path separator to the specified path if not already
 * present; a null argument is treated as the empty String.
 */
public static String optionallyPrependSlash(final String path) {
    final String resolved = (path == null) ? EMPTY : path;
    return isFirstCharSlash(resolved) ? resolved : ArchivePath.SEPARATOR + resolved;
}
Adds if not already present the absolute slash preceding the specified path and returns the adjusted result . If the argument is null adjusts to an empty String before processing .
2,713
/**
 * Returns whether the first character of the specified String is the
 * archive path separator; false for an empty String.
 */
private static boolean isFirstCharSlash(final String path) {
    assertSpecified(path);
    return !path.isEmpty() && path.charAt(0) == ArchivePath.SEPARATOR;
}
Returns whether or not the first character in the specified String is a slash
2,714
/**
 * Common constructor initialization code: resets paths, ownership info and
 * flags, and allocates the record buffer.
 *
 * NOTE(review): the recordSize parameter is never read; the buffer is
 * sized from this.getRecordSize() instead. Presumably the underlying
 * stream was already configured with the size -- confirm with callers.
 */
private void initialize(int recordSize) {
    this.rootPath = null;
    this.pathPrefix = null;
    // temp files default to the process working directory
    this.tempPath = System.getProperty("user.dir");
    this.userId = 0;
    this.userName = "";
    this.groupId = 0;
    this.groupName = "";
    this.debug = false;
    this.verbose = false;
    this.keepOldFiles = false;
    this.progressDisplay = null;
    this.recordBuf = new byte[this.getRecordSize()];
}
Common constructor initialization code .
2,715
/**
 * Sets the debugging flag on this archive and propagates it to whichever
 * underlying stream (input or output) is in use.
 */
public void setDebug(boolean debugF) {
    this.debug = debugF;
    if (this.tarIn != null) {
        this.tarIn.setDebug(debugF);
        return;
    }
    if (this.tarOut != null) {
        this.tarOut.setDebug(debugF);
    }
}
Set the debugging flag .
2,716
/**
 * Sets the user and group information used to fill in tar entry headers.
 * Java provides no portable way to obtain uid/gid/user/group names for a
 * File, so callers supply the values to use in their place.
 */
public void setUserInfo(int userId, String userName, int groupId, String groupName) {
    this.userId = userId;
    this.userName = userName;
    this.groupId = groupId;
    this.groupName = groupName;
}
Set user and group information that will be used to fill in the tar archive s entry headers . Since Java currently provides no means of determining a user name user id group name or group id for a given File TarArchive allows the programmer to specify values to be used in their place .
2,717
/**
 * Returns the record size of the underlying stream (input preferred,
 * then output), falling back to TarBuffer.DEFAULT_RCDSIZE when neither
 * stream is set.
 */
public int getRecordSize() {
    if (this.tarIn != null) {
        return this.tarIn.getRecordSize();
    }
    return (this.tarOut != null) ? this.tarOut.getRecordSize() : TarBuffer.DEFAULT_RCDSIZE;
}
Get the archive's record size . Because of its history tar supports the concept of buffered IO consisting of BLOCKS of RECORDS . This allowed tar to match the IO characteristics of the physical device being used . Of course in the Java world this makes no sense WITH ONE EXCEPTION - archives are expected to be properly blocked . Thus all of the horrible TarBuffer support boils down to simply getting the boundaries correct .
2,718
/**
 * Builds a path for a temporary file derived from the given file's name.
 * The file is NOT created; up to four "-N" suffixed variants are tried to
 * dodge name collisions (the last candidate may still collide).
 */
private String getTempFilePath(File eFile) {
    final String base = this.tempPath + File.separator + eFile.getName();
    String candidate = base + ".tmp";
    for (int attempt = 1; attempt < 5; ++attempt) {
        if (!new File(candidate).exists()) {
            break;
        }
        candidate = base + "-" + attempt + ".tmp";
    }
    return candidate;
}
Get a path for a temporary file for a given File . The temporary file is NOT created . The algorithm attempts to handle filename collisions so that the name is unique .
2,719
/**
 * Performs the list command: walks every entry in the archive and reports
 * each name through the progress display. If no progress display is set,
 * nothing is listed.
 */
public void listContents() throws IOException {
    while (true) {
        final TarEntry entry = this.tarIn.getNextEntry();
        if (entry == null) {
            if (this.debug) {
                System.err.println("READ EOF RECORD");
            }
            return;
        }
        if (this.progressDisplay != null) {
            this.progressDisplay.showTarProgressMessage(entry.getName());
        }
    }
}
Perform the list command and list the contents of the archive . NOTE that this method uses the progress display to actually list the contents . If the progress display is not set nothing will be listed!
2,720
/**
 * Performs the extract command: walks every entry in the archive and
 * extracts each into the destination directory.
 */
public void extractContents(File destDir) throws IOException {
    while (true) {
        final TarEntry entry = this.tarIn.getNextEntry();
        if (entry == null) {
            if (this.debug) {
                System.err.println("READ EOF RECORD");
            }
            return;
        }
        this.extractEntry(destDir, entry);
    }
}
Perform the extract command and extract the contents of the archive .
2,721
/** Swaps the values at indices zz1 and zz2 of fmap. */
private void fswap(int[] fmap, int zz1, int zz2) {
    final int held = fmap[zz1];
    fmap[zz1] = fmap[zz2];
    fmap[zz2] = held;
}
swaps two values in fmap
2,722
/** Swaps two intervals of length yyn, starting at yyp1 and yyp2, inside fmap. */
private void fvswap(int[] fmap, int yyp1, int yyp2, int yyn) {
    for (int i = 0; i < yyn; i++) {
        fswap(fmap, yyp1 + i, yyp2 + i);
    }
}
swaps two intervals starting at yyp1 and yyp2 of length yyn inside fmap .
2,723
/**
 * Sets properties to their default values when not explicitly supplied:
 * defaults the ClassLoaders to the TCCL (or the system loader when the
 * TCCL is null), normalizes the configured loader set, and installs a
 * default ExtensionLoader if none was given.
 */
void setDefaults() {
    if (this.getClassLoaders() == null) {
        final ClassLoader tccl = SecurityActions.getThreadContextClassLoader();
        if (log.isLoggable(Level.FINER)) {
            log.finer("User has not defined an explicit " + ClassLoader.class.getSimpleName()
                + "; defaulting to the TCCL: " + tccl);
        }
        final Collection<ClassLoader> tcclCollection = new ArrayList<ClassLoader>(1);
        // The TCCL may legitimately be null; fall back to the system loader.
        if (tccl != null) {
            tcclCollection.add(tccl);
        } else {
            tcclCollection.add(ClassLoader.getSystemClassLoader());
        }
        this.classLoaders = tcclCollection;
    }
    // Normalize the configured loaders: replace nulls with the system
    // loader and deduplicate by collecting into a Set.
    final Collection<ClassLoader> adjustedCls = new HashSet<ClassLoader>();
    for (ClassLoader cl : this.classLoaders) {
        if (cl == null) {
            cl = ClassLoader.getSystemClassLoader();
        }
        adjustedCls.add(cl);
    }
    this.classLoaders = adjustedCls;
    if (getExtensionLoader() == null) {
        final ExtensionLoader loader = createDefaultExtensionLoader();
        if (log.isLoggable(Level.FINER)) {
            log.finer("User has not defined an explicit " + ExtensionLoader.class.getSimpleName()
                + "; defaulting to " + loader);
        }
        this.extensionLoader(loader);
    }
}
Sets properties to their default values if they haven t been explicitly provided by the user . If no ClassLoaders are specified use the TCCL .
2,724
/**
 * Checks whether the signature starts with the bzip2 magic "BZh".
 *
 * @param signature leading bytes of the stream
 * @param length    number of valid bytes in signature
 */
public static boolean matches(byte[] signature, int length) {
    if (length < 3) {
        return false;
    }
    return signature[0] == 'B' && signature[1] == 'Z' && signature[2] == 'h';
}
Checks if the signature matches what is expected for a bzip2 file .
2,725
/**
 * Factory method creating a URLPackageScanner.
 *
 * @param addRecursively whether sub-packages are scanned as well
 * @param classLoader    loader used to resolve package resources
 * @param callback       receiver for discovered classes
 * @param packageName    package to scan
 */
public static URLPackageScanner newInstance(boolean addRecursively, final ClassLoader classLoader,
        final Callback callback, final String packageName) {
    Validate.notNull(packageName, "Package name must be specified");
    // Removed the original notNull check on addRecursively: a primitive
    // boolean autoboxes and can never be null, so the check was a no-op.
    Validate.notNull(classLoader, "ClassLoader must be specified");
    Validate.notNull(callback, "Callback must be specified");
    return new URLPackageScanner(packageName, addRecursively, classLoader, callback);
}
Factory method to create an instance of URLPackageScanner .
2,726
/**
 * Obtains the contents of the specified stream as a byte array. The stream
 * is always closed; close failures are logged at FINER and ignored.
 *
 * @throws IllegalArgumentException if the stream is null
 * @throws RuntimeException wrapping any IOException raised while reading
 */
static byte[] asByteArray(final InputStream in) throws IllegalArgumentException {
    if (in == null) {
        throw new IllegalArgumentException("stream must be specified");
    }
    final ByteArrayOutputStream out = new ByteArrayOutputStream(8192);
    final byte[] buffer = new byte[4096];
    int read;
    try {
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
    } catch (final IOException ioe) {
        // Fixed typo in the original message: "obtainting" -> "obtaining".
        throw new RuntimeException("Error in obtaining bytes from " + in, ioe);
    } finally {
        try {
            in.close();
        } catch (final IOException ignore) {
            if (log.isLoggable(Level.FINER)) {
                log.finer("Could not close stream due to: " + ignore.getMessage() + "; ignoring");
            }
        }
    }
    return out.toByteArray();
}
Obtains the contents of the specified stream as a byte array
2,727
/**
 * Obtains the contents of the specified stream as a String decoded with
 * the UTF-8 charset, with each line terminated by '\n'. The stream is
 * always closed; close failures are logged at FINER and ignored.
 *
 * @throws RuntimeException wrapping any IOException raised while reading
 */
public static String asUTF8String(InputStream in) {
    Validate.notNull(in, "Stream must be specified");
    StringBuilder buffer = new StringBuilder();
    String line;
    try {
        BufferedReader reader = new BufferedReader(new InputStreamReader(in, CHARSET_UTF8));
        while ((line = reader.readLine()) != null) {
            // BUG FIX: Character.LINE_SEPARATOR is a byte-valued Unicode
            // general-category constant (value 13); appending it appended
            // the literal text "13" after every line, not a newline.
            buffer.append(line).append('\n');
        }
    } catch (IOException ioe) {
        throw new RuntimeException("Error in obtaining string from " + in, ioe);
    } finally {
        try {
            in.close();
        } catch (IOException ignore) {
            if (log.isLoggable(Level.FINER)) {
                log.finer("Could not close stream due to: " + ignore.getMessage() + "; ignoring");
            }
        }
    }
    return buffer.toString();
}
Obtains the contents of the specified stream as a String in UTF - 8 charset .
2,728
/**
 * Writes the specified contents to the OutputStream in fixed-size (4 KiB)
 * chunks, flushing when complete. The caller is responsible for opening
 * and closing the stream.
 */
public static void bufferedWriteWithFlush(final OutputStream output, final byte[] content)
        throws IOException {
    final int chunk = 4096;
    int written = 0;
    while (written < content.length) {
        final int len = Math.min(chunk, content.length - written);
        output.write(content, written, len);
        written += len;
    }
    output.flush();
}
Writing the specified contents to the specified OutputStream using an internal buffer . Flushing the stream when completed . Caller is responsible for opening and closing the specified stream .
2,729
/**
 * Copies the contents from an InputStream to an OutputStream, then closes
 * both streams. Close failures are logged at FINER and ignored, so a
 * failure closing the input does not prevent closing the output.
 */
public static void copyWithClose(InputStream input, OutputStream output) throws IOException {
    try {
        copy(input, output);
    } finally {
        try {
            input.close();
        } catch (final IOException ignore) {
            if (log.isLoggable(Level.FINER)) {
                log.finer("Could not close stream due to: " + ignore.getMessage() + "; ignoring");
            }
        }
        try {
            output.close();
        } catch (final IOException ignore) {
            if (log.isLoggable(Level.FINER)) {
                log.finer("Could not close stream due to: " + ignore.getMessage() + "; ignoring");
            }
        }
    }
}
Copies the contents from an InputStream to an OutputStream and closes both streams .
2,730
/**
 * Opens a buffered (8 KiB) FileInputStream over the backing file.
 *
 * @throws RuntimeException wrapping FileNotFoundException if the file is missing
 */
public InputStream openStream() {
    try {
        final FileInputStream raw = new FileInputStream(file);
        return new BufferedInputStream(raw, 8192);
    } catch (final FileNotFoundException e) {
        throw new RuntimeException("Could not open file " + file, e);
    }
}
Opens a new FileInputStream for the given File .
2,731
/**
 * Parses an octal value from a header buffer field (used e.g. for the file
 * permission mode). Leading spaces and zeros are skipped as padding;
 * parsing stops at a NUL byte or at a space following the digits.
 */
public static long parseOctal(byte[] header, int offset, int length) throws InvalidHeaderException {
    long result = 0;
    boolean stillPadding = true;
    int end = offset + length;
    for (int i = offset; i < end; ++i) {
        if (header[i] == 0) {
            break; // NUL terminates the field
        }
        if (header[i] == (byte) ' ' || header[i] == '0') {
            if (stillPadding) {
                continue; // skip leading spaces/zeros
            }
            if (header[i] == (byte) ' ') {
                break; // space after digits ends the number
            }
        }
        stillPadding = false;
        // accumulate the next octal digit
        result = (result << 3) + (header[i] - '0');
    }
    return result;
}
Parse an octal string from a header buffer . This is used for the file permission mode value .
2,732
/**
 * Parses an entry name from a header buffer field, stopping at the first
 * NUL byte.
 */
public static StringBuffer parseName(byte[] header, int offset, int length) throws InvalidHeaderException {
    final StringBuffer name = new StringBuffer(length);
    for (int i = offset; i < offset + length; ++i) {
        if (header[i] == 0) {
            break;
        }
        name.append((char) header[i]);
    }
    return name;
}
Parse an entry name from a header buffer .
2,733
/**
 * Copies characters from the name buffer into the header buffer as bytes,
 * NUL-padding the remainder of the field.
 *
 * @return the offset one past the end of the field
 */
public static int getNameBytes(StringBuffer name, byte[] buf, int offset, int length) {
    final int copied = Math.min(length, name.length());
    for (int i = 0; i < copied; ++i) {
        buf[offset + i] = (byte) name.charAt(i);
    }
    for (int i = copied; i < length; ++i) {
        buf[offset + i] = 0;
    }
    return offset + length;
}
Move the bytes from the name StringBuffer into the header s buffer .
2,734
/**
 * Writes the value into the buffer as an octal field: leading spaces,
 * octal digits, a trailing space, then a NUL terminator.
 *
 * @return the offset one past the end of the field
 */
public static int getOctalBytes(long value, byte[] buf, int offset, int length) {
    int pos = length - 1;
    buf[offset + pos--] = 0;           // field terminator
    buf[offset + pos--] = (byte) ' ';  // separator before the terminator
    if (value == 0) {
        buf[offset + pos--] = (byte) '0';
    } else {
        long remaining = value;
        while (pos >= 0 && remaining > 0) {
            buf[offset + pos--] = (byte) ('0' + (byte) (remaining & 7));
            remaining >>= 3;
        }
    }
    // pad any unused leading positions with spaces
    while (pos >= 0) {
        buf[offset + pos--] = (byte) ' ';
    }
    return offset + length;
}
Write an octal integer into a header buffer field .
2,735
/**
 * Writes the checksum field: octal digits followed by NUL then space --
 * the terminator order is reversed relative to ordinary octal fields.
 *
 * @return the offset one past the end of the field
 */
public static int getCheckSumOctalBytes(long value, byte[] buf, int offset, int length) {
    TarHeader.getOctalBytes(value, buf, offset, length);
    buf[offset + length - 2] = 0;
    buf[offset + length - 1] = (byte) ' ';
    return offset + length;
}
Write the checksum octal integer into a header buffer field .
2,736
/**
 * Joins the first path context and the additional sub-contexts with the
 * archive path separator, returning the merged result.
 */
private String merge(final String first, final String[] more) {
    assert first != null : "first must be specified";
    assert more != null : "more must be specified";
    final StringBuilder joined = new StringBuilder(first);
    for (final String next : more) {
        joined.append(ArchivePath.SEPARATOR).append(next);
    }
    return joined.toString();
}
Merges the path context with a varargs String sub - contexts returning the result
2,737
/**
 * Provides a typesafe covariant return of this instance, cast to the type
 * reported by getActualClass. A ClassCastException here indicates a
 * developer error in the subclass wiring; it is logged at SEVERE and
 * rethrown.
 */
protected final T covariantReturn() {
    try {
        return this.getActualClass().cast(this);
    } catch (final ClassCastException cce) {
        log.log(Level.SEVERE, "The class specified by getActualClass is not a valid assignment target for this instance;"
            + " developer error");
        throw cce;
    }
}
Provides typesafe covariant return of this instance
2,738
/**
 * Returns this path unchanged if already absolute; otherwise prefixes the
 * root separator and normalizes the result.
 */
public Path toAbsolutePath() {
    if (this.isAbsolute()) {
        return this;
    }
    return new ShrinkWrapPath(ArchivePath.SEPARATOR + this.path, this.fileSystem).normalize();
}
Resolves relative paths against the root directory normalizing as well .
2,739
/**
 * Splits the path into its components, in order from the root outward.
 */
private static List<String> tokenize(final ShrinkWrapPath path) {
    final List<String> components = new ArrayList<>();
    final StringTokenizer tokenizer = new StringTokenizer(path.toString(), ArchivePath.SEPARATOR_STRING);
    while (tokenizer.hasMoreTokens()) {
        components.add(tokenizer.nextToken());
    }
    return components;
}
Returns the components of this path in order from root out
2,740
/**
 * Normalizes the tokenized view of a path, resolving "." and ".."
 * components, then rejoins the tokens with the archive separator
 * (prefixed with the separator when absolute).
 *
 * NOTE(review): the list is mutated in place and the recursive calls'
 * return values are discarded -- only the final rejoin of the fully
 * reduced list matters. Going "../" past the root is rejected.
 */
private static String normalize(final List<String> path, boolean absolute) {
    assert path != null : "path must be specified";
    // drop one "." occurrence per call and recurse until none remain
    if (path.contains(DIR_THIS)) {
        path.remove(DIR_THIS);
        normalize(path, absolute);
    }
    final int indexDirBack = path.indexOf(DIR_BACK);
    if (indexDirBack != -1) {
        if (indexDirBack > 0) {
            // remove the ".." and the component it backs over, then recurse
            path.remove(indexDirBack);
            path.remove(indexDirBack - 1);
            normalize(path, absolute);
        } else {
            throw new IllegalArgumentException("Cannot specify to go back \"../\" past the root");
        }
    }
    // rejoin the remaining components
    final StringBuilder sb = new StringBuilder();
    if (absolute) {
        sb.append(ArchivePath.SEPARATOR);
    }
    for (int i = 0; i < path.size(); i++) {
        if (i > 0) {
            sb.append(ArchivePath.SEPARATOR);
        }
        sb.append(path.get(i));
    }
    return sb.toString();
}
Normalizes the tokenized view of the path
2,741
/**
 * Recursively relativizes otherOriginal against thisOriginal: climbs both
 * paths toward a common root, counting the "../" hops required, then
 * appends the components of the other path that lie below that root.
 *
 * @throws IllegalArgumentException if the paths share no common components
 */
private static ShrinkWrapPath relativizeCommonRoot(final ShrinkWrapPath thisOriginal, final Path thisCurrent,
    final Path otherOriginal, Path otherCurrent, final int backupCount) {
    assert thisOriginal != null;
    assert thisCurrent != null;
    assert otherOriginal != null;
    assert otherCurrent != null;
    assert backupCount >= 0;
    // climb both paths until otherCurrent starts with thisCurrent
    if (!otherCurrent.startsWith(thisCurrent)) {
        final Path otherParent = otherCurrent.getParent();
        final ShrinkWrapPath thisParent = (ShrinkWrapPath) thisCurrent.getParent();
        if (otherParent != null && thisParent != null) {
            return relativizeCommonRoot(thisOriginal, thisParent, otherOriginal, otherParent, backupCount + 1);
        } else {
            throw new IllegalArgumentException("No common components");
        }
    }
    // common root found; work out which components of "other" to keep
    final List<String> thisTokens = tokenize(thisOriginal);
    final List<String> otherTokens = tokenize((ShrinkWrapPath) otherOriginal);
    final int numOtherTokens = otherTokens.size();
    final int numToTake = otherTokens.size() - thisTokens.size();
    final StringBuilder sb = new StringBuilder();
    // one "../" per level climbed above
    for (int i = 0; i < backupCount; i++) {
        sb.append(DIR_BACK);
        sb.append(ArchivePath.SEPARATOR);
    }
    final int startCounter = numOtherTokens - numToTake - backupCount;
    final int stopCounter = numOtherTokens - 1;
    if (log.isLoggable(Level.FINEST)) {
        log.finest("Backup: " + backupCount);
        log.finest("This tokens: " + thisTokens);
        log.finest("Other tokens: " + otherTokens);
        log.finest("Differential: " + numToTake);
        log.finest("Start: " + startCounter);
        log.finest("Stop: " + stopCounter);
    }
    // append the trailing components of the other path
    for (int i = startCounter; i <= stopCounter; i++) {
        if (i > startCounter) {
            sb.append(ArchivePath.SEPARATOR);
        }
        sb.append(otherTokens.get(i));
    }
    return new ShrinkWrapPath(sb.toString(), thisOriginal.fileSystem);
}
Relativizes the paths recursively
2,742
/**
 * Primary template method for exporting the contents of an archive:
 * obtains the archive, then processes each child of its root node.
 */
protected void doExport() {
    final Archive<?> archive = getArchive();
    if (log.isLoggable(Level.FINE)) {
        log.fine("Exporting archive - " + archive.getName());
    }
    final Node rootNode = archive.get(ArchivePaths.root());
    for (Node child : rootNode.getChildren()) {
        processNode(child);
    }
}
Primary method providing a template for exporting the contents of an archive
2,743
/** Recursively processes the node and its entire child hierarchy. */
private void processNode(final Node node) {
    processNode(node.getPath(), node);
    for (final Node child : node.getChildren()) {
        processNode(child);
    }
}
Recursive call to process all the node hierarchy
2,744
/**
 * Processes a nested archive asset by exploding it into the parent
 * directory via the ExplodedExporter.
 */
private void processArchiveAsset(File parentDirectory, ArchiveAsset nestedArchiveAsset) {
    Archive<?> nestedArchive = nestedArchiveAsset.getArchive();
    nestedArchive.as(ExplodedExporter.class).exportExploded(parentDirectory);
}
Processes a nested archive by delegating to the ExplodedArchiveExporter
2,745
/**
 * Ensures the output directory exists (creating it if needed) and is not
 * an existing regular file, then returns it.
 *
 * @throws ArchiveExportException   if the directory cannot be created
 * @throws IllegalArgumentException if the target is an existing file
 */
private File validateOutputDirectory(File outputDirectory) {
    final boolean created = outputDirectory.mkdir();
    if (!created && !outputDirectory.exists()) {
        throw new ArchiveExportException("Unable to create archive output directory - " + outputDirectory);
    }
    if (outputDirectory.isFile()) {
        throw new IllegalArgumentException("Unable to export exploded directory to "
            + outputDirectory.getAbsolutePath() + ", it points to a existing file");
    }
    return outputDirectory;
}
Initializes the output directory
2,746
/**
 * Obtains the archive backing the specified path's file system.
 *
 * @throws IllegalArgumentException if the path is not on a ShrinkWrapFileSystem
 */
private Archive<?> getArchive(final Path path) {
    assert path != null : "Path must be specified";
    final FileSystem fs = path.getFileSystem();
    assert fs != null : "File system is null";
    if (!(fs instanceof ShrinkWrapFileSystem)) {
        throw new IllegalArgumentException("This path is not associated with a "
            + ShrinkWrapFileSystem.class.getSimpleName());
    }
    final Archive<?> archive = ((ShrinkWrapFileSystem) fs).getArchive();
    assert archive != null : "No archive associated with file system";
    return archive;
}
Obtains the underlying archive associated with the specified Path
2,747
/**
 * Resolves the named resource via the TCCL and returns its location as a
 * File, URL-decoding the path.
 *
 * @throws IllegalArgumentException if the resource is missing or the path
 *                                  cannot be decoded
 */
private File fileFromResource(final String resourceName) throws IllegalArgumentException {
    final URL resourceUrl = AccessController.doPrivileged(GetTcclAction.INSTANCE).getResource(resourceName);
    Validate.notNull(resourceUrl, resourceName + " doesn't exist or can't be accessed");
    // Reuse the URL already fetched instead of performing a second,
    // redundant classloader lookup (which could also race and return a
    // different result than the one just validated).
    String resourcePath = resourceUrl.getFile();
    try {
        resourcePath = URLDecoder.decode(resourcePath, "UTF-8");
    } catch (UnsupportedEncodingException uee) {
        throw new IllegalArgumentException(uee);
    }
    return new File(resourcePath);
}
Gets a resource from the TCCL and returns its file path .
2,748
/**
 * Custom serialized form: writes the default fields, then the archive's
 * ZIP-exported bytes, then the id object.
 *
 * NOTE(review): the raw ZIP bytes are copied straight into the
 * ObjectOutputStream before writeObject(id); the matching readObject must
 * consume them in exactly the same order -- confirm against it.
 */
private void writeObject(final ObjectOutputStream out) throws IOException {
    out.defaultWriteObject();
    final InputStream in = archive.as(ZipExporter.class).exportAsInputStream();
    try {
        IOUtil.copy(in, out);
        out.writeObject(id);
    } finally {
        in.close();
    }
    if (log.isLoggable(Level.FINER)) {
        log.finer("Wrote archive: " + archive.toString());
    }
}
Serializes the invocation with a custom form
2,749
/**
 * Builds a DecimalFormat for the requested fraction precision: an exact
 * digit count when positive, integer-only when zero, and up to three
 * fraction digits when negative (precision unspecified).
 */
private Format decimalFormat(int fractionPrecision) {
    final DecimalFormat format;
    if (fractionPrecision == 0) {
        format = new DecimalFormat("###0");
    } else if (fractionPrecision < 0) {
        format = new DecimalFormat("###0.###");
    } else {
        final StringBuilder pattern = new StringBuilder("###0.");
        appendChars(pattern, ZERO_CHARS, fractionPrecision);
        format = new DecimalFormat(pattern.toString());
    }
    return format;
}
- d . ddd + - dd style if no precision then 6 digits inf nan if 0 precision then
2,750
/**
 * Extracts a string whose first byte (at offset) encodes its length.
 * Returns EMPTY when not required, null when offset is past the end of the
 * buffer, and truncates when fewer bytes remain than the declared length.
 */
public Object extractValueFromBytes(int offset, byte[] bytes, boolean required) {
    if (!required) {
        return EMPTY;
    }
    if (offset >= bytes.length) {
        return null;
    }
    final int declared = bytes[offset] & 0xFF;
    final int available = bytes.length - offset - 1;
    final int len = Math.min(declared, available);
    final char[] chars = new char[len];
    for (int i = 0; i < len; i++) {
        chars[i] = (char) (bytes[offset + 1 + i] & 0xFF);
    }
    return new String(chars);
}
Extracted value is the extracted string using the first byte as the length .
2,751
/**
 * Reads magic entries line by line so matches can be found against them
 * later. Blank lines and '#' comments are skipped; parse errors are
 * reported through the optional ErrorCallBack and the line is skipped.
 * An entry of level N is attached as a child of the most recent level N-1
 * entry; level-0 entries go into entryList.
 *
 * NOTE(review): a level >= MAX_LEVELS would overflow levelParents --
 * presumably MagicEntryParser caps the level; confirm.
 */
public void readEntries(BufferedReader lineReader, ErrorCallBack errorCallBack) throws IOException {
    final MagicEntry[] levelParents = new MagicEntry[MAX_LEVELS];
    MagicEntry previousEntry = null;
    while (true) {
        String line = lineReader.readLine();
        if (line == null) {
            break;
        }
        // skip blank lines and comments
        if (line.length() == 0 || line.charAt(0) == '#') {
            continue;
        }
        MagicEntry entry;
        try {
            entry = MagicEntryParser.parseLine(previousEntry, line, errorCallBack);
            if (entry == null) {
                continue;
            }
        } catch (IllegalArgumentException e) {
            if (errorCallBack != null) {
                errorCallBack.error(line, e.getMessage(), e);
            }
            continue;
        }
        int level = entry.getLevel();
        if (previousEntry == null && level != 0) {
            if (errorCallBack != null) {
                errorCallBack.error(line, "first entry of the file but the level " + level + " should be 0", null);
            }
            continue;
        }
        if (level == 0) {
            // top-level entry
            entryList.add(entry);
        } else if (levelParents[level - 1] == null) {
            if (errorCallBack != null) {
                errorCallBack.error(line, "entry has level " + level + " but no parent entry with level " + (level - 1), null);
            }
            continue;
        } else {
            // attach to the most recent entry one level up
            levelParents[level - 1].addChild(entry);
        }
        levelParents[level] = entry;
        previousEntry = entry;
    }
}
Read the entries so later we can find matches with them .
2,752
/**
 * Indexes magic entries by the first byte of their starts-with pattern so
 * matching can consult a per-byte list before falling back to a full scan.
 */
public void optimizeFirstBytes() {
    for (final MagicEntry entry : entryList) {
        final byte[] startingBytes = entry.getStartsWithByte();
        if (startingBytes == null || startingBytes.length == 0) {
            continue;
        }
        final int first = startingBytes[0] & 0xFF;
        if (firstByteEntryLists[first] == null) {
            firstByteEntryLists[first] = new ArrayList<MagicEntry>();
        }
        firstByteEntryLists[first].add(entry);
    }
}
Optimize the magic entries by moving the first - bytes information into their own per - byte lists
2,753
/**
 * Finds a content-type match for the bytes, consulting the first-byte
 * indexed entries before falling back to the full entry list.
 */
public ContentInfo findMatch(byte[] bytes) {
    if (bytes.length == 0) {
        return ContentInfo.EMPTY_INFO;
    }
    final int first = bytes[0] & 0xFF;
    if (first < firstByteEntryLists.length && firstByteEntryLists[first] != null) {
        final ContentInfo indexed = findMatch(bytes, firstByteEntryLists[first]);
        if (indexed != null) {
            return indexed;
        }
    }
    return findMatch(bytes, entryList);
}
Find and return a match for the associated bytes .
2,754
/**
 * Formats the extracted value into sb, wrapping it in the optional prefix
 * and suffix.
 */
public void format(StringBuilder sb, Object value) {
    if (prefix != null) {
        sb.append(prefix);
    }
    final boolean hasValue = (percentExpression != null) && (value != null);
    if (hasValue) {
        percentExpression.append(value, sb);
    }
    if (suffix != null) {
        sb.append(suffix);
    }
}
Formats the extracted value assigned and returns the associated string
2,755
/**
 * Reads up to fileReadSize bytes from the file and delegates to the
 * byte-array matcher. Returns EMPTY_INFO for empty content.
 *
 * @throws IOException if the file is missing or unreadable
 */
public ContentInfo findMatch(File file) throws IOException {
    if (!file.exists()) {
        throw new IOException("File does not exist: " + file);
    }
    if (!file.canRead()) {
        throw new IOException("File is not readable: " + file);
    }
    final long length = file.length();
    if (length <= 0) {
        return ContentInfo.EMPTY_INFO;
    }
    int readSize = fileReadSize;
    if (length < readSize) {
        readSize = (int) length;
    }
    byte[] bytes = new byte[readSize];
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(file);
        final int numRead = fis.read(bytes);
        if (numRead <= 0) {
            return ContentInfo.EMPTY_INFO;
        }
        // trim the buffer if the read came up short
        if (numRead < bytes.length) {
            bytes = Arrays.copyOf(bytes, numRead);
        }
    } finally {
        closeQuietly(fis);
    }
    return findMatch(bytes);
}
Return the content type for the file or null if none of the magic entries matched .
2,756
/**
 * Returns the content type for the bytes, or EMPTY_INFO for empty input.
 */
public ContentInfo findMatch(byte[] bytes) {
    return (bytes.length == 0) ? ContentInfo.EMPTY_INFO : magicEntries.findMatch(bytes);
}
Return the content type from the associated bytes or null if none of the magic entries matched .
2,757
/**
 * Matches the (lower-cased) file name, or its extension after the last
 * '.', against the internal extension list; returns null when neither
 * matches.
 */
public static ContentInfo findExtensionMatch(String name) {
    name = name.toLowerCase();
    final ContentType whole = ContentType.fromFileExtension(name);
    if (whole != ContentType.OTHER) {
        return new ContentInfo(whole);
    }
    final int dot = name.lastIndexOf('.');
    if (dot < 0 || dot == name.length() - 1) {
        return null;
    }
    final ContentType byExt = ContentType.fromFileExtension(name.substring(dot + 1));
    return (byExt == ContentType.OTHER) ? null : new ContentInfo(byExt);
}
Return the content type if the extension from the file - name matches our internal list . This can either be just the extension part or it will look for the last period and take the string after that as the extension .
2,758
/**
 * Looks up a content type from a mime-type string.
 *
 * @param mimeType mime-type to look up, case-insensitive
 * @return the matching content info, or null if the mime-type is unknown
 */
public static ContentInfo findMimeTypeMatch(String mimeType) {
    ContentType match = ContentType.fromMimeType(mimeType.toLowerCase());
    return (match == ContentType.OTHER) ? null : new ContentInfo(match);
}
Return the content type if the mime - type matches our internal list .
2,759
/**
 * Reads the pattern/level pairs from the given configuration stream.
 *
 * @param stream properties stream; may be null
 * @return the parsed levels, or null when the stream is null or unreadable
 */
static List<PatternLevel> readLevelResourceFile(InputStream stream) {
    if (stream == null) {
        return null;
    }
    List<PatternLevel> result = null;
    try {
        result = configureClassLevels(stream);
    } catch (IOException e) {
        System.err.println("IO exception reading the log properties file '"
                + LOCAL_LOG_PROPERTIES_FILE + "': " + e);
    } finally {
        try {
            stream.close();
        } catch (IOException ignored) {
            // best-effort close; nothing useful to do on failure
        }
    }
    return result;
}
Read in our levels from our configuration file .
2,760
/**
 * Loads the IANA database from the specified gzipped CSV classpath resource.
 *
 * Each data line holds at least: name, mime-type, reference. A reference that
 * starts with a quote continues on the following line. The first (header)
 * line is skipped; lines with fewer than three fields or an empty mime-type
 * are ignored.
 *
 * @param resourcePath classpath location of the gzipped database file
 * @throws IllegalArgumentException if the resource is missing
 * @throws RuntimeException if the resource cannot be read
 */
private void loadFile(String resourcePath) {
    InputStream stream = getClass().getResourceAsStream(resourcePath);
    if (stream == null) {
        throw new IllegalArgumentException(resourcePath + " is missing");
    }
    BufferedReader lineReader = null;
    try {
        lineReader = new BufferedReader(new InputStreamReader(new GZIPInputStream(stream)));
        // the reader now owns the stream; null it so we don't close it twice
        stream = null;
        // skip the header line
        lineReader.readLine();
        while (true) {
            String line = lineReader.readLine();
            if (line == null) {
                break;
            }
            String[] parsed = line.split(",");
            if (parsed.length < 3) {
                continue;
            }
            String name = parsed[0];
            String mimeType = parsed[1];
            if (mimeType.isEmpty()) {
                continue;
            }
            String reference = parsed[2];
            if (reference.startsWith("\"")) {
                // quoted references wrap onto the next line
                String nextLine = lineReader.readLine();
                // FIX: guard against EOF immediately after a wrapped
                // reference, which previously threw NullPointerException
                if (nextLine != null) {
                    reference += nextLine.replaceAll("\\s+", "");
                }
            }
            IanaEntry ianaEntry = new IanaEntry(name, mimeType, parseReference(reference));
            entryMap.put(mimeType, ianaEntry);
        }
    } catch (IOException ioe) {
        throw new RuntimeException("Error when loading " + resourcePath, ioe);
    } finally {
        closeQuietly(lineReader);
        closeQuietly(stream);
    }
}
Loads the IANA database from the specified file .
2,761
/**
 * Matches the supplied bytes against this set of magic entries.
 *
 * @param bytes bytes to classify
 * @return the matched content info, or null when nothing matched or only the
 *         unknown-name placeholder was produced
 */
ContentInfo matchBytes(byte[] bytes) {
    ContentData data = matchBytes(bytes, 0, 0, null);
    if (data == null) {
        return null;
    }
    // identity comparison is intentional: UNKNOWN_NAME is a shared sentinel
    if (data.name == MagicEntryParser.UNKNOWN_NAME) {
        return null;
    }
    return new ContentInfo(data.name, data.mimeType, data.sb.toString(), data.partial);
}
Returns the content type associated with the bytes or null if it does not match .
2,762
/**
 * Main matching method, which recurses through child entries.
 *
 * @param bytes bytes being classified
 * @param prevOffset offset produced by the parent entry's match
 * @param level recursion depth (0 for top-level entries)
 * @param contentData accumulator shared down the recursion; created on the
 *        first successful match when null
 * @return the (possibly updated) accumulator, or null when this entry did
 *         not match
 */
private ContentData matchBytes(byte[] bytes, int prevOffset, int level, ContentData contentData) {
    int offset = this.offset;
    if (offsetInfo != null) {
        // indirect offset: the real offset is read out of the bytes themselves
        Integer maybeOffset = offsetInfo.getOffset(bytes);
        if (maybeOffset == null) {
            return null;
        }
        offset = maybeOffset;
    }
    if (addOffset) {
        // offset is relative to where the parent entry finished
        offset = prevOffset + offset;
    }
    // extraction is mandatory only when there is no test but output to format
    boolean required = (testValue == null && formatter != null);
    Object val = matcher.extractValueFromBytes(offset, bytes, required);
    if (val == null) {
        return null;
    }
    if (testValue != null) {
        MutableOffset mutableOffset = new MutableOffset(offset);
        val = matcher.isMatch(testValue, andValue, unsignedType, val, mutableOffset, bytes);
        if (val == null) {
            return null;
        }
        // the matcher may have advanced past a variable-length match
        offset = mutableOffset.offset;
    }
    if (contentData == null) {
        contentData = new ContentData(name, mimeType, level);
        // partial until a leaf (childless) entry confirms the match
        contentData.partial = true;
    }
    if (formatter != null) {
        if (clearFormat) {
            contentData.sb.setLength(0);
        }
        if (formatSpacePrefix && contentData.sb.length() > 0) {
            contentData.sb.append(' ');
        }
        matcher.renderValue(contentData.sb, val, formatter);
    }
    logger.trace("matched data: {}: {}", this, contentData);
    if (children == null) {
        contentData.partial = false;
    } else {
        // a match is complete if every child is optional; otherwise children
        // decide by matching themselves
        boolean allOptional = true;
        for (MagicEntry entry : children) {
            if (!entry.isOptional()) {
                allOptional = false;
            }
            entry.matchBytes(bytes, offset, level + 1, contentData);
        }
        if (allOptional) {
            contentData.partial = false;
        }
    }
    // identity comparisons are intentional: UNKNOWN_NAME is an interned sentinel
    if (name != MagicEntryParser.UNKNOWN_NAME && contentData.name == MagicEntryParser.UNKNOWN_NAME) {
        contentData.name = name;
    }
    // deeper entries win the mime-type
    if (mimeType != null && (contentData.mimeType == null || level > contentData.mimeTypeLevel)) {
        contentData.mimeType = mimeType;
        contentData.mimeTypeLevel = level;
    }
    return contentData;
}
Main processing method which can go recursive .
2,763
/**
 * Pre-processes the pattern by expanding backslash escapes such as \b, \007,
 * and \x1F into the characters they denote.
 *
 * @param pattern pattern possibly containing backslash escapes
 * @return the pattern with all recognized escapes expanded; returned
 *         unchanged when it contains no backslash at all
 */
public static String preProcessPattern(String pattern) {
    int index = pattern.indexOf('\\');
    if (index < 0) {
        // fast path: nothing to expand
        return pattern;
    }
    StringBuilder sb = new StringBuilder();
    for (int pos = 0; pos < pattern.length();) {
        char ch = pattern.charAt(pos);
        if (ch != '\\') {
            sb.append(ch);
            pos++;
            continue;
        }
        if (pos + 1 >= pattern.length()) {
            // a trailing lone backslash is kept literally
            sb.append(ch);
            break;
        }
        // look at the character after the backslash
        ch = pattern.charAt(++pos);
        switch (ch) {
            case 'b':
                sb.append('\b');
                pos++;
                break;
            case 'f':
                sb.append('\f');
                pos++;
                break;
            case 'n':
                sb.append('\n');
                pos++;
                break;
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7': {
                // up to 3 octal digits, e.g. \007
                pos += radixCharsToChar(sb, pattern, pos, 3, 8);
                break;
            }
            case 'r':
                sb.append('\r');
                pos++;
                break;
            case 't':
                sb.append('\t');
                pos++;
                break;
            case 'x': {
                // up to 2 hex digits, e.g. \x1F; with no digits, 'x' is literal
                int adjust = radixCharsToChar(sb, pattern, pos + 1, 2, 16);
                if (adjust > 0) {
                    pos += 1 + adjust;
                } else {
                    sb.append(ch);
                    pos++;
                }
                break;
            }
            case ' ':
            case '\\':
            default:
                // escaped space, backslash, or unknown escape: keep the char
                sb.append(ch);
                pos++;
                break;
        }
    }
    return sb.toString();
}
Pre - processes the pattern by handling backslash escapes such as \ b and \ 007 .
2,764
/**
 * Static three-way compare of two numbers via their (signed) long values.
 *
 * @param extractedValue value pulled from the content bytes
 * @param testValue value from the magic rule being tested
 * @return exactly 1, -1, or 0 as the extracted value is greater than, less
 *         than, or equal to the test value
 */
public static int staticCompare(Number extractedValue, Number testValue) {
    long lhs = extractedValue.longValue();
    long rhs = testValue.longValue();
    if (lhs == rhs) {
        return 0;
    }
    return (lhs > rhs) ? 1 : -1;
}
Static compare of longs which are unsigned or signed .
2,765
/**
 * Finds a match of the pattern starting at the given offset in either the
 * byte array or the char array — whichever is non-null.
 *
 * @param info test description (pattern, operator, whitespace/case flags)
 * @param startOffset position in the target to begin matching at
 * @param mutableOffset receives the position just past the match on success
 * @param bytes byte form of the target, or null when chars is used
 * @param chars char form of the target, or null when bytes is used
 * @param maxPos exclusive upper bound on target positions
 * @return the matched substring, or null if the pattern did not match
 */
protected String findOffsetMatch(TestInfo info, int startOffset, MutableOffset mutableOffset,
        final byte[] bytes, final char[] chars, final int maxPos) {
    if (startOffset < 0) {
        return null;
    }
    int targetPos = startOffset;
    // true when the previous pattern char was whitespace under compact mode,
    // allowing the next target whitespace run to collapse
    boolean lastMagicCompactWhitespace = false;
    for (int magicPos = 0; magicPos < info.pattern.length(); magicPos++) {
        char magicCh = info.pattern.charAt(magicPos);
        boolean lastChar = (magicPos == info.pattern.length() - 1);
        if (targetPos >= maxPos) {
            // target exhausted before the pattern was fully consumed
            return null;
        }
        char targetCh;
        if (bytes == null) {
            targetCh = chars[targetPos];
        } else {
            targetCh = charFromByte(bytes, targetPos);
        }
        targetPos++;
        // 1st attempt: direct character test
        if (info.operator.doTest(targetCh, magicCh, lastChar)) {
            if (info.compactWhiteSpace) {
                lastMagicCompactWhitespace = Character.isWhitespace(magicCh);
            }
            continue;
        }
        // 2nd attempt: skip a run of target whitespace, then retry
        if ((lastMagicCompactWhitespace || info.optionalWhiteSpace)
                && Character.isWhitespace(targetCh)) {
            do {
                if (targetPos >= maxPos) {
                    break;
                }
                if (bytes == null) {
                    targetCh = chars[targetPos];
                } else {
                    targetCh = charFromByte(bytes, targetPos);
                }
                targetPos++;
            } while (Character.isWhitespace(targetCh));
            if (info.operator.doTest(targetCh, magicCh, lastChar)) {
                if (info.compactWhiteSpace) {
                    lastMagicCompactWhitespace = Character.isWhitespace(magicCh);
                }
                continue;
            }
        }
        // 3rd attempt: case-insensitive, only for lower-case pattern chars
        if (info.caseInsensitive && Character.isLowerCase(magicCh)) {
            if (info.operator.doTest(Character.toLowerCase(targetCh), magicCh, lastChar)) {
                continue;
            }
        }
        return null;
    }
    // reconstruct the matched region from whichever source array was used
    char[] resultChars;
    if (bytes == null) {
        resultChars = Arrays.copyOfRange(chars, startOffset, targetPos);
    } else {
        resultChars = new char[targetPos - startOffset];
        for (int i = 0; i < resultChars.length; i++) {
            resultChars[i] = charFromByte(bytes, startOffset + i);
        }
    }
    mutableOffset.offset = targetPos;
    return new String(resultChars);
}
Find offset match either in an array of bytes or chars which ever is not null .
2,766
/**
 * Returns the next word from the reader that has passed the filter.
 *
 * @return the next filtered word
 * @throws NoSuchElementException if no further words remain
 */
public String next() {
    if (next == null) {
        throw new NoSuchElementException();
    }
    String current = next;
    advance();
    return current;
}
Returns the next word from the reader that has passed the filter .
2,767
/**
 * Writes a string to the resulting file: appended inline when everything is
 * emitted as one document, otherwise on its own line.
 *
 * @param output text to write
 */
private synchronized void print(String output) {
    if (generateOneDoc) {
        writer.print(output);
    } else {
        writer.println(output);
    }
}
Writes strings to the resulting file .
2,768
/**
 * Finalizes the writing of documents: emits the collected part-of-speech
 * tag pairs, then flushes and closes both writers.
 */
public void finish() {
    for (Map.Entry<String, String> tag : posTags.entrySet()) {
        posWriter.println(tag.getKey() + " " + tag.getValue());
    }
    posWriter.flush();
    posWriter.close();
    writer.flush();
    writer.close();
}
Finalizes the writing of documents .
2,769
/**
 * Recursively finds any .xml documents under the directory and hands each to
 * the parser.
 *
 * @param parser parser each .xml file is passed to
 * @param utterancePerDoc whether each utterance becomes its own document
 * @param directory root directory to scan
 */
public static void findXmlFiles(ChildesParser parser, boolean utterancePerDoc, File directory) {
    File[] files = directory.listFiles();
    // FIX: listFiles() returns null when the path is not a directory or an
    // I/O error occurs; previously this caused a NullPointerException
    if (files == null) {
        return;
    }
    for (File file : files) {
        if (file.isDirectory()) {
            findXmlFiles(parser, utterancePerDoc, file);
        } else if (file.isFile() && file.getPath().endsWith(".xml")) {
            parser.parseFile(file, utterancePerDoc);
        }
    }
}
Recursively finds any xml documents to parse .
2,770
/**
 * Given the nodes in the previous relation, returns whichever node of the
 * current relation was not already seen.
 *
 * @param prev the relation already traversed
 * @param cur the relation being stepped onto
 * @return the node of {@code cur} that does not appear in {@code prev}
 */
private DependencyTreeNode getNextNode(DependencyRelation prev, DependencyRelation cur) {
    // identity comparisons are intentional: nodes are shared tree objects
    boolean headAlreadySeen = (prev.headNode() == cur.headNode()
                               || prev.dependentNode() == cur.headNode());
    return headAlreadySeen ? cur.dependentNode() : cur.headNode();
}
Given the nodes in the previous relation, determines which of the nodes in the current relation is new and returns it .
2,771
/**
 * Copies the first {@code length} values of the vector into a new array.
 * Fixing the length at call time shields the result from later external
 * changes to the vector's size.
 *
 * @param v vector to copy from
 * @param length number of leading values to copy
 * @return a fresh array of exactly {@code length} values
 */
private static double[] toArray(DoubleVector v, int length) {
    double[] values = new double[length];
    for (int i = 0; i < length; ++i) {
        values[i] = v.get(i);
    }
    return values;
}
Returns an array of the specified length using the data in the provided vector . This method allows row vectors to be converted to arrays based on the size of the matrix at the time of the call, thereby preventing changes in length due to external vector modifications .
2,772
/**
 * Parses the WordSimilarity-353 file and returns the set of judgements.
 *
 * The first line is assumed to be a header and is skipped; comment lines
 * (starting with '#') and blank lines are ignored.
 *
 * @param word353file whitespace-separated file of word pairs and scores
 * @return the parsed similarity judgements
 * @throws IOError if the file cannot be read
 * @throws Error if a data line does not contain exactly three fields
 */
private Collection<WordSimilarity> parse(File word353file) {
    Collection<WordSimilarity> pairs = new LinkedList<WordSimilarity>();
    try {
        BufferedReader br = new BufferedReader(new FileReader(word353file));
        try {
            // skip the header line
            br.readLine();
            for (String line = null; (line = br.readLine()) != null; ) {
                if (line.startsWith("#") || line.length() == 0) {
                    continue;
                }
                String[] wordsAndNum = line.split("\\s+");
                if (wordsAndNum.length != 3) {
                    throw new Error("Unexpected line formatting: " + line);
                }
                pairs.add(new SimpleWordSimilarity(
                        wordsAndNum[0], wordsAndNum[1],
                        Double.parseDouble(wordsAndNum[2])));
            }
        } finally {
            // FIX: the reader was previously never closed (resource leak)
            br.close();
        }
    } catch (IOException ioe) {
        throw new IOError(ioe);
    }
    return pairs;
}
Parses the WordSimilarity353 file and returns the set of judgements .
2,773
/**
 * Returns an iterator over all entries in the backing matrix file. Entry
 * ordering is format-specific; no guarantee is made about it.
 *
 * @return an iterator over the matrix entries
 * @throws IOError if the matrix file cannot be read
 */
public Iterator<MatrixEntry> iterator() {
    try {
        return MatrixIO.getMatrixFileIterator(matrixFile, format);
    } catch (IOException ioe) {
        throw new IOError(ioe);
    }
}
Returns an iterator over all the entries in the matrix . The order in which entries are returned is format - specific ; no guarantee is provided about the ordering .
2,774
/**
 * Loads the semantic space data from the stream, using the format to decide
 * how the data is laid out within it.
 *
 * @param is stream holding the .sspace data
 * @param format on-disk layout of the data
 * @throws IOException if the stream cannot be read
 */
private void loadFromFormat(InputStream is, SSpaceFormat format) throws IOException {
    termToIndex = new LinkedHashMap<String, Integer>();
    long start = System.currentTimeMillis();
    Matrix m = null;
    switch (format) {
    case TEXT:
        // dense matrices are wrapped for synchronized access
        m = Matrices.synchronizedMatrix(loadText(is));
        break;
    case BINARY:
        m = Matrices.synchronizedMatrix(loadBinary(is));
        break;
    case SPARSE_TEXT:
        m = loadSparseText(is);
        break;
    case SPARSE_BINARY:
        m = loadSparseBinary(is);
        break;
    }
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.fine("loaded " + format + " .sspace file in "
                + (System.currentTimeMillis() - start) + "ms");
    }
    wordSpace = m;
}
Loads the semantic space data from the specified stream, using the format to determine how the data is laid out internally within the stream .
2,775
/**
 * Returns the next subgraph from the backing graph.
 *
 * @return the next subgraph
 * @throws NoSuchElementException if no subgraphs remain
 */
public Graph<T> next() {
    if (nextSubgraphs.isEmpty()) {
        throw new NoSuchElementException();
    }
    Graph<T> result = nextSubgraphs.poll();
    // refill the queue lazily once it has been drained
    if (nextSubgraphs.isEmpty()) {
        advance();
    }
    return result;
}
Returns the next subgraph from the backing graph .
2,776
/**
 * Atomically adds the index vector's values to the semantic vector. Only the
 * non-zero dimensions of the index vector are touched.
 *
 * @param semantics vector being updated; locked for the duration
 * @param index ternary index vector supplying +1/-1 dimensions
 */
private static void add(IntegerVector semantics, TernaryVector index) {
    synchronized (semantics) {
        for (int dim : index.positiveDimensions()) {
            semantics.add(dim, 1);
        }
        for (int dim : index.negativeDimensions()) {
            semantics.add(dim, -1);
        }
    }
}
Atomically adds the values of the index vector to the semantic vector . This is a special case addition operation that only iterates over the non - zero values of the index vector .
2,777
/**
 * Returns a copy of this dependency path: the relation and node lists are
 * duplicated while their elements are shared.
 *
 * @return an independent copy of this path
 */
public SimpleDependencyPath copy() {
    SimpleDependencyPath duplicate = new SimpleDependencyPath();
    duplicate.path.addAll(path);
    duplicate.nodes.addAll(nodes);
    return duplicate;
}
Returns a copy of this dependency path
2,778
/**
 * Returns a copy of this dependency path with the provided relation appended
 * to the end of its relation sequence.
 *
 * @param relation relation to append
 * @return the extended copy; this path is unchanged
 */
public SimpleDependencyPath extend(DependencyRelation relation) {
    SimpleDependencyPath extended = copy();
    DependencyTreeNode tail = last();
    // append whichever node of the relation is new relative to the path tail
    DependencyTreeNode newNode = relation.headNode().equals(tail)
        ? relation.dependentNode()
        : relation.headNode();
    extended.nodes.add(newNode);
    extended.path.add(relation);
    return extended;
}
Returns a copy of this dependency path that has the provided relation appended to the end of its path sequence .
2,779
/**
 * Creates a matrix of the given dimensions, selecting the implementation by
 * weighing the estimated size (and density) of the new matrix against the
 * memory currently available to the JVM.
 *
 * @param rows number of rows
 * @param cols number of columns
 * @param isDense whether the matrix is expected to be dense
 * @return an in-memory matrix when the estimate fits, otherwise an
 *         {@code OnDiskMatrix}
 */
public static Matrix create(int rows, int cols, boolean isDense) {
    // estimated footprint in bytes; sparse matrices are discounted by the
    // expected density
    long size = (isDense)
        ? (long) rows * (long) cols * BYTES_PER_DOUBLE
        : (long) (rows * (long) cols * (BYTES_PER_DOUBLE * SPARSE_DENSITY));
    Runtime r = Runtime.getRuntime();
    long available = r.freeMemory();
    if (size >= available) {
        LOGGER.finer("cannot fit in memory; creating new OnDiskMatrix");
        return new OnDiskMatrix(rows, cols);
    }
    if (!isDense) {
        LOGGER.finer("can fit sparse in memory; creating new SparseMatrix");
        return new YaleSparseMatrix(rows, cols);
    }
    if (size > Integer.MAX_VALUE) {
        LOGGER.finer("too big for ArrayMatrix; creating new OnDiskMatrix");
        return new OnDiskMatrix(rows, cols);
    }
    LOGGER.finer("creating new (in memory) ArrayMatrix");
    return new ArrayMatrix(rows, cols);
}
Creates a matrix of the given dimensions and selects the matrix implementation by considering the size and density of the new matrix with respect to the available memory for the JVM .
2,780
/**
 * Returns a copy of the given matrix with the same dimensions, values, and
 * sparsity, though not necessarily the same concrete sub-type.
 *
 * @param matrix matrix to duplicate
 * @return an in-memory copy of the matrix
 */
public static Matrix copy(Matrix matrix) {
    // keep the sparse/dense character of the source
    Type copyType = (matrix instanceof SparseMatrix)
        ? Type.SPARSE_IN_MEMORY
        : Type.DENSE_IN_MEMORY;
    Matrix copiedMatrix = Matrices.create(matrix.rows(), matrix.columns(), copyType);
    return copyTo(matrix, copiedMatrix);
}
Returns a copied version of a given matrix . The returned matrix will have the same dimensionality values and sparsity but it may not have the same exact sub - type .
2,781
/**
 * Returns the next token's replacement when one exists; otherwise the
 * original token itself.
 *
 * @return the (possibly replaced) next token
 * @throws NoSuchElementException if no tokens remain
 */
public String next() {
    if (next == null) {
        throw new NoSuchElementException();
    }
    String replacement = replacementMap.get(next);
    String result = (replacement != null) ? replacement : next;
    advance();
    return result;
}
Return the next token s replacement if such a replacement exists . If no replacement for the next token exists the original token is returned .
2,782
/**
 * Returns the next random color; when a seed color is set, each channel is
 * averaged with the seed's channel.
 *
 * @return a freshly generated color
 */
public Color next() {
    int red = rand.nextInt(256);
    int green = rand.nextInt(256);
    int blue = rand.nextInt(256);
    if (seed == null) {
        return new Color(red, green, blue);
    }
    // blend each channel halfway toward the seed color
    return new Color((red + seed.getRed()) / 2,
                     (green + seed.getGreen()) / 2,
                     (blue + seed.getBlue()) / 2);
}
Returns the next random color
2,783
/**
 * Reconfigures the kind of iterator this factory returns based on the
 * specified properties: token-count limit, token filter, stemmer,
 * compound-token file, and token-replacement file.
 *
 * @param props properties naming the configuration resources
 * @throws IOError if a configured resource cannot be read
 */
public static synchronized void setProperties(Properties props) {
    wordLimit = Integer.parseInt(props.getProperty(TOKEN_COUNT_LIMIT_PROPERTY, "0"));
    String filterProp = props.getProperty(TOKEN_FILTER_PROPERTY);
    filter = (filterProp != null)
        ? TokenFilter.loadFromSpecification(filterProp, resourceFinder)
        : null;
    String stemmerProp = props.getProperty(STEMMER_PROPERTY);
    if (stemmerProp != null)
        stemmer = ReflectionUtil.<Stemmer>getObjectInstance(stemmerProp);
    String compoundTokensProp = props.getProperty(COMPOUND_TOKENS_FILE_PROPERTY);
    if (compoundTokensProp != null) {
        compoundTokens = new LinkedHashSet<String>();
        try {
            // one compound token per line
            BufferedReader br = resourceFinder.open(compoundTokensProp);
            for (String line = null; (line = br.readLine()) != null; ) {
                compoundTokens.add(line);
            }
            // rebuild any cached per-thread compound iterators so they pick
            // up the new token set; the empty dummy reader is a placeholder
            // until the iterator is next reset with a real source
            for (Map.Entry<Thread, CompoundWordIterator> e : compoundIterators.entrySet()) {
                BufferedReader dummyBuffer = new BufferedReader(new StringReader(""));
                e.setValue(new CompoundWordIterator(dummyBuffer, compoundTokens));
            }
        } catch (IOException ioe) {
            throw new IOError(ioe);
        }
    } else {
        compoundTokens = null;
    }
    String replacementProp = props.getProperty(TOKEN_REPLACEMENT_FILE_PROPERTY);
    if (replacementProp != null) {
        try {
            BufferedReader br = resourceFinder.open(replacementProp);
            replacementMap = new HashMap<String, String>();
            String line = null;
            while ((line = br.readLine()) != null) {
                // each line: original-token <whitespace> replacement-token
                String[] termReplacement = line.split("\\s+");
                replacementMap.put(termReplacement[0], termReplacement[1]);
            }
        } catch (IOException ioe) {
            throw new IOError(ioe);
        }
    } else
        replacementMap = null;
}
Reconfigures the type of iterator returned by this factory based on the specified properties .
2,784
/**
 * Builds the base tokenizing iterator for the stream — the stack of iterator
 * decorators applied before any downstream processing.
 *
 * Decorators are layered in a fixed order: replacement, compound-word,
 * token-limit, filter, stemming.
 *
 * @param reader source of raw text
 * @param keepOrdering whether filtering must preserve token ordering
 * @return the fully decorated token iterator
 */
private static Iterator<String> getBaseIterator(BufferedReader reader, boolean keepOrdering) {
    Iterator<String> tokens = new WordIterator(reader);
    if (replacementMap != null) {
        tokens = new WordReplacementIterator(tokens, replacementMap);
    }
    if (compoundTokens != null) {
        // compound iterators are cached per thread and reset for reuse
        CompoundWordIterator cwi = compoundIterators.get(Thread.currentThread());
        if (cwi == null) {
            cwi = new CompoundWordIterator(tokens, compoundTokens);
            compoundIterators.put(Thread.currentThread(), cwi);
        } else {
            cwi.reset(tokens);
        }
        tokens = cwi;
    }
    if (wordLimit > 0) {
        tokens = new LimitedIterator<String>(tokens, wordLimit);
    }
    if (filter != null) {
        tokens = (keepOrdering)
            ? new OrderPreservingFilteredIterator(tokens, filter)
            : new FilteredIterator(tokens, filter);
    }
    if (stemmer != null) {
        tokens = new StemmingIterator(tokens, stemmer);
    }
    return tokens;
}
Returns an iterator for the basic tokenization of the stream before filtering has been applied to the tokens .
2,785
/**
 * Updates the semantics as a weighted combination of the co-occurring word's
 * current semantics and the provided index vector. The index vector is
 * passed in so that the caller can permute it as necessary beforehand.
 *
 * @param toUpdate semantic vector being updated in place
 * @param cooccurringWord word whose semantics and occurrence count drive the
 *        weighting
 * @param iv index vector contributing the direct co-occurrence signal
 */
@ SuppressWarnings ( "unchecked" ) private void updateSemantics ( SemanticVector toUpdate , String cooccurringWord , TernaryVector iv ) {
    SemanticVector prevWordSemantics = getSemanticVector ( cooccurringWord ) ;
    Integer occurrences = wordToOccurrences . get ( cooccurringWord ) ;
    // unseen words count as zero occurrences
    if ( occurrences == null ) occurrences = 0 ;
    // weight decays exponentially with how often the word has been seen;
    // NOTE(review): if historyDecayRate is an integer type this division
    // truncates — confirm it is declared as a floating-point value
    double semanticWeight = 1d / ( Math . exp ( occurrences / historyDecayRate ) ) ;
    // split the update between the raw index vector and the word's history
    add ( toUpdate , iv , impactRate * ( 1 - semanticWeight ) ) ;
    toUpdate . addVector ( prevWordSemantics , impactRate * semanticWeight ) ;
}
Update the semantics using the weighed combination of the semantics of the co - occurring word and the provided index vector . Note that the index vector is provided so that the caller can permute it as necessary .
2,786
/**
 * Adds the index vector into the semantic vector, scaling each non-zero
 * dimension's contribution by the given percentage.
 *
 * @param semantics vector being updated
 * @param index ternary index vector supplying the dimensions
 * @param percentage amount added (positive dims) or subtracted (negative dims)
 */
private static void add(DoubleVector semantics, TernaryVector index, double percentage) {
    for (int dim : index.positiveDimensions()) {
        semantics.add(dim, percentage);
    }
    for (int dim : index.negativeDimensions()) {
        semantics.add(dim, -percentage);
    }
}
Adds the index vector to the semantic vector, using the percentage to specify how much of each dimension is added .
2,787
/**
 * Generates a random Gaussian vector with the given mean and standard
 * deviation in every dimension.
 *
 * @param length number of dimensions
 * @param mean mean of each component
 * @param std standard deviation of each component
 * @return the freshly generated dense vector
 */
private static DoubleVector generateInitialVector(int length, double mean, double std) {
    DoubleVector vector = new DenseVector(length);
    for (int i = 0; i < length; ++i) {
        // scale and shift a standard normal draw
        vector.set(i, std * RANDOM.nextGaussian() + mean);
    }
    return vector;
}
Generates a simple random vector .
2,788
/**
 * Computes the dot product between two vectors, iterating over the length of
 * {@code u} (assumes {@code v} is at least as long).
 *
 * @param u first vector
 * @param v second vector
 * @return the dot product of the two vectors
 */
private static double dotProduct(DoubleVector u, DoubleVector v) {
    double dot = 0;
    for (int i = 0; i < u.length(); ++i) {
        // FIX: the values were previously fetched into locals and then
        // fetched again for the product; use the locals instead
        double a = u.get(i);
        double b = v.get(i);
        dot += a * b;
    }
    return dot;
}
Compute the dot product between two vectors .
2,789
/**
 * Updates the count of how many edges in the graph carry the given type,
 * removing the entry entirely once its count reaches zero.
 *
 * @param type edge type whose count is adjusted
 * @param delta signed change to apply
 */
private void updateTypeCounts(T type, int delta) {
    if (!typeCounts.containsKey(type)) {
        assert delta > 0 : "removing edge type that was not originally present";
        typeCounts.put(type, delta);
        return;
    }
    int updated = typeCounts.get(type) + delta;
    assert updated >= 0 : "removing edge type that was not originally present";
    if (updated == 0) {
        typeCounts.remove(type);
    } else {
        typeCounts.put(type, updated);
    }
}
Updates how many edges have this type in the graph
2,790
/**
 * Returns a temporary matrix file that will be deleted when the JVM exits.
 *
 * @return the newly created temp file
 * @throws IOError if the temp file cannot be created
 */
private static File getTempMatrixFile() {
    File tmp;
    try {
        tmp = File.createTempFile("matlab-sparse-matrix", ".dat");
    } catch (IOException ioe) {
        throw new IOError(ioe);
    }
    tmp.deleteOnExit();
    return tmp;
}
Returns a temporary file that will be deleted on JVM exit .
2,791
/**
 * Returns the current semantic vector for the provided word. If the word is
 * not currently in the semantic space, a vector is added for it and
 * returned.
 *
 * @param word word whose vector is requested
 * @return the word's (possibly newly created) semantic vector
 */
private IntegerVector getSemanticVector(String word) {
    IntegerVector v = wordSpace.get(word);
    if (v == null) {
        // double-checked creation: re-read under the lock so two threads
        // racing on the same new word end up sharing a single vector.
        // NOTE(review): assumes wordSpace tolerates concurrent reads (e.g. a
        // concurrent map) — confirm its declaration
        synchronized (this) {
            v = wordSpace.get(word);
            if (v == null) {
                v = new CompactSparseIntegerVector(vectorLength);
                wordSpace.put(word, v);
            }
        }
    }
    return v;
}
Returns the current semantic vector for the provided word . If the word is not currently in the semantic space a vector is added for it and returned .
2,792
/**
 * Randomly shuffles the contents of the provided array in place using a
 * Fisher-Yates pass from the end of the array downward.
 *
 * @param arr array to shuffle
 * @param rand randomness source
 */
public static void shuffle(int[] arr, Random rand) {
    for (int i = arr.length; i > 1; i--) {
        // pick a partner from the not-yet-fixed prefix [0, i)
        int j = rand.nextInt(i);
        int tmp = arr[i - 1];
        arr[i - 1] = arr[j];
        arr[j] = tmp;
    }
}
Randomly shuffles the contents of the provided array
2,793
/**
 * Adds an initial valid motif to this counter with a zero count. This lets
 * the fixed-motif constructor seed the set of valid motifs before counting
 * begins.
 *
 * @param g motif graph to register
 */
private void addInitial(G g) {
    Set<T> edgeTypes = g.edgeTypes();
    LinkedList<Map.Entry<G, Integer>> graphs = typesToGraphs.get(edgeTypes);
    if (graphs == null) {
        graphs = new LinkedList<Map.Entry<G, Integer>>();
        // key on a defensive copy of the type set
        typesToGraphs.put(new HashSet<T>(edgeTypes), graphs);
    }
    graphs.add(new SimpleEntry<G, Integer>(g, 0));
}
Adds an initial set of valid motifs to this counter with no counts . This method enables the fixed - motif constructor to initialize the set of valid motifs prior to counting .
2,794
/**
 * Prints the usage, options, and supported commands for this program.
 *
 * @param options option set whose pretty-printed form is included
 */
private static void usage(ArgOptions options) {
    String message = "Fanmod 1.0, "
        + "usage: java -jar fanmod.jar [options] input.graph output.serialized \n\n"
        + options.prettyPrint()
        + "\nThe edge file format is:\n"
        + " vertex1 vertex2 [edge_label]\n"
        + OptionDescriptions.HELP_DESCRIPTION;
    System.out.println(message);
}
Prints the options and supported commands used by this program .
2,795
/**
 * Computes one p*ln(p) entropy term for a raw count relative to a total,
 * using the natural log. The result is negative for p in (0, 1); callers
 * are responsible for ensuring count and sum yield a valid probability.
 *
 * @param count raw count of one outcome
 * @param sum total over all outcomes
 * @return p * ln(p) where p = count / sum
 */
private static double entropy(double count, double sum) {
    double p = count / sum;
    return p * Math.log(p);
}
Computes the entropy of a raw count given a particular summation using the natural log .
2,796
/**
 * Returns the number of times the specified object has been seen by this
 * counter; unseen objects report zero.
 *
 * @param obj object whose count is requested
 * @return the object's count, or 0 when it has never been seen
 */
public int getCount(T obj) {
    int objIndex = (allowNewIndices)
        ? objectIndices.index(obj)
        : objectIndices.find(obj);
    if (objIndex < 0) {
        return 0;
    }
    return indexToCount.get(objIndex);
}
Returns the number of times the specified object has been seen by this counter .
2,797
/**
 * Returns the integer log base 2 of n, or -1 if n is not a positive integral
 * power of 2.
 *
 * @param n value to take the log of
 * @return log2(n) when n is an exact power of two, otherwise -1
 */
public static int log2(int n) {
    // Reject non-positive values and non-powers of two up front.
    // FIX: the previous doubling search (k *= 2 until k >= n) fell into an
    // infinite loop for n > 2^30, because the running power overflowed to a
    // negative value (then zero) before ever reaching n.
    if (n <= 0 || (n & (n - 1)) != 0) {
        return -1;
    }
    // for an exact power of two, the exponent is the trailing-zero count
    return Integer.numberOfTrailingZeros(n);
}
Return the integer log base 2 of n or - 1 if n is not an integral power of 2 .
2,798
/**
 * Sums the values in the vector, iterating only over the non-zero entries
 * when the vector is sparse.
 *
 * @param v vector to sum
 * @return the sum of all values
 */
public static double sum(DoubleVector v) {
    double total = 0;
    if (v instanceof SparseVector) {
        // sparse fast path: touch only the stored entries
        for (int index : ((SparseVector) v).getNonZeroIndices()) {
            total += v.get(index);
        }
        return total;
    }
    for (int i = 0, len = v.length(); i < len; ++i) {
        total += v.get(i);
    }
    return total;
}
Sums the values in the vector returning the result .
2,799
/**
 * Runs the LPP script with whichever of Matlab or Octave is available,
 * computing the locality preserving projection of the data matrix down to
 * the given number of dimensions (locality taken from the affinity matrix)
 * and writing the result to the output file.
 *
 * @param dataMatrixFile file holding the data matrix
 * @param affMatrixFile file holding the affinity matrix
 * @param dims number of dimensions to project down to
 * @param outputMatrix file the projected matrix is written to
 * @throws IOException if the invocation fails with an I/O error
 * @throws IllegalStateException if neither Matlab nor Octave is available
 */
private static void execute(File dataMatrixFile, File affMatrixFile, int dims, File outputMatrix) throws IOException {
    if (isMatlabAvailable()) {
        invokeMatlab(dataMatrixFile, affMatrixFile, dims, outputMatrix);
    } else if (isOctaveAvailable()) {
        invokeOctave(dataMatrixFile, affMatrixFile, dims, outputMatrix);
    } else {
        throw new IllegalStateException("Cannot find Matlab or Octave to invoke LPP");
    }
}
Executes the LPP script thereby computing the locality preserving projection of the data matrix to the specified number of dimension using the affinity matrix to determine locality . The result is written to the output file .