signature | implementation |
---|---|
public class JSONStringer { /** * Inserts any necessary separators and whitespace before a name . Also
* adjusts the stack to expect the key ' s value . */
private void beforeKey ( ) throws JSONException { } } | Scope context = peek ( ) ; if ( context == Scope . NONEMPTY_OBJECT ) { // first in object
out . append ( ',' ) ; } else if ( context != Scope . EMPTY_OBJECT ) { // not in an object !
throw new JSONException ( "Nesting problem" ) ; } newline ( ) ; replaceTop ( Scope . DANGLING_KEY ) ; |
public class RxApollo { /** * Converts an { @ link ApolloCall } to an Observable . The number of emissions this Observable will have is based on the
* { @ link ResponseFetcher } used with the call .
* @ param call the ApolloCall to convert
* @ param < T > the value type
* @ param backpressureMode The { @ link rx . Emitter . BackpressureMode } to use .
* @ return the converted Observable */
@ NotNull public static < T > Observable < Response < T > > from ( @ NotNull final ApolloCall < T > call , Emitter . BackpressureMode backpressureMode ) { } } | checkNotNull ( call , "call == null" ) ; return Observable . create ( new Action1 < Emitter < Response < T > > > ( ) { @ Override public void call ( final Emitter < Response < T > > emitter ) { final AtomicBoolean canceled = new AtomicBoolean ( ) ; emitter . setCancellation ( new Cancellable ( ) { @ Override public void cancel ( ) throws Exception { canceled . set ( true ) ; call . cancel ( ) ; } } ) ; call . enqueue ( new ApolloCall . Callback < T > ( ) { @ Override public void onResponse ( @ NotNull Response < T > response ) { if ( ! canceled . get ( ) ) { emitter . onNext ( response ) ; } } @ Override public void onFailure ( @ NotNull ApolloException e ) { Exceptions . throwIfFatal ( e ) ; if ( ! canceled . get ( ) ) { emitter . onError ( e ) ; } } @ Override public void onStatusEvent ( @ NotNull ApolloCall . StatusEvent event ) { if ( ! canceled . get ( ) ) { if ( event == ApolloCall . StatusEvent . COMPLETED ) { emitter . onCompleted ( ) ; } } } } ) ; } } , backpressureMode ) ; |
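A hedged usage sketch for the converter above; `apolloClient` and the generated `MyQuery` type are hypothetical stand-ins, and RxJava 1.x is assumed on the classpath.

```java
// Sketch only: apolloClient and MyQuery are placeholders for a real Apollo setup.
ApolloCall<MyQuery.Data> call = apolloClient.query(new MyQuery());
Observable<Response<MyQuery.Data>> responses =
        RxApollo.from(call, Emitter.BackpressureMode.LATEST);
Subscription subscription = responses.subscribe(
        response -> System.out.println(response.data()), // one onNext per fetch
        Throwable::printStackTrace,                      // ApolloException surfaces here
        () -> System.out.println("completed"));          // fired on StatusEvent.COMPLETED
// Unsubscribing runs the Cancellable registered above, which cancels the underlying call.
subscription.unsubscribe();
```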
public class ListFacetNamesResult { /** * The names of facets that exist within the schema .
* @ param facetNames
* The names of facets that exist within the schema . */
public void setFacetNames ( java . util . Collection < String > facetNames ) { } } | if ( facetNames == null ) { this . facetNames = null ; return ; } this . facetNames = new java . util . ArrayList < String > ( facetNames ) ; |
public class NelsonData { /** * Fourteen ( or more ) points in a row alternate in direction , increasing then decreasing */
private boolean rule4 ( Double sample ) { } } | if ( null == rule4PreviousSample || sample . doubleValue ( ) == rule4PreviousSample . doubleValue ( ) ) { rule4PreviousSample = sample ; rule4PreviousDirection = "=" ; rule4Count = 0 ; return false ; } String sampleDirection = ( sample > rule4PreviousSample ) ? ">" : "<" ; if ( sampleDirection . equals ( rule4PreviousDirection ) ) { rule4Count = 0 ; } else { ++ rule4Count ; } rule4PreviousSample = sample ; rule4PreviousDirection = sampleDirection ; return Math . abs ( rule4Count ) >= 14 ; |
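The alternation counter above can be illustrated in isolation. The sketch below uses hypothetical names (not the NelsonData API): it feeds a strictly alternating series and reports once fourteen consecutive direction changes have been observed.

```java
// Standalone sketch of the direction-alternation check (illustrative names only).
public class Rule4Sketch {
    private Double previousSample;
    private String previousDirection = "=";
    private int count;

    boolean alternating14(double sample) {
        if (previousSample == null || sample == previousSample) {
            previousSample = sample;
            previousDirection = "=";
            count = 0;
            return false;
        }
        String direction = sample > previousSample ? ">" : "<";
        // Reset when the direction repeats, count when it flips.
        count = direction.equals(previousDirection) ? 0 : count + 1;
        previousSample = sample;
        previousDirection = direction;
        return count >= 14;
    }

    public static void main(String[] args) {
        Rule4Sketch rule = new Rule4Sketch();
        for (int i = 0; i < 20; i++) {
            // Strictly alternating series: 1, 2, 1, 2, ...
            if (rule.alternating14(i % 2 == 0 ? 1.0 : 2.0)) {
                System.out.println("rule 4 fired at sample " + (i + 1));
                break;
            }
        }
    }
}
```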
public class CommitManager { /** * Stops tracking committed keys .
* @ param track Flag to stop tracking keys for local site state transfer or for remote site state transfer . */
public final void stopTrack ( Flag track ) { } } | setTrack ( track , false ) ; if ( ! trackStateTransfer && ! trackXSiteStateTransfer ) { if ( trace ) { log . tracef ( "Tracking is disabled. Clear tracker: %s" , tracker ) ; } tracker . clear ( ) ; } else { for ( Iterator < Map . Entry < Object , DiscardPolicy > > iterator = tracker . entrySet ( ) . iterator ( ) ; iterator . hasNext ( ) ; ) { if ( iterator . next ( ) . getValue ( ) . update ( trackStateTransfer , trackXSiteStateTransfer ) ) { iterator . remove ( ) ; } } } |
public class dnszone { /** * Use this API to unset the properties of dnszone resources .
* Properties that need to be unset are specified in args array . */
public static base_responses unset ( nitro_service client , String zonename [ ] , String args [ ] ) throws Exception { } } | base_responses result = null ; if ( zonename != null && zonename . length > 0 ) { dnszone unsetresources [ ] = new dnszone [ zonename . length ] ; for ( int i = 0 ; i < zonename . length ; i ++ ) { unsetresources [ i ] = new dnszone ( ) ; unsetresources [ i ] . zonename = zonename [ i ] ; } result = unset_bulk_request ( client , unsetresources , args ) ; } return result ; |
public class Manager { /** * Returns the database with the given name , or null if it doesn ' t exist .
* Multiple calls with the same name will return the same { @ link Database } instance .
* This is equivalent to calling { @ link # openDatabase ( String , DatabaseOptions ) }
* with a default set of options . */
@ InterfaceAudience . Public public Database getExistingDatabase ( String name ) throws CouchbaseLiteException { } } | DatabaseOptions options = getDefaultOptions ( name ) ; return openDatabase ( name , options ) ; |
public class SimpleIoc { /** * Register @ Bean marked objects */
private Object put ( String name , Class < ? > beanClass , boolean isSingleton ) { } } | BeanDefine beanDefine = this . createBeanDefine ( beanClass , isSingleton ) ; if ( pool . put ( name , beanDefine ) != null ) { log . warn ( "Duplicated Bean: {}" , name ) ; } // add interface , put to pool
Class < ? > [ ] interfaces = beanClass . getInterfaces ( ) ; if ( interfaces . length > 0 ) { for ( Class < ? > interfaceClazz : interfaces ) { if ( null != this . getBean ( interfaceClazz ) ) { break ; } this . put ( interfaceClazz . getName ( ) , beanDefine ) ; } } return Objects . requireNonNull ( beanDefine ) . getBean ( ) ; |
public class ClassPathBuilder { /** * Add worklist items from given extensions directory .
* @ param workList
* the worklist
* @ param extDir
* an extensions directory */
private void addWorkListItemsForExtDir ( LinkedList < WorkListItem > workList , String extDir ) { } } | File dir = new File ( extDir ) ; File [ ] fileList = dir . listFiles ( ( FileFilter ) pathname -> { String path = pathname . getPath ( ) ; boolean isArchive = Archive . isArchiveFileName ( path ) ; return isArchive ; } ) ; if ( fileList == null ) { return ; } for ( File archive : fileList ) { addToWorkList ( workList , new WorkListItem ( classFactory . createFilesystemCodeBaseLocator ( archive . getPath ( ) ) , false , ICodeBase . Discovered . IN_SYSTEM_CLASSPATH ) ) ; } |
public class ZoneNameProviderSPI { /** * ~ Methods - - - - - */
@ Override public Set < String > getPreferredIDs ( Locale locale , boolean smart ) { } } | String country = FormatUtils . getRegion ( locale ) ; if ( smart ) { if ( country . equals ( "US" ) ) { Set < String > tzids = new LinkedHashSet < > ( ) ; tzids . add ( "America/New_York" ) ; tzids . add ( "America/Chicago" ) ; tzids . add ( "America/Denver" ) ; tzids . add ( "America/Los_Angeles" ) ; tzids . add ( "America/Anchorage" ) ; tzids . add ( "Pacific/Honolulu" ) ; tzids . add ( "America/Adak" ) ; return Collections . unmodifiableSet ( tzids ) ; } else { String primaryZone = PRIMARIES . get ( country ) ; if ( primaryZone != null ) { return Collections . singleton ( primaryZone ) ; } } } Set < String > result = TERRITORIES . get ( country ) ; if ( result == null ) { result = Collections . emptySet ( ) ; } return result ; |
public class MatlabSparseMatrixBuilder { /** * Returns a temporary file that will be deleted on JVM exit .
* @ return a temporary file used to store a matrix */
private static File getTempMatrixFile ( ) { } } | File tmp = null ; try { tmp = File . createTempFile ( "matlab-sparse-matrix" , ".dat" ) ; } catch ( IOException ioe ) { throw new IOError ( ioe ) ; } tmp . deleteOnExit ( ) ; return tmp ; |
public class BookmarkManager { /** * Returns all currently bookmarked conferences .
* @ return returns all currently bookmarked conferences
* @ throws XMPPErrorException
* @ throws NoResponseException
* @ throws NotConnectedException
* @ throws InterruptedException
* @ see BookmarkedConference */
public List < BookmarkedConference > getBookmarkedConferences ( ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } } | retrieveBookmarks ( ) ; return Collections . unmodifiableList ( bookmarks . getBookmarkedConferences ( ) ) ; |
public class StatementManager { /** * Links a session with a registered compiled statement . If this session has
* not already been linked with the given statement , then the statement use
* count is incremented .
* @ param csid the compiled statement identifier
* @ param sessionID the session identifier */
private void linkSession ( long csid , long sessionID ) { } } | LongKeyIntValueHashMap scsMap ; scsMap = ( LongKeyIntValueHashMap ) sessionUseMap . get ( sessionID ) ; if ( scsMap == null ) { scsMap = new LongKeyIntValueHashMap ( ) ; sessionUseMap . put ( sessionID , scsMap ) ; } int count = scsMap . get ( csid , 0 ) ; scsMap . put ( csid , count + 1 ) ; if ( count == 0 ) { useMap . put ( csid , useMap . get ( csid , 0 ) + 1 ) ; } |
public class AdminWebauthAction { @ Execute public HtmlResponse details ( final int crudMode , final String id ) { } } | verifyCrudMode ( crudMode , CrudMode . DETAILS ) ; saveToken ( ) ; return asHtml ( path_AdminWebauth_AdminWebauthDetailsJsp ) . useForm ( EditForm . class , op -> { op . setup ( form -> { webAuthenticationService . getWebAuthentication ( id ) . ifPresent ( entity -> { copyBeanToBean ( entity , form , copyOp -> { copyOp . excludeNull ( ) ; } ) ; form . crudMode = crudMode ; } ) . orElse ( ( ) -> { throwValidationError ( messages -> messages . addErrorsCrudCouldNotFindCrudTable ( GLOBAL , id ) , ( ) -> asListHtml ( ) ) ; } ) ; } ) ; } ) . renderWith ( data -> { registerProtocolSchemeItems ( data ) ; registerWebConfigItems ( data ) ; } ) ; |
public class DefaultNotifyHandler { /** * This is for non - Camel style event handlers .
* It is not used here . Overriding has no effect in the context of a Camel route . */
public String handleEventMessage ( String message , Object messageObj , Map < String , String > metaInfo ) throws EventHandlerException { } } | return null ; |
public class ElemVariable { /** * If the child of a variable is a single xsl : value - of or text literal ,
* it is cheaper to evaluate this as an expression , so try and adapt the
* child as an expression .
* @ param varElem Should be an ElemParam , ElemVariable , or ElemWithParam .
* @ return An XPath if rewrite is possible , else null .
* @ throws TransformerException */
static XPath rewriteChildToExpression ( ElemTemplateElement varElem ) throws TransformerException { } } | ElemTemplateElement t = varElem . getFirstChildElem ( ) ; // Down the line this can be done with multiple string objects using
// the concat function .
if ( null != t && null == t . getNextSiblingElem ( ) ) { int etype = t . getXSLToken ( ) ; if ( Constants . ELEMNAME_VALUEOF == etype ) { ElemValueOf valueof = ( ElemValueOf ) t ; // % TBD % I ' m worried about extended attributes here .
if ( valueof . getDisableOutputEscaping ( ) == false && valueof . getDOMBackPointer ( ) == null ) { varElem . m_firstChild = null ; return new XPath ( new XRTreeFragSelectWrapper ( valueof . getSelect ( ) . getExpression ( ) ) ) ; } } else if ( Constants . ELEMNAME_TEXTLITERALRESULT == etype ) { ElemTextLiteral lit = ( ElemTextLiteral ) t ; if ( lit . getDisableOutputEscaping ( ) == false && lit . getDOMBackPointer ( ) == null ) { String str = lit . getNodeValue ( ) ; XString xstr = new XString ( str ) ; varElem . m_firstChild = null ; return new XPath ( new XRTreeFragSelectWrapper ( xstr ) ) ; } } } return null ; |
public class PersistentExecutorImpl { /** * Returns the execution properties for the specified task .
* @ param task Callable or Runnable which might or might not implement ManagedTask .
* @ return the execution properties for the specified task . */
@ Trivial Map < String , String > getExecutionProperties ( Object task ) { } } | Map < String , String > execProps = task instanceof ManagedTask ? ( ( ManagedTask ) task ) . getExecutionProperties ( ) : null ; if ( execProps == null ) execProps = defaultExecProps ; else { Map < String , String > mergedProps = new TreeMap < String , String > ( defaultExecProps ) ; mergedProps . putAll ( execProps ) ; execProps = mergedProps ; } return execProps ; |
public class EsigateServer { /** * Get an integer from System properties
* @ param prefix the property name prefix
* @ param name the property name ( appended to the prefix )
* @ param defaultValue the value to use when the property is missing or not an integer
* @ return the parsed integer value , or the default value */
private static int getProperty ( String prefix , String name , int defaultValue ) { } } | int result = defaultValue ; try { result = Integer . parseInt ( System . getProperty ( prefix + name ) ) ; } catch ( NumberFormatException e ) { LOG . warn ( "Value for " + prefix + name + " must be an integer. Using default " + defaultValue ) ; } return result ; |
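The same parse-with-default pattern, written as a standalone sketch with an illustrative key name; note that a missing property also falls into the catch block because `Integer.parseInt(null)` throws `NumberFormatException`.

```java
// Standalone sketch (no EsigateServer dependency); key name is illustrative.
static int intProperty(String key, int defaultValue) {
    try {
        return Integer.parseInt(System.getProperty(key));
    } catch (NumberFormatException e) {
        // Missing or malformed property: fall back to the default.
        return defaultValue;
    }
}

// Usage: int port = intProperty("app.http.port", 8080);
```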
public class TimeToLiveSpecificationMarshaller { /** * Marshall the given parameter object . */
public void marshall ( TimeToLiveSpecification timeToLiveSpecification , ProtocolMarshaller protocolMarshaller ) { } } | if ( timeToLiveSpecification == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( timeToLiveSpecification . getEnabled ( ) , ENABLED_BINDING ) ; protocolMarshaller . marshall ( timeToLiveSpecification . getAttributeName ( ) , ATTRIBUTENAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class StorageUtil { /** * Reads two XML Element attributes and casts them to a Credentials object
* @ param el the XML Element to read the attributes from
* @ param attributeUser Name of the user Attribute to read
* @ param attributePassword Name of the password Attribute to read
* @ param defaultCredentials the Credentials to return when the user attribute is missing
* @ return the resulting Credentials */
public Credentials toCredentials ( Element el , String attributeUser , String attributePassword , Credentials defaultCredentials ) { } } | String user = el . getAttribute ( attributeUser ) ; String pass = el . getAttribute ( attributePassword ) ; if ( user == null ) return defaultCredentials ; if ( pass == null ) pass = "" ; return CredentialsImpl . toCredentials ( user , pass ) ; |
public class XmlStreamReaderUtils { /** * Returns the value of an attribute as a byte . If the attribute is empty , this method returns
* the default value provided .
* @ param reader
* < code > XMLStreamReader < / code > that contains attribute values .
* @ param localName
* local name of attribute ( the namespace is ignored ) .
* @ param defaultValue
* default value
* @ return value of attribute , or the default value if the attribute is empty . */
public static byte optionalByteAttribute ( final XMLStreamReader reader , final String localName , final byte defaultValue ) { } } | return optionalByteAttribute ( reader , null , localName , defaultValue ) ; |
public class Search { /** * Queries a Search Index and returns grouped results in a map where the key
* of the map is the groupName . In case the query didn't use grouping ,
* an empty map is returned
* @ param < T > Object type T
* @ param query the Lucene query to be passed to the Search index
* @ param classOfT The class of type T
* @ return The result of the grouped search query as an ordered { @ code Map < String , T > } */
public < T > Map < String , List < T > > queryGroups ( String query , Class < T > classOfT ) { } } | InputStream instream = null ; try { Reader reader = new InputStreamReader ( instream = queryForStream ( query ) , "UTF-8" ) ; JsonObject json = new JsonParser ( ) . parse ( reader ) . getAsJsonObject ( ) ; Map < String , List < T > > result = new LinkedHashMap < String , List < T > > ( ) ; if ( json . has ( "groups" ) ) { for ( JsonElement e : json . getAsJsonArray ( "groups" ) ) { String groupName = e . getAsJsonObject ( ) . get ( "by" ) . getAsString ( ) ; List < T > orows = new ArrayList < T > ( ) ; if ( ! includeDocs ) { log . warning ( "includeDocs set to false and attempting to retrieve doc. " + "null object will be returned" ) ; } for ( JsonElement rows : e . getAsJsonObject ( ) . getAsJsonArray ( "rows" ) ) { orows . add ( jsonToObject ( client . getGson ( ) , rows , "doc" , classOfT ) ) ; } result . put ( groupName , orows ) ; } // end for ( groups )
} // end hasgroups
else { log . warning ( "No grouped results available. Use query() if non grouped query" ) ; } return result ; } catch ( UnsupportedEncodingException e1 ) { // This should never happen as every implementation of the java platform is required
// to support UTF - 8.
throw new RuntimeException ( e1 ) ; } finally { close ( instream ) ; } |
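A hedged usage sketch, assuming a Cloudant java-client `Database` handle named `db`, a search index `app/books`, and a `Book` POJO (all hypothetical); `includeDocs(true)` matters because the rows above are mapped from the embedded "doc" field.

```java
// Hypothetical index and POJO; the index/query are assumed to be configured for grouping.
Search search = db.search("app/books").includeDocs(true);
Map<String, List<Book>> groups = search.queryGroups("title:java*", Book.class);
groups.forEach((groupName, docs) ->
        System.out.println(groupName + " -> " + docs.size() + " matching docs"));
```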
public class LocalSubmissionFromCS { /** * Takes the local job and application submission parameter files , deserializes them , and creates a submission object . */
static LocalSubmissionFromCS fromSubmissionParameterFiles ( final File localJobSubmissionParametersFile , final File localAppSubmissionParametersFile ) throws IOException { } } | final AvroLocalAppSubmissionParameters localAppSubmissionParameters ; final AvroLocalJobSubmissionParameters localJobSubmissionParameters ; try ( final FileInputStream fileInputStream = new FileInputStream ( localJobSubmissionParametersFile ) ) { final JsonDecoder decoder = DecoderFactory . get ( ) . jsonDecoder ( AvroLocalJobSubmissionParameters . getClassSchema ( ) , fileInputStream ) ; final SpecificDatumReader < AvroLocalJobSubmissionParameters > reader = new SpecificDatumReader < > ( AvroLocalJobSubmissionParameters . class ) ; localJobSubmissionParameters = reader . read ( null , decoder ) ; } try ( final FileInputStream fileInputStream = new FileInputStream ( localAppSubmissionParametersFile ) ) { final JsonDecoder decoder = DecoderFactory . get ( ) . jsonDecoder ( AvroLocalAppSubmissionParameters . getClassSchema ( ) , fileInputStream ) ; final SpecificDatumReader < AvroLocalAppSubmissionParameters > reader = new SpecificDatumReader < > ( AvroLocalAppSubmissionParameters . class ) ; localAppSubmissionParameters = reader . read ( null , decoder ) ; } return new LocalSubmissionFromCS ( localJobSubmissionParameters , localAppSubmissionParameters ) ; |
public class ExecutionGraph { /** * Deserializes accumulators from a task state update .
* < p > This method never throws an exception !
* @ param state The task execution state from which to deserialize the accumulators .
* @ return The deserialized accumulators , or null , if there are no accumulators or an error occurred . */
private Map < String , Accumulator < ? , ? > > deserializeAccumulators ( TaskExecutionState state ) { } } | AccumulatorSnapshot serializedAccumulators = state . getAccumulators ( ) ; if ( serializedAccumulators != null ) { try { return serializedAccumulators . deserializeUserAccumulators ( userClassLoader ) ; } catch ( Throwable t ) { // we catch Throwable here to include all form of linking errors that may
// occur if user classes are missing in the classpath
LOG . error ( "Failed to deserialize final accumulator results." , t ) ; } } return null ; |
public class ProxyFactory { /** * Builds a proxy instance
* @ param proxyType the proxy type
* @ param clazz the original class
* @ param proxyInvoker the Invoker that executes the call
* @ param < T > the type
* @ return the proxy instance
* @ throws Exception */
public static < T > T buildProxy ( String proxyType , Class < T > clazz , Invoker proxyInvoker ) throws Exception { } } | try { ExtensionClass < Proxy > ext = ExtensionLoaderFactory . getExtensionLoader ( Proxy . class ) . getExtensionClass ( proxyType ) ; if ( ext == null ) { throw ExceptionUtils . buildRuntime ( "consumer.proxy" , proxyType , "Unsupported proxy of client!" ) ; } Proxy proxy = ext . getExtInstance ( ) ; return proxy . getProxy ( clazz , proxyInvoker ) ; } catch ( SofaRpcRuntimeException e ) { throw e ; } catch ( Throwable e ) { throw new SofaRpcRuntimeException ( e . getMessage ( ) , e ) ; } |
public class MessageBean { /** * Set error location in source document .
* @ param elem source element .
* @ return message bean with location set */
public MessageBean setLocation ( final Element elem ) { } } | final MessageBean ret = new MessageBean ( this ) ; final String xtrf = elem . getAttribute ( ATTRIBUTE_NAME_XTRF ) ; if ( ! xtrf . isEmpty ( ) ) { ret . srcFile = toURI ( xtrf ) ; } final String xtrc = elem . getAttribute ( ATTRIBUTE_NAME_XTRC ) ; if ( ! xtrc . isEmpty ( ) ) { final int sep = xtrc . indexOf ( ';' ) ; if ( sep != - 1 ) { final int delim = xtrc . indexOf ( COLON , sep + 1 ) ; if ( delim != - 1 ) { ret . srcLine = Integer . parseInt ( xtrc . substring ( sep + 1 , delim ) ) ; ret . srcColumn = Integer . parseInt ( xtrc . substring ( delim + 1 ) ) ; } } } return ret ; |
public class RepositoryMetadataMarshaller { /** * Marshall the given parameter object . */
public void marshall ( RepositoryMetadata repositoryMetadata , ProtocolMarshaller protocolMarshaller ) { } } | if ( repositoryMetadata == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( repositoryMetadata . getAccountId ( ) , ACCOUNTID_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getRepositoryId ( ) , REPOSITORYID_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getRepositoryName ( ) , REPOSITORYNAME_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getRepositoryDescription ( ) , REPOSITORYDESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getDefaultBranch ( ) , DEFAULTBRANCH_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getLastModifiedDate ( ) , LASTMODIFIEDDATE_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getCreationDate ( ) , CREATIONDATE_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getCloneUrlHttp ( ) , CLONEURLHTTP_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getCloneUrlSsh ( ) , CLONEURLSSH_BINDING ) ; protocolMarshaller . marshall ( repositoryMetadata . getArn ( ) , ARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class TemplateBase { /** * Set output file
* @ param file */
protected void __setOutput ( File file ) { } } | try { w_ = new BufferedWriter ( new FileWriter ( file ) ) ; } catch ( Exception e ) { throw new FastRuntimeException ( e . getMessage ( ) ) ; } |
public class ClassLoader { /** * Defines a package by name in this < tt > ClassLoader < / tt > . This allows
* class loaders to define the packages for their classes . Packages must
* be created before the class is defined , and package names must be
* unique within a class loader and cannot be redefined or changed once
* created .
* @ param name
* The package name
* @ param specTitle
* The specification title
* @ param specVersion
* The specification version
* @ param specVendor
* The specification vendor
* @ param implTitle
* The implementation title
* @ param implVersion
* The implementation version
* @ param implVendor
* The implementation vendor
* @ param sealBase
* If not < tt > null < / tt > , then this package is sealed with
* respect to the given code source { @ link java . net . URL
* < tt > URL < / tt > } object . Otherwise , the package is not sealed .
* @ return The newly defined < tt > Package < / tt > object
* @ throws IllegalArgumentException
* If package name duplicates an existing package either in this
* class loader or one of its ancestors
* @ since 1.2 */
protected Package definePackage ( String name , String specTitle , String specVersion , String specVendor , String implTitle , String implVersion , String implVendor , URL sealBase ) throws IllegalArgumentException { } } | synchronized ( packages ) { Package pkg = packages . get ( name ) ; if ( pkg != null ) { throw new IllegalArgumentException ( name ) ; } pkg = new Package ( name , specTitle , specVersion , specVendor , implTitle , implVersion , implVendor , sealBase , this ) ; packages . put ( name , pkg ) ; return pkg ; } |
public class StanzaCollector { /** * Polls to see if a stanza is currently available and returns it , or
* immediately returns < tt > null < / tt > if no packets are currently in the
* result queue .
* Throws an XMPPErrorException in case the polled stanzas did contain an XMPPError .
* @ param < P > type of the result stanza .
* @ return the next available packet .
* @ throws XMPPErrorException in case of an error response . */
public < P extends Stanza > P pollResultOrThrow ( ) throws XMPPErrorException { } } | P result = pollResult ( ) ; if ( result != null ) { XMPPErrorException . ifHasErrorThenThrow ( result ) ; } return result ; |
public class JsonRpcResponse { /** * Builds a new response for an identified request , containing an error .
* @ param error the error to return to the user
* @ param id the identifier of the request for which this response is
* generated */
static JsonRpcResponse error ( JsonRpcError error , JsonElement id ) { } } | return new JsonRpcResponse ( id , error , null ) ; |
public class PathCompiler { /** * Parse the parameters of a function call , either the caller has supplied JSON data , or the caller has supplied
* another path expression which must be evaluated and in turn invoked against the root document . In this tokenizer
* we ' re only concerned with parsing the path thus the output of this function is a list of parameters with the Path
* set if the parameter is an expression . If the parameter is a JSON document then the value of the cachedValue is
* set on the object .
* Sequence for parsing out the parameters :
* This code has its own tokenizer - it does some rudimentary level of lexing in that it can distinguish between JSON block parameters
* and sub - JSON blocks - it effectively regex ' s out the parameters into string blocks that can then be passed along to the appropriate parser .
* Since sub - jsonpath expressions can themselves contain other function calls this routine needs to be sensitive to token counting to
* determine the boundaries . Since the Path parser isn ' t aware of JSON processing this uber routine is needed .
* Parameters are separated by COMMAs ' , '
* < pre >
* doc = { " numbers " : [ 1,2,3,4,5,6,7,8,9,10 ] }
* $ . sum ( { 10 } , $ . numbers . avg ( ) )
* < / pre >
* The above is a valid function call , we ' re first summing 10 + avg of 1 . . . 10 ( 5.5 ) so the total should be 15.5
* @ return
* An ordered list of parameters that are to processed via the function . Typically functions either process
* an array of values and / or can consume parameters in addition to the values provided from the consumption of
* an array . */
private List < Parameter > parseFunctionParameters ( String funcName ) { } } | ParamType type = null ; // Parenthesis starts at 1 since we ' re marking the start of a function call , the close paren will denote the
// last parameter boundary
Integer groupParen = 1 , groupBracket = 0 , groupBrace = 0 , groupQuote = 0 ; Boolean endOfStream = false ; char priorChar = 0 ; List < Parameter > parameters = new ArrayList < Parameter > ( ) ; StringBuilder parameter = new StringBuilder ( ) ; while ( path . inBounds ( ) && ! endOfStream ) { char c = path . currentChar ( ) ; path . incrementPosition ( 1 ) ; // we ' re at the start of the stream , and don ' t know what type of parameter we have
if ( type == null ) { if ( isWhitespace ( c ) ) { continue ; } if ( c == OPEN_BRACE || isDigit ( c ) || DOUBLE_QUOTE == c ) { type = ParamType . JSON ; } else if ( isPathContext ( c ) ) { type = ParamType . PATH ; // read until we reach a terminating comma and we ' ve reset grouping to zero
} } switch ( c ) { case DOUBLE_QUOTE : if ( priorChar != '\\' && groupQuote > 0 ) { if ( groupQuote == 0 ) { throw new InvalidPathException ( "Unexpected quote '\"' at character position: " + path . position ( ) ) ; } groupQuote -- ; } else { groupQuote ++ ; } break ; case OPEN_PARENTHESIS : groupParen ++ ; break ; case OPEN_BRACE : groupBrace ++ ; break ; case OPEN_SQUARE_BRACKET : groupBracket ++ ; break ; case CLOSE_BRACE : if ( 0 == groupBrace ) { throw new InvalidPathException ( "Unexpected close brace '}' at character position: " + path . position ( ) ) ; } groupBrace -- ; break ; case CLOSE_SQUARE_BRACKET : if ( 0 == groupBracket ) { throw new InvalidPathException ( "Unexpected close bracket ']' at character position: " + path . position ( ) ) ; } groupBracket -- ; break ; // In either the close paren case where we have zero paren groups left , capture the parameter , or where
// we ' ve encountered a COMMA do the same
case CLOSE_PARENTHESIS : groupParen -- ; if ( 0 != groupParen ) { parameter . append ( c ) ; } case COMMA : // In this state we ' ve reach the end of a function parameter and we can pass along the parameter string
// to the parser
if ( ( 0 == groupQuote && 0 == groupBrace && 0 == groupBracket && ( ( 0 == groupParen && CLOSE_PARENTHESIS == c ) || 1 == groupParen ) ) ) { endOfStream = ( 0 == groupParen ) ; if ( null != type ) { Parameter param = null ; switch ( type ) { case JSON : // parse the json and set the value
param = new Parameter ( parameter . toString ( ) ) ; break ; case PATH : LinkedList < Predicate > predicates = new LinkedList < Predicate > ( ) ; PathCompiler compiler = new PathCompiler ( parameter . toString ( ) , predicates ) ; param = new Parameter ( compiler . compile ( ) ) ; break ; } if ( null != param ) { parameters . add ( param ) ; } parameter . delete ( 0 , parameter . length ( ) ) ; type = null ; } } break ; } if ( type != null && ! ( c == COMMA && 0 == groupBrace && 0 == groupBracket && 1 == groupParen ) ) { parameter . append ( c ) ; } priorChar = c ; } if ( 0 != groupBrace || 0 != groupParen || 0 != groupBracket ) { throw new InvalidPathException ( "Arguments to function: '" + funcName + "' are not closed properly." ) ; } return parameters ; |
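To connect the parameter grammar above to end-user behaviour, here is a hedged sketch that evaluates the exact expression from the Javadoc through the Jayway JsonPath entry point (`JsonPath.read`); the expected result of 15.5 is taken from the comment above, not verified here.

```java
import com.jayway.jsonpath.JsonPath;

public class FunctionParameterExample {
    public static void main(String[] args) {
        String doc = "{\"numbers\":[1,2,3,4,5,6,7,8,9,10]}";
        // One JSON parameter ({10}) and one sub-path parameter ($.numbers.avg()),
        // separated by the COMMA handled in the tokenizer above.
        Object total = JsonPath.read(doc, "$.sum({10}, $.numbers.avg())");
        System.out.println(total); // 15.5 according to the Javadoc above
    }
}
```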
public class Utils { /** * Ensures that an object reference passed as a parameter to the calling method is not null .
* @ param value an object reference
* @ param varName the variable name to use in an exception message if the check fails
* @ return the non - null reference that was validated
* @ throws NullPointerException if { @ code value } is null */
public static < T > T checkNotNull ( @ Nullable T value , Object varName ) { } } | if ( value == null ) { throw new NullPointerException ( format ( "{} cannot be null" , varName ) ) ; } return value ; |
public class SessionContextRegistry { /** * Removes the session context from the registry */
public static synchronized void remove ( String appname ) { } } | if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_CORE . logp ( Level . FINE , methodClassName , "remove" , appname ) ; } scrSessionContexts . remove ( appname ) ; |
public class BlockingList { /** * Get a point - in - time view of the unmatched keys .
* This may be immediately out of date unless additional
* synchronization is performed to prevent concurrent updates . */
public Set < K > getUnmatchedKeys ( ) { } } | stateLock . readLock ( ) . lock ( ) ; try { return new HashSet < K > ( this . actualIndices . keySet ( ) ) ; } finally { stateLock . readLock ( ) . unlock ( ) ; } |
public class FileUtil { /** * Reads every line from a file
* @ param < T > the collection type
* @ param url the URL of the file
* @ param charset the character set
* @ param collection the collection to fill
* @ return the collection containing every line of the file
* @ throws IORuntimeException IO exception
* @ since 3.1.1 */
public static < T extends Collection < String > > T readLines ( URL url , Charset charset , T collection ) throws IORuntimeException { } } | InputStream in = null ; try { in = url . openStream ( ) ; return IoUtil . readLines ( in , charset , collection ) ; } catch ( IOException e ) { throw new IORuntimeException ( e ) ; } finally { IoUtil . close ( in ) ; } |
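A hedged usage sketch for the overload documented above (URL, charset, target collection); the file path is made up and the Hutool package name in the import is an assumption that may differ between versions.

```java
import cn.hutool.core.io.FileUtil; // assumed package; adjust to the Hutool version in use
import java.io.File;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class ReadLinesExample {
    public static void main(String[] args) throws Exception {
        URL url = new File("config/hosts.txt").toURI().toURL(); // hypothetical file
        // Collects each line of the file into the supplied list.
        List<String> lines = FileUtil.readLines(url, StandardCharsets.UTF_8, new ArrayList<String>());
        lines.forEach(System.out::println);
    }
}
```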
public class ServiceObjectivesInner { /** * Gets a database service objective .
* @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
* @ param serverName The name of the server .
* @ param serviceObjectiveName The name of the service objective to retrieve .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < ServiceObjectiveInner > getAsync ( String resourceGroupName , String serverName , String serviceObjectiveName , final ServiceCallback < ServiceObjectiveInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , serverName , serviceObjectiveName ) , serviceCallback ) ; |
public class TimeShift { /** * Adjusts the timestamp of each datapoint by timeshift
* @ param points The data points to shift
* @ param timeshift The amount of time to shift each timestamp by
* @ return The resulting data points */
DataPoints shift ( final DataPoints points , final long timeshift ) { } } | // TODO ( cl ) - Using an array as the size function may not return the exact
// results and we should figure a way to avoid copying data anyway .
final List < DataPoint > dps = new ArrayList < DataPoint > ( ) ; for ( DataPoint pt : points ) { dps . add ( shift ( pt , timeshift ) ) ; } final DataPoint [ ] results = new DataPoint [ dps . size ( ) ] ; dps . toArray ( results ) ; return new PostAggregatedDataPoints ( points , results ) ; |
public class LongGauge { /** * Returns the no - op implementation of the { @ code LongGauge } .
* @ return the no - op implementation of the { @ code LongGauge } .
* @ since 0.17 */
static LongGauge newNoopLongGauge ( String name , String description , String unit , List < LabelKey > labelKeys ) { } } | return NoopLongGauge . create ( name , description , unit , labelKeys ) ; |
public class BooleanList { /** * Returns whether all elements of this List match the provided predicate .
* @ param filter the predicate to apply to the elements of this List
* @ return { @ code true } if every element matches the predicate */
public < E extends Exception > boolean allMatch ( Try . BooleanPredicate < E > filter ) throws E { } } | return allMatch ( 0 , size ( ) , filter ) ; |
public class EscapeUtil { /** * Combine a list of strings to a single path with proper escaping .
* @ param pParts parts to combine
* @ return the combined path */
public static String combineToPath ( List < String > pParts ) { } } | if ( pParts != null && pParts . size ( ) > 0 ) { StringBuilder buf = new StringBuilder ( ) ; Iterator < String > it = pParts . iterator ( ) ; while ( it . hasNext ( ) ) { String part = it . next ( ) ; buf . append ( escapePart ( part != null ? part : "*" ) ) ; if ( it . hasNext ( ) ) { buf . append ( "/" ) ; } } return buf . toString ( ) ; } else { return null ; } |
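A small hedged fragment illustrating the combination described above; the part values are illustrative MBean-style path segments, and a null part collapses to "*" as in the implementation.

```java
// Illustrative parts; escapePart() is applied to each one before joining with '/'.
List<String> parts = Arrays.asList("java.lang", "type=Memory", null);
String path = EscapeUtil.combineToPath(parts);
System.out.println(path); // e.g. "java.lang/type=Memory/*" (exact escaping depends on escapePart)
```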
public class KeyStroke { /** * Creates a Key from a string representation in Vim ' s key notation .
* @ param keyStr the string representation of this key
* @ return the created { @ link KeyStroke } */
public static KeyStroke fromString ( String keyStr ) { } } | String keyStrLC = keyStr . toLowerCase ( ) ; KeyStroke k ; if ( keyStr . length ( ) == 1 ) { k = new KeyStroke ( KeyType . Character , keyStr . charAt ( 0 ) , false , false , false ) ; } else if ( keyStr . startsWith ( "<" ) && keyStr . endsWith ( ">" ) ) { if ( keyStrLC . equals ( "<s-tab>" ) ) { k = new KeyStroke ( KeyType . ReverseTab ) ; } else if ( keyStr . contains ( "-" ) ) { ArrayList < String > segments = new ArrayList < String > ( Arrays . asList ( keyStr . substring ( 1 , keyStr . length ( ) - 1 ) . split ( "-" ) ) ) ; if ( segments . size ( ) < 2 ) { throw new IllegalArgumentException ( "Invalid vim notation: " + keyStr ) ; } String characterStr = segments . remove ( segments . size ( ) - 1 ) ; boolean altPressed = false ; boolean ctrlPressed = false ; for ( String modifier : segments ) { if ( "c" . equals ( modifier . toLowerCase ( ) ) ) { ctrlPressed = true ; } else if ( "a" . equals ( modifier . toLowerCase ( ) ) ) { altPressed = true ; } else if ( "s" . equals ( modifier . toLowerCase ( ) ) ) { characterStr = characterStr . toUpperCase ( ) ; } } k = new KeyStroke ( characterStr . charAt ( 0 ) , ctrlPressed , altPressed ) ; } else { if ( keyStrLC . startsWith ( "<esc" ) ) { k = new KeyStroke ( KeyType . Escape ) ; } else if ( keyStrLC . equals ( "<cr>" ) || keyStrLC . equals ( "<enter>" ) || keyStrLC . equals ( "<return>" ) ) { k = new KeyStroke ( KeyType . Enter ) ; } else if ( keyStrLC . equals ( "<bs>" ) ) { k = new KeyStroke ( KeyType . Backspace ) ; } else if ( keyStrLC . equals ( "<tab>" ) ) { k = new KeyStroke ( KeyType . Tab ) ; } else if ( keyStrLC . equals ( "<space>" ) ) { k = new KeyStroke ( ' ' , false , false ) ; } else if ( keyStrLC . equals ( "<up>" ) ) { k = new KeyStroke ( KeyType . ArrowUp ) ; } else if ( keyStrLC . equals ( "<down>" ) ) { k = new KeyStroke ( KeyType . ArrowDown ) ; } else if ( keyStrLC . equals ( "<left>" ) ) { k = new KeyStroke ( KeyType . ArrowLeft ) ; } else if ( keyStrLC . equals ( "<right>" ) ) { k = new KeyStroke ( KeyType . ArrowRight ) ; } else if ( keyStrLC . equals ( "<insert>" ) ) { k = new KeyStroke ( KeyType . Insert ) ; } else if ( keyStrLC . equals ( "<del>" ) ) { k = new KeyStroke ( KeyType . Delete ) ; } else if ( keyStrLC . equals ( "<home>" ) ) { k = new KeyStroke ( KeyType . Home ) ; } else if ( keyStrLC . equals ( "<end>" ) ) { k = new KeyStroke ( KeyType . End ) ; } else if ( keyStrLC . equals ( "<pageup>" ) ) { k = new KeyStroke ( KeyType . PageUp ) ; } else if ( keyStrLC . equals ( "<pagedown>" ) ) { k = new KeyStroke ( KeyType . PageDown ) ; } else if ( keyStrLC . equals ( "<f1>" ) ) { k = new KeyStroke ( KeyType . F1 ) ; } else if ( keyStrLC . equals ( "<f2>" ) ) { k = new KeyStroke ( KeyType . F2 ) ; } else if ( keyStrLC . equals ( "<f3>" ) ) { k = new KeyStroke ( KeyType . F3 ) ; } else if ( keyStrLC . equals ( "<f4>" ) ) { k = new KeyStroke ( KeyType . F4 ) ; } else if ( keyStrLC . equals ( "<f5>" ) ) { k = new KeyStroke ( KeyType . F5 ) ; } else if ( keyStrLC . equals ( "<f6>" ) ) { k = new KeyStroke ( KeyType . F6 ) ; } else if ( keyStrLC . equals ( "<f7>" ) ) { k = new KeyStroke ( KeyType . F7 ) ; } else if ( keyStrLC . equals ( "<f8>" ) ) { k = new KeyStroke ( KeyType . F8 ) ; } else if ( keyStrLC . equals ( "<f9>" ) ) { k = new KeyStroke ( KeyType . F9 ) ; } else if ( keyStrLC . equals ( "<f10>" ) ) { k = new KeyStroke ( KeyType . F10 ) ; } else if ( keyStrLC . equals ( "<f11>" ) ) { k = new KeyStroke ( KeyType . F11 ) ; } else if ( keyStrLC . 
equals ( "<f12>" ) ) { k = new KeyStroke ( KeyType . F12 ) ; } else { throw new IllegalArgumentException ( "Invalid vim notation: " + keyStr ) ; } } } else { throw new IllegalArgumentException ( "Invalid vim notation: " + keyStr ) ; } return k ; |
public class IndentedConfigReaderMapping { /** * Adds a message to the list .
* @ param fileName
* @ param lineNr
* @ param line
* @ param errorMessage */
private static void reportError ( List messages , String fileName , int lineNr , String line , String errorMessage ) { } } | messages . add ( new Date ( ) + " ERROR in \"" + fileName + "\" at line " + lineNr + ':' ) ; messages . add ( errorMessage ) ; |
public class EmojiUtil { /** * Converts emoji < br >
* Example : < code > 🍀 < / code > will be converted to
* & lt ; span class = ' emoji emoji1f340 ' & gt ; & lt ; / span & gt ; < br >
* @ param emoji _ str emoji _ str
* @ return emoji _ result */
public static String parseToHtmlTag ( String emoji_str ) { } } | if ( emoji_str != null ) { String str = EmojiParser . parseToHtmlHexadecimal ( emoji_str ) ; return htmlHexadecimalToHtmlTag ( str ) ; } return null ; |
public class Security { /** * Returns an array of objects : the first object in the array is
* an instance of an implementation of the requested algorithm
* and type , and the second object in the array identifies the provider
* of that implementation .
* The < code > provider < / code > argument can be null , in which case all
* configured providers will be searched in order of preference . */
static Object [ ] getImpl ( String algorithm , String type , String provider ) throws NoSuchAlgorithmException , NoSuchProviderException { } } | if ( provider == null ) { return GetInstance . getInstance ( type , getSpiClass ( type ) , algorithm ) . toArray ( ) ; } else { return GetInstance . getInstance ( type , getSpiClass ( type ) , algorithm , provider ) . toArray ( ) ; } |
public class Transformers { /** * Removes pairs non - recursively from a stream . Uses
* { @ code Transformers . stateMachine ( ) } under the covers to ensure items are
* emitted as soon as possible ( if an item can ' t be in a pair then it is
* emitted straight away ) .
* @ param isCandidateForFirst
* returns true if item is potentially the first of a pair that
* we want to remove
* @ param remove
* returns true if a pair should be removed
* @ param < T >
* generic type of stream being transformed
* @ return transformed stream */
public static < T > Transformer < T , T > removePairs ( final Func1 < ? super T , Boolean > isCandidateForFirst , final Func2 < ? super T , ? super T , Boolean > remove ) { } } | return new Transformer < T , T > ( ) { @ Override public Observable < T > call ( Observable < T > o ) { return o . compose ( Transformers . stateMachine ( ) . initialState ( Optional . < T > absent ( ) ) . transition ( new Transition < Optional < T > , T , T > ( ) { @ Override public Optional < T > call ( Optional < T > state , T value , Subscriber < T > subscriber ) { if ( ! state . isPresent ( ) ) { if ( isCandidateForFirst . call ( value ) ) { return Optional . of ( value ) ; } else { subscriber . onNext ( value ) ; return Optional . absent ( ) ; } } else { if ( remove . call ( state . get ( ) , value ) ) { // emit nothing and reset state
return Optional . absent ( ) ; } else { subscriber . onNext ( state . get ( ) ) ; if ( isCandidateForFirst . call ( value ) ) { return Optional . of ( value ) ; } else { subscriber . onNext ( value ) ; return Optional . absent ( ) ; } } } } } ) . completion ( new Completion < Optional < T > , T > ( ) { @ Override public Boolean call ( Optional < T > state , Subscriber < T > subscriber ) { if ( state . isPresent ( ) ) subscriber . onNext ( state . get ( ) ) ; // yes , complete
return true ; } } ) . build ( ) ) ; } } ; |
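A hedged usage fragment for the transformer above: it drops a candidate item when the very next item pairs with it, and passes everything else through as soon as possible; the sample data and predicates are made up.

```java
// Removes a "+n" immediately followed by its matching "-n"; unmatched items pass through.
Observable<String> cleaned = Observable
        .just("+1", "-1", "a", "+2", "b", "+3", "-3")
        .compose(Transformers.<String> removePairs(
                s -> s.startsWith("+"),                                       // candidate first of a pair
                (first, second) -> second.equals("-" + first.substring(1)))); // pair should be removed
cleaned.subscribe(System.out::println); // expected output: a, +2, b
```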
public class HiveMetaStoreBasedRegister { /** * If the database already exists on the Hive side , returns false ;
* otherwise creates the database through RPC and returns the result from the remote MetaStore .
* @ param hiveDbName is the hive databases to be checked for existence */
private boolean ensureHiveDbExistence ( String hiveDbName , IMetaStoreClient client ) throws IOException { } } | try ( AutoCloseableLock lock = this . locks . getDbLock ( hiveDbName ) ) { Database db = new Database ( ) ; db . setName ( hiveDbName ) ; try { try ( Timer . Context context = this . metricContext . timer ( GET_HIVE_DATABASE ) . time ( ) ) { client . getDatabase ( db . getName ( ) ) ; } return false ; } catch ( NoSuchObjectException nsoe ) { // proceed with create
} catch ( TException te ) { throw new IOException ( te ) ; } Preconditions . checkState ( this . hiveDbRootDir . isPresent ( ) , "Missing required property " + HiveRegProps . HIVE_DB_ROOT_DIR ) ; db . setLocationUri ( new Path ( this . hiveDbRootDir . get ( ) , hiveDbName + HIVE_DB_EXTENSION ) . toString ( ) ) ; try { try ( Timer . Context context = this . metricContext . timer ( CREATE_HIVE_DATABASE ) . time ( ) ) { client . createDatabase ( db ) ; } log . info ( "Created database " + hiveDbName ) ; HiveMetaStoreEventHelper . submitSuccessfulDBCreation ( this . eventSubmitter , hiveDbName ) ; return true ; } catch ( AlreadyExistsException e ) { return false ; } catch ( TException e ) { HiveMetaStoreEventHelper . submitFailedDBCreation ( this . eventSubmitter , hiveDbName , e ) ; throw new IOException ( "Unable to create Hive database " + hiveDbName , e ) ; } } |
public class NodeSetDTM { /** * Inserts the specified node in this vector at the specified index .
* Each component in this vector with an index greater or equal to
* the specified index is shifted upward to have an index one greater
* than the value it had previously .
* @ param value The node to be inserted .
* @ param at The index where the insert should occur .
* @ throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type . */
public void insertElementAt ( int value , int at ) { } } | if ( ! m_mutable ) throw new RuntimeException ( XSLMessages . createXPATHMessage ( XPATHErrorResources . ER_NODESETDTM_NOT_MUTABLE , null ) ) ; // " This NodeSetDTM is not mutable ! " ) ;
super . insertElementAt ( value , at ) ; |
public class AllureLifecycle { /** * Start a new fixture with given uuid .
* @ param uuid the uuid of fixture .
* @ param result the test fixture . */
private void startFixture ( final String uuid , final FixtureResult result ) { } } | storage . put ( uuid , result ) ; result . setStage ( Stage . RUNNING ) ; result . setStart ( System . currentTimeMillis ( ) ) ; threadContext . clear ( ) ; threadContext . start ( uuid ) ; |
public class EclipseIndexWriter { /** * Output the given indexterm into the XML writer .
* @ param term term to serialize
* @ param serializer XML output to write to
* @ param indexsee is term a see term */
private void outputIndexTerm ( final IndexTerm term , final XMLStreamWriter serializer , final boolean indexsee ) throws XMLStreamException { } } | final List < IndexTerm > subTerms = term . getSubTerms ( ) ; final int subTermNum = subTerms . size ( ) ; outputIndexTermStartElement ( term , serializer , indexsee ) ; if ( subTermNum > 0 ) { for ( final IndexTerm subTerm : subTerms ) { outputIndexTerm ( subTerm , serializer , indexsee ) ; } } outputIndexTermEndElement ( term , serializer , indexsee ) ; |
public class login { /** * < pre >
* Login .
* < / pre > */
public static login add ( nitro_service client , login resource ) throws Exception { } } | resource . validate ( "add" ) ; return ( ( login [ ] ) resource . perform_operation ( client , "add" ) ) [ 0 ] ; |
public class TransactGetItemsRequest { /** * An ordered array of up to 10 < code > TransactGetItem < / code > objects , each of which contains a < code > Get < / code >
* structure .
* @ param transactItems
* An ordered array of up to 10 < code > TransactGetItem < / code > objects , each of which contains a
* < code > Get < / code > structure . */
public void setTransactItems ( java . util . Collection < TransactGetItem > transactItems ) { } } | if ( transactItems == null ) { this . transactItems = null ; return ; } this . transactItems = new java . util . ArrayList < TransactGetItem > ( transactItems ) ; |
public class IOUtilities { /** * Use this when you expect exactly bytes . length bytes to be read from the InputStream */
public static void transfer ( InputStream in , byte [ ] bytes ) throws IOException { } } | // Read in the bytes
int offset = 0 ; int numRead ; while ( offset < bytes . length && ( numRead = in . read ( bytes , offset , bytes . length - offset ) ) >= 0 ) { offset += numRead ; } if ( offset < bytes . length ) { throw new IOException ( "Retry: Not all bytes were transferred correctly." ) ; } |
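A hedged usage fragment: reading an exact number of bytes from a stream into a pre-sized buffer; the file name is illustrative.

```java
// Reads exactly 16 bytes or throws IOException if the stream ends early.
byte[] header = new byte[16];
try (InputStream in = new FileInputStream("data.bin")) { // hypothetical file
    IOUtilities.transfer(in, header);
}
```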
public class MapKeyLoaderUtil { /** * Transforms an iterator of entries to an iterator of entry batches
* where each batch is represented as a map from entry key to
* list of entry values .
* The maximum size of the entry value list in any batch is
* determined by the { @ code maxBatch } parameter . Only one
* entry value list may have the { @ code maxBatch } size , other
* lists will be smaller .
* @ param entries the entries to be batched
* @ param maxBatch the maximum size of an entry group in a single batch
* @ return an iterator with entry batches */
static Iterator < Map < Integer , List < Data > > > toBatches ( final Iterator < Entry < Integer , Data > > entries , final int maxBatch ) { } } | return new UnmodifiableIterator < Map < Integer , List < Data > > > ( ) { @ Override public boolean hasNext ( ) { return entries . hasNext ( ) ; } @ Override public Map < Integer , List < Data > > next ( ) { if ( ! entries . hasNext ( ) ) { throw new NoSuchElementException ( ) ; } return nextBatch ( entries , maxBatch ) ; } } ; |
public class AbstractResultSetWrapper { /** * { @ inheritDoc }
* @ see java . sql . ResultSet # updateBytes ( java . lang . String , byte [ ] ) */
@ Override public void updateBytes ( final String columnLabel , final byte [ ] x ) throws SQLException { } } | wrapped . updateBytes ( columnLabel , x ) ; |
public class EUI48 { /** * Compares this { @ link EUI48 } with the specified { @ link EUI48 } based on their bit
* representations .
* @ param val { @ code EUI48 } to which this { @ code EUI48 } should be compared .
* @ return { @ code - 1 } , { @ code 0 } or { @ code 1 } if this { @ link EUI48 } is less than , equal to or
* greater than { @ code val } .
* @ throws NullPointerException if { @ code val } is { @ code null } . */
@ Override public int compareTo ( EUI48 val ) { } } | if ( this . mostSignificantBits < val . mostSignificantBits ) { return - 1 ; } else if ( this . mostSignificantBits > val . mostSignificantBits ) { return 1 ; } else if ( this . leastSignificantBits < val . leastSignificantBits ) { return - 1 ; } else if ( this . leastSignificantBits > val . leastSignificantBits ) { return 1 ; } else { return 0 ; } |
public class MultivaluedAttributeMerger { /** * / * ( non - Javadoc )
* @ see org . jasig . services . persondir . support . merger . BaseAdditiveAttributeMerger # mergePersonAttributes ( java . util . Map , java . util . Map ) */
@ Override protected Map < String , List < Object > > mergePersonAttributes ( final Map < String , List < Object > > toModify , final Map < String , List < Object > > toConsider ) { } } | Validate . notNull ( toModify , "toModify cannot be null" ) ; Validate . notNull ( toConsider , "toConsider cannot be null" ) ; for ( final Map . Entry < String , List < Object > > sourceEntry : toConsider . entrySet ( ) ) { final String sourceKey = sourceEntry . getKey ( ) ; List < Object > destList = toModify . get ( sourceKey ) ; if ( destList == null ) { destList = new LinkedList < > ( ) ; toModify . put ( sourceKey , destList ) ; } final List < Object > sourceValue = sourceEntry . getValue ( ) ; destList . addAll ( sourceValue ) ; } return toModify ; |
public class DependencyList { /** * Returns true if the underlying dependencies have changed . */
public boolean isModified ( ) { } } | for ( int i = _dependencyList . size ( ) - 1 ; i >= 0 ; i -- ) { Dependency dependency = _dependencyList . get ( i ) ; if ( dependency . isModified ( ) ) { return true ; } } return false ; |
public class RESTClientConfig { /** * Set the values using the specified Properties object .
* @ param properties Properties object containing specific property values
* for the RESTClient config
* Note : We ' re using the same property names as that in ClientConfig
* for backwards compatibility . */
private void setProperties ( Properties properties ) { } } | Props props = new Props ( properties ) ; if ( props . containsKey ( ClientConfig . ENABLE_JMX_PROPERTY ) ) { this . setEnableJmx ( props . getBoolean ( ClientConfig . ENABLE_JMX_PROPERTY ) ) ; } if ( props . containsKey ( ClientConfig . BOOTSTRAP_URLS_PROPERTY ) ) { List < String > urls = props . getList ( ClientConfig . BOOTSTRAP_URLS_PROPERTY ) ; if ( urls . size ( ) > 0 ) { setHttpBootstrapURL ( urls . get ( 0 ) ) ; } } if ( props . containsKey ( ClientConfig . MAX_TOTAL_CONNECTIONS_PROPERTY ) ) { setMaxR2ConnectionPoolSize ( props . getInt ( ClientConfig . MAX_TOTAL_CONNECTIONS_PROPERTY , maxR2ConnectionPoolSize ) ) ; } if ( props . containsKey ( ClientConfig . ROUTING_TIMEOUT_MS_PROPERTY ) ) this . setTimeoutMs ( props . getLong ( ClientConfig . ROUTING_TIMEOUT_MS_PROPERTY , timeoutMs ) , TimeUnit . MILLISECONDS ) ; // By default , make all the timeouts equal to routing timeout
timeoutConfig = new TimeoutConfig ( timeoutMs , false ) ; if ( props . containsKey ( ClientConfig . GETALL_ROUTING_TIMEOUT_MS_PROPERTY ) ) timeoutConfig . setOperationTimeout ( VoldemortOpCode . GET_ALL_OP_CODE , props . getInt ( ClientConfig . GETALL_ROUTING_TIMEOUT_MS_PROPERTY ) ) ; if ( props . containsKey ( ClientConfig . GET_ROUTING_TIMEOUT_MS_PROPERTY ) ) timeoutConfig . setOperationTimeout ( VoldemortOpCode . GET_OP_CODE , props . getInt ( ClientConfig . GET_ROUTING_TIMEOUT_MS_PROPERTY ) ) ; if ( props . containsKey ( ClientConfig . PUT_ROUTING_TIMEOUT_MS_PROPERTY ) ) { long putTimeoutMs = props . getInt ( ClientConfig . PUT_ROUTING_TIMEOUT_MS_PROPERTY ) ; timeoutConfig . setOperationTimeout ( VoldemortOpCode . PUT_OP_CODE , putTimeoutMs ) ; // By default , use the same thing for getVersions ( ) also
timeoutConfig . setOperationTimeout ( VoldemortOpCode . GET_VERSION_OP_CODE , putTimeoutMs ) ; } // of course , if someone overrides it , we will respect that
if ( props . containsKey ( ClientConfig . GET_VERSIONS_ROUTING_TIMEOUT_MS_PROPERTY ) ) timeoutConfig . setOperationTimeout ( VoldemortOpCode . GET_VERSION_OP_CODE , props . getInt ( ClientConfig . GET_VERSIONS_ROUTING_TIMEOUT_MS_PROPERTY ) ) ; if ( props . containsKey ( ClientConfig . DELETE_ROUTING_TIMEOUT_MS_PROPERTY ) ) timeoutConfig . setOperationTimeout ( VoldemortOpCode . DELETE_OP_CODE , props . getInt ( ClientConfig . DELETE_ROUTING_TIMEOUT_MS_PROPERTY ) ) ; if ( props . containsKey ( ClientConfig . ALLOW_PARTIAL_GETALLS_PROPERTY ) ) timeoutConfig . setPartialGetAllAllowed ( props . getBoolean ( ClientConfig . ALLOW_PARTIAL_GETALLS_PROPERTY ) ) ; |
public class Counters { /** * Returns a comparator suitable for sorting this Counter ' s keys or entries by
* their respective value or magnitude ( by absolute value ) . If
* < tt > ascending < / tt > is true , smaller magnitudes will be returned first ,
* otherwise higher magnitudes will be returned first .
* Sample usage :
* < pre >
* Counter c = new Counter ( ) ;
* / / add to the counter . . .
* List biggestKeys = new ArrayList ( c . keySet ( ) ) ;
* Collections . sort ( biggestKeys , Counters . comparator ( c , false , true ) ) ;
* List smallestEntries = new ArrayList ( c . entrySet ( ) ) ;
* Collections . sort ( smallestEntries , Counters . comparator ( c , true , false ) ) ;
* < / pre > */
public static < E > Comparator < E > toComparator ( final Counter < E > counter , final boolean ascending , final boolean useMagnitude ) { } } | return new Comparator < E > ( ) { public int compare ( E o1 , E o2 ) { if ( ascending ) { if ( useMagnitude ) { return Double . compare ( Math . abs ( counter . getCount ( o1 ) ) , Math . abs ( counter . getCount ( o2 ) ) ) ; } else { return Double . compare ( counter . getCount ( o1 ) , counter . getCount ( o2 ) ) ; } } else { // Descending
if ( useMagnitude ) { return Double . compare ( Math . abs ( counter . getCount ( o2 ) ) , Math . abs ( counter . getCount ( o1 ) ) ) ; } else { return Double . compare ( counter . getCount ( o2 ) , counter . getCount ( o1 ) ) ; } } } } ; |
public class AsaCalculator { /** * Returns the 2 - dimensional array with neighbor indices for every atom .
* @ return 2 - dimensional array of size : n _ atoms x n _ neighbors _ per _ atom */
int [ ] [ ] findNeighborIndices ( ) { } } | // looking at a typical protein case , number of neighbours are from ~ 10 to ~ 50 , with an average of ~ 30
int initialCapacity = 60 ; int [ ] [ ] nbsIndices = new int [ atomCoords . length ] [ ] ; for ( int k = 0 ; k < atomCoords . length ; k ++ ) { double radius = radii [ k ] + probe + probe ; List < Integer > thisNbIndices = new ArrayList < > ( initialCapacity ) ; for ( int i = 0 ; i < atomCoords . length ; i ++ ) { if ( i == k ) continue ; double dist = atomCoords [ i ] . distance ( atomCoords [ k ] ) ; if ( dist < radius + radii [ i ] ) { thisNbIndices . add ( i ) ; } } int [ ] indicesArray = new int [ thisNbIndices . size ( ) ] ; for ( int i = 0 ; i < thisNbIndices . size ( ) ; i ++ ) indicesArray [ i ] = thisNbIndices . get ( i ) ; nbsIndices [ k ] = indicesArray ; } return nbsIndices ; |
public class BasicExpressionParser { /** * Parse expression .
* @ param sqlStatement SQL statement
* @ return expression */
public SQLExpression parse ( final SQLStatement sqlStatement ) { } } | int beginPosition = lexerEngine . getCurrentToken ( ) . getEndPosition ( ) ; SQLExpression result = parseExpression ( sqlStatement ) ; if ( result instanceof SQLPropertyExpression ) { setTableToken ( sqlStatement , beginPosition , ( SQLPropertyExpression ) result ) ; } return result ; |
public class HttpBuilder { /** * Executes a HEAD request on the configured URI , with additional configuration provided by the configuration function . The result will be cast to
* the specified ` type ` .
* This method is generally used for Java - specific configuration .
* [ source , groovy ]
* HttpBuilder http = HttpBuilder . configure ( config - > {
* config . getRequest ( ) . setUri ( " http : / / localhost : 10101 " ) ;
* } ) ;
* String result = http . head ( String . class , config - > {
* config . getRequest ( ) . getUri ( ) . setPath ( " / foo " ) ;
* } ) ;
* The ` configuration ` { @ link Consumer } allows additional configuration for this request based on the { @ link HttpConfig } interface .
* @ param type the type of the response content
* @ param configuration the additional configuration function ( delegated to { @ link HttpConfig } )
* @ return the resulting content cast to the specified type */
public < T > T head ( final Class < T > type , final Consumer < HttpConfig > configuration ) { } } | return type . cast ( interceptors . get ( HttpVerb . HEAD ) . apply ( configureRequest ( type , HttpVerb . HEAD , configuration ) , this :: doHead ) ) ; |
public class XMLServiceDocumentWriter { /** * This writes all singletons in the entity data model as a collection of " metadata : singleton " elements .
* @ param writer which writes to stream .
* @ throws XMLStreamException in case of any xml errors
* @ throws ODataRenderException if entity container is null . */
private void writeSingleton ( XMLStreamWriter writer ) throws XMLStreamException , ODataRenderException { } } | List < Singleton > singletons = getEntityContainer ( ) . getSingletons ( ) ; LOG . debug ( "Number of singletons to be written in service document are {}" , singletons . size ( ) ) ; for ( Singleton singleton : singletons ) { writeElement ( writer , METADATA , SERVICE_SINGLETON , ODATA_METADATA_NS , singleton . getName ( ) , singleton . getName ( ) ) ; } |
public class UserTunnel { /** * Returns the ActiveConnection object associated with this tunnel within
* the AuthenticationProvider and UserContext which created the tunnel . If
* the AuthenticationProvider is not tracking active connections , or this
* tunnel is no longer active , this will be null .
* @ return
* The ActiveConnection object associated with this tunnel , or null if
* this tunnel is no longer active or the AuthenticationProvider which
* created the tunnel is not tracking active connections .
* @ throws GuacamoleException
* If an error occurs which prevents retrieval of the user ' s current
* active connections . */
public ActiveConnection getActiveConnection ( ) throws GuacamoleException { } } | // Pull the UUID of the current tunnel
UUID uuid = getUUID ( ) ; // Get the directory of active connections
Directory < ActiveConnection > activeConnectionDirectory = userContext . getActiveConnectionDirectory ( ) ; Collection < String > activeConnectionIdentifiers = activeConnectionDirectory . getIdentifiers ( ) ; // Search all connections for a tunnel which matches this tunnel
for ( ActiveConnection activeConnection : activeConnectionDirectory . getAll ( activeConnectionIdentifiers ) ) { // If we lack access , continue with next tunnel
GuacamoleTunnel tunnel = activeConnection . getTunnel ( ) ; if ( tunnel == null ) continue ; // Tunnels are equivalent if they have the same UUID
if ( uuid . equals ( tunnel . getUUID ( ) ) ) return activeConnection ; } // No active connection associated with this tunnel
return null ; |
public class Channel { /** * Enables access to event mechanism of a channel and is normally not used by clients directly .
* @ param evt the Event to send to a specific protocol layer .
* @ return a response from a particular protocol layer targeted by Event parameter */
public Object down ( Event evt ) { } } | if ( evt . type ( ) == 1 ) // MSG
return ch . down ( ( Message ) evt . getArg ( ) ) ; return ch . down ( evt ) ; |
public class JsonBuilderHandler { /** * This method removes all the JSON that is not mapped to a
* method inside the JsonBuilder object .
* Also , if the proxy contains another JsonBuilder in its methods ,
* the method strip ( ) is called . */
private void stripProxy ( JsonBuilder proxy ) throws Throwable { } } | Class < ? > type = proxy . getClass ( ) . getInterfaces ( ) [ 0 ] ; HashSet < String > validAttrs = getAttributeNames ( type . getMethods ( ) ) ; Hashtable < String , Method > ispropertyGetters = getJsonBuilders ( type . getMethods ( ) ) ; for ( String key : jsonObject . keys ( ) ) { String name = methodName2AttrName ( key ) ; if ( ! validAttrs . contains ( name ) ) { jsonObject . remove ( key ) ; continue ; } Method ispropertyGetter = ispropertyGetters . get ( name ) ; if ( ispropertyGetter != null ) { ( ( IsProperties ) invoke ( proxy , ispropertyGetter , new Object [ ] { } ) ) . strip ( ) ; } } |
public class ManagedHotdeploy { public static synchronized ClassLoader start ( ) { } } | if ( isHotdeploy ( ) ) { final ClassLoader originalLoader = getThreadContextClassLoader ( ) ; if ( isAnotherThreadHotdeploy ( ) ) { // e . g . job started
inheritAnotherThreadClassLoader ( ) ; // to use same loader
} else { // normally here
// _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ / _ /
// remove this if - statement to avoid context class - loader being null by jflute ( 2017/12/17)
// if stop ( ) without start ( ) , context class - loader becomes null
// if hot - deploy process makes new thread , the thread inherits
// hot - deploy class - loader as context class - loader
// so this if - statement causes stop ( ) without start ( )
// ( though hot - deploy class - loader may wrap hot - deploy class - loader , but no problem ? )
// _ / _ / _ / _ / _ / _ / _ / _ / _ / _ /
// if ( ! isThreadContextHotdeploy ( ) ) {
HotdeployUtil . start ( ) ; } ++ hotdeployCount ; return originalLoader ; } else { return null ; } |
public class JSONTokener { /** * Get the next character from the source string .
* @ return The next character , or 0 if past the end of the source string .
* @ throws JSONException JSON exception wrapping an IOException */
public char next ( ) throws JSONException { } } | int c ; if ( this . usePrevious ) { this . usePrevious = false ; c = this . previous ; } else { try { c = this . reader . read ( ) ; } catch ( IOException exception ) { throw new JSONException ( exception ) ; } if ( c <= 0 ) { // End of stream
this . eof = true ; c = 0 ; } } this . index += 1 ; if ( this . previous == '\r' ) { this . line += 1 ; this . character = c == '\n' ? 0 : 1 ; } else if ( c == '\n' ) { this . line += 1 ; this . character = 0 ; } else { this . character += 1 ; } this . previous = ( char ) c ; return this . previous ; |
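A small sketch of driving next() in a loop until it signals end of input with 0; the JSONTokener(String) constructor is an assumption here and may differ in this library.

// Consume the source character by character.
JSONTokener tokener = new JSONTokener("{\"a\":1}");   // constructor assumed for illustration
StringBuilder consumed = new StringBuilder();
char ch;
while ((ch = tokener.next()) != 0) {
    consumed.append(ch);
}
System.out.println(consumed);   // echoes the raw JSON text that was read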
public class AbstractRedisStorage { /** * Get the names of all of the < code > { @ link org . quartz . Calendar } < / code > s in the < code > JobStore < / code > .
* @ param jedis a thread - safe Redis connection
* @ return the names of all calendars or an empty list if no calendars exist */
public List < String > getCalendarNames ( T jedis ) { } } | final Set < String > calendarsSet = jedis . smembers ( redisSchema . calendarsSet ( ) ) ; List < String > calendars = new ArrayList < > ( calendarsSet . size ( ) ) ; for ( String group : calendarsSet ) { calendars . add ( redisSchema . calendarName ( group ) ) ; } return calendars ; |
public class AbstractCommonShapeFileWriter { /** * Flush temp buffers , write the final information into the
* file header ( file size . . . ) , and close the streams .
* @ throws IOException in case of error . */
@ Override @ SuppressWarnings ( "resource" ) public void close ( ) throws IOException { } } | flush ( ) ; if ( this . tempStream != null ) { this . tempStream . close ( ) ; this . tempStream = null ; } if ( this . stream != null && this . stream . isOpen ( ) ) { // Copy the channels
if ( this . buffer != null && this . tempFile != null ) { try { try ( ReadableByteChannel in = Channels . newChannel ( new FileInputStream ( this . tempFile ) ) ) { final ByteBuffer hbuffer = ByteBuffer . allocate ( 100 ) ; in . read ( hbuffer ) ; hbuffer . limit ( 100 ) ; hbuffer . position ( 6 * 4 ) ; hbuffer . order ( ByteOrder . BIG_ENDIAN ) ; hbuffer . putInt ( toESRIWords ( this . bufferPosition ) ) ; hbuffer . rewind ( ) ; this . stream . write ( hbuffer ) ; int nbRead ; this . buffer . rewind ( ) ; while ( ( nbRead = in . read ( this . buffer ) ) >= 0 ) { this . buffer . rewind ( ) ; this . buffer . limit ( nbRead ) ; this . stream . write ( this . buffer ) ; this . buffer . rewind ( ) ; this . buffer . limit ( this . buffer . capacity ( ) ) ; } } } finally { this . tempFile . delete ( ) ; this . tempFile = null ; } } this . stream . close ( ) ; } |
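close() rewrites the record at byte offset 24 (6 * 4) of the header with the final file size. In the ESRI shapefile format that field is stored big-endian and measured in 16-bit words, so a plausible toESRIWords helper is simply a halving of the byte length; this is a sketch under that assumption, not the class's actual implementation.

// Shapefile header: file length at offset 24, expressed in 16-bit words (2 bytes each).
static int toESRIWords(int lengthInBytes) {
    return lengthInBytes / 2;
}
// e.g. a 1000-byte file is recorded as 500 words.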
public class GSMXImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setMODE ( Integer newMODE ) { } } | Integer oldMODE = mode ; mode = newMODE ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . GSMX__MODE , oldMODE , mode ) ) ; |
public class CProductPersistenceImpl { /** * Returns the c product where uuid = ? and groupId = ? or returns < code > null < / code > if it could not be found . Uses the finder cache .
* @ param uuid the uuid
* @ param groupId the group ID
* @ return the matching c product , or < code > null < / code > if a matching c product could not be found */
@ Override public CProduct fetchByUUID_G ( String uuid , long groupId ) { } } | return fetchByUUID_G ( uuid , groupId , true ) ; |
public class LTPAKeyFileUtilityImpl { /** * Obtain the OutputStream for the given file .
* @ param keyFile
* @ return
* @ throws IOException */
private OutputStream getOutputStream ( final String keyFile ) throws IOException { } } | try { return AccessController . doPrivileged ( new PrivilegedExceptionAction < OutputStream > ( ) { @ Override public OutputStream run ( ) throws IOException { return new FileOutputStream ( new File ( keyFile ) ) ; } } ) ; } catch ( PrivilegedActionException e ) { // Wrap the wrapped IOException from doPriv in an IOException and re - throw
throw new IOException ( e . getCause ( ) ) ; } |
public class Authorization { /** * Checks if the User passed has access to the alias to perform an action
* < ul >
* < li > When Messaging Security is disabled , it always returns true < / li >
* < li > When Messaging Security is enabled , it calls
* MessagingAuthorizationService to check for access < / li >
* < / ul >
* @ param authenticatedSubject
* Subject got after authenticating the user
* @ param destination
* Target Destination for the alias destination
* @ param destinationType
* @ param aliasDestination
* Alias Destination which user is requesting to access
* @ param operationType
* Type of operation ( SEND , RECEIVE , BROWSE )
* @ return true : If the User is authorized
* false : If the User is not authorized */
public boolean checkAliasAccess ( Subject authenticatedSubject , String destination , String aliasDestination , int destinationType , String operationType ) throws MessagingAuthorizationException { } } | SibTr . entry ( tc , CLASS_NAME + "checkAliasAccess" , new Object [ ] { authenticatedSubject , destination , operationType , aliasDestination } ) ; boolean result = false ; if ( ! runtimeSecurityService . isMessagingSecure ( ) ) { result = true ; } else { if ( messagingAuthorizationService != null ) { result = messagingAuthorizationService . checkAliasAccess ( authenticatedSubject , destination , aliasDestination , destinationType , operationType , true ) ; } } SibTr . exit ( tc , CLASS_NAME + "checkAliasAccess" , result ) ; return result ; |
public class DateTime { /** * Returns a copy of this datetime minus the specified number of years .
* The calculation will do its best to only change the year field
* retaining the same month of year .
* However , in certain circumstances , it may be necessary to alter
* smaller fields . For example , 2008-02-29 minus one year cannot result
* in 2007-02-29 , so the day of month is adjusted to 2007-02-28.
* The following three lines are identical in effect :
* < pre >
* DateTime subtracted = dt . minusYears ( 6 ) ;
* DateTime subtracted = dt . minus ( Period . years ( 6 ) ) ;
* DateTime subtracted = dt . withFieldAdded ( DurationFieldType . years ( ) , - 6 ) ;
* < / pre >
* This datetime instance is immutable and unaffected by this method call .
* @ param years the amount of years to subtract , may be negative
* @ return the new datetime minus the specified years
* @ since 1.1 */
public DateTime minusYears ( int years ) { } } | if ( years == 0 ) { return this ; } long instant = getChronology ( ) . years ( ) . subtract ( getMillis ( ) , years ) ; return withMillis ( instant ) ; |
public class DefaultComposedValueConverter { /** * @ see # addConverter ( ValueConverter )
* @ param converter is the converter to add . */
public void addConverterComponent ( ValueConverter < ? , ? > converter ) { } } | if ( converter instanceof AbstractRecursiveValueConverter ) { ( ( AbstractRecursiveValueConverter < ? , ? > ) converter ) . setComposedValueConverter ( this ) ; } if ( converter instanceof AbstractComponent ) { ( ( AbstractComponent ) converter ) . initialize ( ) ; } addConverter ( converter ) ; |
public class Resolve { /** * Resolve constructor using diamond inference .
* @ param pos The position to use for error reporting .
* @ param env The environment current at the constructor invocation .
* @ param site The type of class for which a constructor is searched .
* The scope of this class has been touched in attribution .
* @ param argtypes The types of the constructor invocation ' s value
* arguments .
* @ param typeargtypes The types of the constructor invocation ' s type
* arguments . */
Symbol resolveDiamond ( DiagnosticPosition pos , Env < AttrContext > env , Type site , List < Type > argtypes , List < Type > typeargtypes ) { } } | return lookupMethod ( env , pos , site . tsym , resolveMethodCheck , new BasicLookupHelper ( names . init , site , argtypes , typeargtypes ) { @ Override Symbol doLookup ( Env < AttrContext > env , MethodResolutionPhase phase ) { return findDiamond ( env , site , argtypes , typeargtypes , phase . isBoxingRequired ( ) , phase . isVarargsRequired ( ) ) ; } @ Override Symbol access ( Env < AttrContext > env , DiagnosticPosition pos , Symbol location , Symbol sym ) { if ( sym . kind >= AMBIGUOUS ) { if ( sym . kind != WRONG_MTH && sym . kind != WRONG_MTHS ) { sym = super . access ( env , pos , location , sym ) ; } else { final JCDiagnostic details = sym . kind == WRONG_MTH ? ( ( InapplicableSymbolError ) sym . baseSymbol ( ) ) . errCandidate ( ) . snd : null ; sym = new InapplicableSymbolError ( sym . kind , "diamondError" , currentResolutionContext ) { @ Override JCDiagnostic getDiagnostic ( DiagnosticType dkind , DiagnosticPosition pos , Symbol location , Type site , Name name , List < Type > argtypes , List < Type > typeargtypes ) { String key = details == null ? "cant.apply.diamond" : "cant.apply.diamond.1" ; return diags . create ( dkind , log . currentSource ( ) , pos , key , diags . fragment ( "diamond" , site . tsym ) , details ) ; } } ; sym = accessMethod ( sym , pos , site , names . init , true , argtypes , typeargtypes ) ; env . info . pendingResolutionPhase = currentResolutionContext . step ; } } return sym ; } } ) ; |
public class IndexChangeAdapters { /** * Create an { @ link IndexChangeAdapter } implementation that handles the " mode : localName " property .
* @ param context the execution context ; may not be null
* @ param matcher the node type matcher used to determine which nodes should be included in the index ; may not be null
* @ param workspaceName the name of the workspace ; may not be null
* @ param index the local index that should be used ; may not be null
* @ return the new { @ link IndexChangeAdapter } ; never null */
public static IndexChangeAdapter forNodeLocalName ( ExecutionContext context , NodeTypePredicate matcher , String workspaceName , ProvidedIndex < ? > index ) { } } | return new NodeLocalNameChangeAdapter ( context , matcher , workspaceName , index ) ; |
public class LCMSData { /** * Releases all memory by calling { @ link LCMSDataSource # releaseMemory ( ) } and { @ link
* IScanCollection # reset ( ) } . Effectively , you get this object to the same state as it was after
* calling the constructor ; however , any ScanCollection configuration is preserved ( e . g . the spectra
* auto - loading setting ) .
* < b > IMPORTANT : will clear the registry of loaded subsets without warning ! < / b >
* It ' s up to you to make sure you don ' t call this method while some component is still using the
* data . */
public synchronized void releaseMemory ( ) { } } | isReleasingMemory = true ; userPhantomRefs . clear ( ) ; cache . invalidateAll ( ) ; scans . reset ( ) ; source . releaseMemory ( ) ; // loadedSubsets . clear ( ) ;
isReleasingMemory = false ; |
public class CmsDefaultXmlContentHandler { /** * Initializes the relation configuration for this content handler . < p >
* OpenCms performs link checks for all OPTIONAL links defined in XML content values of type
* OpenCmsVfsFile . However , for most projects in the real world a more fine - grained control
* over the link check process is required . For these cases , individual relation behavior can
* be defined for the appinfo node . < p >
* Additionally , an optional type for the relations can be defined here , for instance . < p >
* @ param root the " relations " element from the appinfo node of the XML content definition
* @ param contentDefinition the content definition the check rules belong to
* @ throws CmsXmlException if something goes wrong */
protected void initRelations ( Element root , CmsXmlContentDefinition contentDefinition ) throws CmsXmlException { } } | Iterator < Element > i = CmsXmlGenericWrapper . elementIterator ( root , APPINFO_RELATION ) ; while ( i . hasNext ( ) ) { // iterate all " relation " elements in the " relations " node
Element element = i . next ( ) ; String elementName = element . attributeValue ( APPINFO_ATTR_ELEMENT ) ; String invalidate = element . attributeValue ( APPINFO_ATTR_INVALIDATE ) ; if ( invalidate != null ) { invalidate = invalidate . toUpperCase ( ) ; } String type = element . attributeValue ( APPINFO_ATTR_TYPE ) ; if ( type != null ) { type = type . toLowerCase ( ) ; } if ( elementName != null ) { // add a check rule for the element
addCheckRule ( contentDefinition , elementName , invalidate , type ) ; } } |
public class TileDaoUtils { /** * Get the zoom level for the provided width and height in the default units
* @ param widths
* sorted widths
* @ param heights
* sorted heights
* @ param tileMatrices
* tile matrices
* @ param width
* width in default units
* @ param height
* height in default units
* @ param lengthChecks
* perform length checks for values too far away from the zoom
* level
* @ return tile matrix zoom level
* @ since 1.2.1 */
private static Long getZoomLevel ( double [ ] widths , double [ ] heights , List < TileMatrix > tileMatrices , double width , double height , boolean lengthChecks ) { } } | Long zoomLevel = null ; // Find where the width and height fit in
int widthIndex = Arrays . binarySearch ( widths , width ) ; if ( widthIndex < 0 ) { widthIndex = ( widthIndex + 1 ) * - 1 ; } int heightIndex = Arrays . binarySearch ( heights , height ) ; if ( heightIndex < 0 ) { heightIndex = ( heightIndex + 1 ) * - 1 ; } // Find the closest width or verify it isn ' t too small or large
if ( widthIndex == 0 ) { if ( lengthChecks && width < getMinLength ( widths ) ) { widthIndex = - 1 ; } } else if ( widthIndex == widths . length ) { if ( lengthChecks && width >= getMaxLength ( widths ) ) { widthIndex = - 1 ; } else { widthIndex = widthIndex - 1 ; } } else if ( closerToZoomIn ( widths , width , widthIndex ) ) { widthIndex -- ; } // Find the closest height or verify it isn ' t too small or large
if ( heightIndex == 0 ) { if ( lengthChecks && height < getMinLength ( heights ) ) { heightIndex = - 1 ; } } else if ( heightIndex == heights . length ) { if ( lengthChecks && height >= getMaxLength ( heights ) ) { heightIndex = - 1 ; } else { heightIndex = heightIndex - 1 ; } } else if ( closerToZoomIn ( heights , height , heightIndex ) ) { heightIndex -- ; } if ( widthIndex >= 0 || heightIndex >= 0 ) { // Use one zoom size smaller if possible
int index ; if ( widthIndex < 0 ) { index = heightIndex ; } else if ( heightIndex < 0 ) { index = widthIndex ; } else { index = Math . min ( widthIndex , heightIndex ) ; } TileMatrix tileMatrix = getTileMatrixAtLengthIndex ( tileMatrices , index ) ; zoomLevel = tileMatrix . getZoomLevel ( ) ; } return zoomLevel ; |
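getZoomLevel leans on the Arrays.binarySearch contract: a missing key returns -(insertionPoint) - 1, so (index + 1) * -1 recovers the insertion point before the closest-match checks. A tiny standalone illustration with hypothetical tile-matrix widths:

import java.util.Arrays;

double[] widths = {10.0, 20.0, 40.0, 80.0};   // sorted widths, hypothetical values
double width = 25.0;
int idx = Arrays.binarySearch(widths, width); // returns -3 because 25.0 is absent
if (idx < 0) {
    idx = (idx + 1) * -1;                     // insertion point: 2 (between 20.0 and 40.0)
}
System.out.println(idx);                      // 2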
public class Solo { /** * Returns a TextView matching the specified index .
* @ param index the index of the { @ link TextView } . { @ code 0 } if only one is available
* @ return a { @ link TextView } matching the specified index */
public TextView getText ( int index ) { } } | if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "getText(" + index + ")" ) ; } return getter . getView ( TextView . class , index ) ; |
public class Entity { /** * Returns true if the provided property is a persistent property ; false , otherwise .
* @ return
* { @ code true } if the property is persistent ; { @ code false } otherwise */
public boolean isPersistentProperty ( String propName ) { } } | if ( transientProperties == null ) { setTransientPropertyNames ( ) ; } if ( transientProperties . contains ( propName ) ) { return false ; } else { return true ; } |
public class JunitNotifier { /** * ( non - Javadoc )
* @ see
* com . technophobia . substeps . runner . AbstractBaseNotifier # handleNotifyNodeFailed
* ( com . technophobia . substeps . execution . ExecutionNode , java . lang . Throwable ) */
public void onNodeFailed ( final IExecutionNode node , final Throwable cause ) { } } | final Description description = descriptionMap . get ( Long . valueOf ( node . getId ( ) ) ) ; notifyTestFailed ( description , cause ) ; |
public class InternalService { /** * Returns observable to remove list of participants from a conversation .
* @ param conversationId ID of the conversation to remove participants from .
* @ param ids List of participant ids to be removed .
* @ return Observable to remove list of participants from a conversation . */
public Observable < ComapiResult < Void > > removeParticipants ( @ NonNull final String conversationId , @ NonNull final List < String > ids ) { } } | final String token = getToken ( ) ; if ( sessionController . isCreatingSession ( ) ) { return getTaskQueue ( ) . queueRemoveParticipants ( conversationId , ids ) ; } else if ( TextUtils . isEmpty ( token ) ) { return Observable . error ( getSessionStateErrorDescription ( ) ) ; } else { return doRemoveParticipants ( token , conversationId , ids ) ; } |
public class ViewPropertyAnimatorPreHC { /** * This method gets the value of the named property from the View object .
* @ param propertyConstant The property whose value should be returned
* @ return float The value of the named property */
private float getValue ( int propertyConstant ) { } } | // final View . TransformationInfo info = mView . mTransformationInfo ;
switch ( propertyConstant ) { case TRANSLATION_X : // return info . mTranslationX ;
return mProxy . getTranslationX ( ) ; case TRANSLATION_Y : // return info . mTranslationY ;
return mProxy . getTranslationY ( ) ; case ROTATION : // return info . mRotation ;
return mProxy . getRotation ( ) ; case ROTATION_X : // return info . mRotationX ;
return mProxy . getRotationX ( ) ; case ROTATION_Y : // return info . mRotationY ;
return mProxy . getRotationY ( ) ; case SCALE_X : // return info . mScaleX ;
return mProxy . getScaleX ( ) ; case SCALE_Y : // return info . mScaleY ;
return mProxy . getScaleY ( ) ; case X : // return mView . mLeft + info . mTranslationX ;
return mProxy . getX ( ) ; case Y : // return mView . mTop + info . mTranslationY ;
return mProxy . getY ( ) ; case ALPHA : // return info . mAlpha ;
return mProxy . getAlpha ( ) ; } return 0 ; |
public class ZMQ { /** * Starts the built - in 0MQ proxy in the current application thread .
* The proxy connects a frontend socket to a backend socket . Conceptually , data flows from frontend to backend .
* Depending on the socket types , replies may flow in the opposite direction . The direction is conceptual only ;
* the proxy is fully symmetric and there is no technical difference between frontend and backend .
* Before calling ZMQ . proxy ( ) you must set any socket options , and connect or bind both frontend and backend sockets .
* The two conventional proxy models are :
* ZMQ . proxy ( ) runs in the current thread and returns only if / when the current context is closed .
* @ param frontend ZMQ . Socket
* @ param backend ZMQ . Socket
* @ param capture If the capture socket is not NULL , the proxy shall send all messages , received on both
* frontend and backend , to the capture socket . The capture socket should be a
* ZMQ _ PUB , ZMQ _ DEALER , ZMQ _ PUSH , or ZMQ _ PAIR socket . */
public static boolean proxy ( Socket frontend , Socket backend , Socket capture ) { } } | return zmq . ZMQ . proxy ( frontend . base , backend . base , capture != null ? capture . base : null ) ; |
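A minimal sketch of the conventional shared-queue arrangement for proxy(): bind a ROUTER frontend for clients and a DEALER backend for workers, then hand both to proxy(), which blocks until the context is terminated. Endpoints and socket types are illustrative, not mandated by the API.

import org.zeromq.ZMQ;

ZMQ.Context context = ZMQ.context(1);
ZMQ.Socket frontend = context.socket(ZMQ.ROUTER);
frontend.bind("tcp://*:5559");                // clients connect here
ZMQ.Socket backend = context.socket(ZMQ.DEALER);
backend.bind("tcp://*:5560");                 // workers connect here
ZMQ.proxy(frontend, backend, null);           // blocks; returns when the context is closed
frontend.close();
backend.close();
context.term();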
public class JdbcStoreConfigurationParser60 { /** * This method is public static so that it can be reused by custom cache store / loader configuration parsers */
public static void parseCommonLoaderAttributes ( XMLExtendedStreamReader reader , int i , StoreConfigurationBuilder < ? , ? > builder ) throws XMLStreamException { } } | throw ParseUtils . unexpectedAttribute ( reader , i ) ; |
public class CmsGwtDialogExtension { /** * Opens the publish dialog for the given project . < p >
* @ param project the project for which to open the dialog
* @ param directPublishResources the resources for which to open the publish dialog . */
protected void openPublishDailog ( CmsProject project , List < CmsResource > directPublishResources ) { } } | CmsPublishData publishData = getPublishData ( project , directPublishResources ) ; String data = getSerializedPublishData ( publishData ) ; getRpcProxy ( I_CmsGwtDialogClientRpc . class ) . openPublishDialog ( data ) ; |
public class QueryImpl { /** * ( non - Javadoc )
* @ see
* javax . persistence . Query # getParameterValue ( javax . persistence . Parameter ) */
@ Override public < T > T getParameterValue ( Parameter < T > paramParameter ) { } } | Object value = kunderaQuery . getClauseValue ( paramParameter ) ; if ( value == null ) { throw new IllegalStateException ( "parameter has not been bound " + paramParameter ) ; } return ( T ) value ;
public class HttpClientChannelInitializer { /** * Use netty proxy handler only if scheme is https */
private void configureProxyServer ( ChannelPipeline clientPipeline ) { } } | if ( proxyServerConfiguration != null && sslConfig != null ) { if ( proxyServerConfiguration . getProxyUsername ( ) != null && proxyServerConfiguration . getProxyPassword ( ) != null ) { clientPipeline . addLast ( Constants . PROXY_HANDLER , new HttpProxyHandler ( proxyServerConfiguration . getInetSocketAddress ( ) , proxyServerConfiguration . getProxyUsername ( ) , proxyServerConfiguration . getProxyPassword ( ) ) ) ; } else { clientPipeline . addLast ( Constants . PROXY_HANDLER , new HttpProxyHandler ( proxyServerConfiguration . getInetSocketAddress ( ) ) ) ; } } |
public class CDIInterceptorWrapperImpl { /** * Liberty change begin */
@ FFDCIgnore ( PrivilegedActionException . class ) private static Class < ? > getFTAnnotationClass ( ) { } } | try { return AccessController . doPrivileged ( ( PrivilegedExceptionAction < Class < ? > > ) ( ) -> { return Class . forName ( "com.ibm.ws.microprofile.faulttolerance.cdi.FaultTolerance" ) ; } ) ; } catch ( PrivilegedActionException pae ) { if ( LOG . isLoggable ( Level . FINEST ) ) { LOG . log ( Level . FINEST , "Exception checking for MP Fault Tolerance class - " + "expected if FT feature is not enabled" , pae ) ; } return null ; }
public class Cells { /** * Returns the { @ code Integer } value of the { @ link Cell } ( associated to { @ code table } ) whose name is cellName , or
* null if this Cells object contains no cell whose name is cellName .
* @ param nameSpace the name of the owning table
* @ param cellName the name of the Cell we want to retrieve from this Cells object .
* @ return the { @ code Integer } value of the { @ link Cell } ( associated to { @ code table } ) whose name is cellName , or
* null if this Cells object contains no cell whose name is cellName */
public Integer getInteger ( String nameSpace , String cellName ) { } } | return getValue ( nameSpace , cellName , Integer . class ) ; |
public class Lexer { /** * / * \ M - , \ C - , \ c , or \ . . . */
private void fetchEscapedValue ( ) { } } | if ( ! left ( ) ) newSyntaxException ( END_PATTERN_AT_ESCAPE ) ; fetch ( ) ; switch ( c ) { case 'M' : if ( syntax . op2EscCapitalMBarMeta ( ) ) { if ( ! left ( ) ) newSyntaxException ( END_PATTERN_AT_META ) ; fetch ( ) ; if ( c != '-' ) newSyntaxException ( META_CODE_SYNTAX ) ; if ( ! left ( ) ) newSyntaxException ( END_PATTERN_AT_META ) ; fetch ( ) ; if ( c == syntax . metaCharTable . esc ) fetchEscapedValue ( ) ; c = ( ( c & 0xff ) | 0x80 ) ; } else { fetchEscapedValueBackSlash ( ) ; } break ; case 'C' : if ( syntax . op2EscCapitalCBarControl ( ) ) { if ( ! left ( ) ) newSyntaxException ( END_PATTERN_AT_CONTROL ) ; fetch ( ) ; if ( c != '-' ) newSyntaxException ( CONTROL_CODE_SYNTAX ) ; fetchEscapedValueControl ( ) ; } else { fetchEscapedValueBackSlash ( ) ; } break ; case 'c' : if ( syntax . opEscCControl ( ) ) { fetchEscapedValueControl ( ) ; } /* fall through */
default : fetchEscapedValueBackSlash ( ) ; } // switch |
public class ImportSnapshotRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < ImportSnapshotRequest > getDryRunRequest ( ) { } } | Request < ImportSnapshotRequest > request = new ImportSnapshotRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class JsonParseUtil { /** * Parses the current token as a list of Enums .
* @ param < T >
* @ param parser
* @ param clazz
* @ return List of { @ link Enum } s
* @ throws IOException
* @ throws JsonFormatException
* @ author vvakame */
public static < T extends Enum < T > > List < T > parserEnumList ( JsonPullParser parser , Class < T > clazz ) throws IOException , JsonFormatException { } } | State eventType = parser . getEventType ( ) ; if ( eventType == State . VALUE_NULL ) { return null ; } if ( eventType != State . START_ARRAY ) { throw new IllegalStateException ( "not started brace!" ) ; } List < T > list = new ArrayList < T > ( ) ; while ( parser . lookAhead ( ) != State . END_ARRAY ) { eventType = parser . getEventType ( ) ; if ( eventType == State . VALUE_NULL ) { list . add ( null ) ; } else if ( eventType == State . VALUE_STRING ) { T obj = Enum . valueOf ( clazz , parser . getValueString ( ) ) ; list . add ( obj ) ; } else { throw new IllegalStateException ( "unexpected state. expected=VALUE_STRING, but get=" + eventType . toString ( ) ) ; } } parser . getEventType ( ) ; return list ; |
public class PEMKeyStore { /** * Persist the security material in this keystore . If the object has a path
* associated with it , the object will be persisted to that path . Otherwise
* it will be stored in the default certificate directory . As a result , the
* parameters of this method are ignored .
* @ param outputStream
* This parameter is ignored .
* @ param chars
* This parameter is ignored .
* @ throws IOException
* @ throws NoSuchAlgorithmException
* @ throws CertificateException */
@ Override public void engineStore ( OutputStream outputStream , char [ ] chars ) throws IOException , NoSuchAlgorithmException , CertificateException { } } | for ( SecurityObjectWrapper < ? > object : this . aliasObjectMap . values ( ) ) { if ( object instanceof Storable ) { try { ( ( Storable ) object ) . store ( ) ; } catch ( ResourceStoreException e ) { throw new CertificateException ( e ) ; } } } |
public class NanoHTTPD { /** * Override this to customize the server .
* ( By default , this delegates to serveFile ( ) and allows directory listing . )
* @ param session
* The HTTP session
* @ return HTTP response , see class Response for details */
public Response serve ( IHTTPSession session ) { } } | Map < String , String > files = new HashMap < > ( ) ; Method method = session . getMethod ( ) ; if ( Method . PUT . equals ( method ) || Method . POST . equals ( method ) ) { try { session . parseBody ( files ) ; } catch ( IOException ioe ) { return new Response ( Response . Status . INTERNAL_ERROR , MIME_PLAINTEXT , "SERVER INTERNAL ERROR: IOException: " + ioe . getMessage ( ) ) ; } catch ( ResponseException re ) { return new Response ( re . getStatus ( ) , MIME_PLAINTEXT , re . getMessage ( ) ) ; } } Map < String , String > parms = session . getParms ( ) ; parms . put ( QUERY_STRING_PARAMETER , session . getQueryParameterString ( ) ) ; return serve ( session . getUri ( ) , method , session . getHeaders ( ) , parms , files ) ; |
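A sketch of overriding serve() in a subclass, reusing the public Response constructor seen in this entry's implementation; the port-only constructor and no-arg start() are assumptions about this NanoHTTPD variant.

import java.io.IOException;

public class HelloServer extends NanoHTTPD {
    public HelloServer() throws IOException {
        super(8080);    // assumed port-only constructor
        start();        // assumed no-arg start()
    }

    @Override
    public Response serve(IHTTPSession session) {
        // Answer every request with a plain-text body.
        return new Response(Response.Status.OK, MIME_PLAINTEXT, "hello from " + session.getUri());
    }
}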
public class HtmlBuilder { /** * Build a String containing an HTML opening tag with the given CSS class and / or
* style and concatenates the given HTML content .
* @ param tag String name of HTML tag
* @ param clazz CSS class of the tag
* @ param style style for tag ( plain CSS )
* @ param content content string
* @ return HTML tag element as string */
public static String openTagHtmlContent ( String tag , String clazz , String style , String ... content ) { } } | return openTag ( tag , clazz , style , true , content ) ; |
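A hedged usage sketch of openTagHtmlContent; the exact markup depends on the underlying openTag helper, so the expected output is only indicated in general terms.

String html = HtmlBuilder.openTagHtmlContent("div", "box", "color:red", "Hello", " world");
// Expected general form (assumption): an opening <div> carrying class "box" and the inline
// style "color:red", followed by the concatenated content strings.
System.out.println(html);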