Columns: signature (string, lengths 43 to 39.1k), implementation (string, lengths 0 to 450k)
public class Slf4jAdapter { /** * { @ inheritDoc } */ @ Override public void error ( final MessageItem messageItem , final Object ... parameters ) { } }
if ( getLogger ( ) . isErrorEnabled ( messageItem . getMarker ( ) ) ) { getLogger ( ) . error ( messageItem . getMarker ( ) , messageItem . getText ( parameters ) ) ; } throwError ( messageItem , null , parameters ) ;
public class ApiOvhSupport { /** * Create a new ticket * REST : POST / support / tickets / create * @ param product [ required ] Ticket message product * @ param category [ required ] Ticket message category * @ param type [ required ] Ticket type ( criticalIntervention requires VIP support level ) * @ param body [ required ] Ticket message body * @ param serviceName [ required ] Ticket message service name * @ param subcategory [ required ] Ticket message subcategory * @ param subject [ required ] Ticket message subject */ public OvhNewMessageInfo tickets_create_POST ( String body , OvhTicketCategoryEnum category , OvhTicketProductEnum product , String serviceName , OvhTicketSubCategoryEnum subcategory , String subject , OvhTicketTypeEnum type ) throws IOException { } }
String qPath = "/support/tickets/create" ; StringBuilder sb = path ( qPath ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "body" , body ) ; addBody ( o , "category" , category ) ; addBody ( o , "product" , product ) ; addBody ( o , "serviceName" , serviceName ) ; addBody ( o , "subcategory" , subcategory ) ; addBody ( o , "subject" , subject ) ; addBody ( o , "type" , type ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhNewMessageInfo . class ) ;
public class LettuceCdiExtension { /** * Implementation of an observer which registers beans with the CDI container for the detected RedisURIs . * The repository beans are associated with the EntityManagers using their qualifiers . * @ param beanManager The BeanManager instance . */ void afterBeanDiscovery ( @ Observes AfterBeanDiscovery afterBeanDiscovery , BeanManager beanManager ) { } }
int counter = 0 ; for ( Entry < Set < Annotation > , Bean < RedisURI > > entry : redisUris . entrySet ( ) ) { Bean < RedisURI > redisUri = entry . getValue ( ) ; Set < Annotation > qualifiers = entry . getKey ( ) ; String clientBeanName = RedisClient . class . getSimpleName ( ) ; String clusterClientBeanName = RedisClusterClient . class . getSimpleName ( ) ; if ( ! containsDefault ( qualifiers ) ) { clientBeanName += counter ; clusterClientBeanName += counter ; counter ++ ; } Bean < ClientResources > clientResources = this . clientResources . get ( qualifiers ) ; RedisClientCdiBean clientBean = new RedisClientCdiBean ( redisUri , clientResources , beanManager , qualifiers , clientBeanName ) ; register ( afterBeanDiscovery , qualifiers , clientBean ) ; RedisClusterClientCdiBean clusterClientBean = new RedisClusterClientCdiBean ( redisUri , clientResources , beanManager , qualifiers , clusterClientBeanName ) ; register ( afterBeanDiscovery , qualifiers , clusterClientBean ) ; }
public class Pattern { /** * Parse a string containing a SQL - style pattern * @ param pattern the string containing the pattern * @ param escaped true if the string employs an escape character * @ param escape the escape character ( ignored if ! escaped ) * @ return an Object representing the result of the parse , as follows . * < ul > * < li > null if the pattern is syntactically invalid * < li > the pattern as a string if the pattern contains no unescaped special characters * and hence is better treated as an equality test . Any escape characters will * have been removed . * < li > the matchMany object if the pattern will match any string at all . This is * better handled as a " NOT NULL " test . * < li > the pattern as a Pattern object otherwise . * < / ul > */ public static Object parsePattern ( String pattern , boolean escaped , char escape ) { } }
char [ ] accum = new char [ pattern . length ( ) ] ; int finger = 0 ; List tokens = new ArrayList ( ) ; boolean trivial = true ; for ( int i = 0 ; i < pattern . length ( ) ; i ++ ) { char c = pattern . charAt ( i ) ; if ( c == sqlMatchOne ) { finger = flush ( accum , finger , tokens ) ; tokens . add ( matchOne ) ; trivial = false ; } else if ( c == sqlMatchMany ) { finger = flush ( accum , finger , tokens ) ; tokens . add ( matchMany ) ; trivial = false ; } else if ( escaped && c == escape ) if ( i == pattern . length ( ) - 1 ) return null ; else { i ++ ; accum [ finger ++ ] = pattern . charAt ( i ) ; } else accum [ finger ++ ] = c ; } if ( trivial ) return new String ( accum , 0 , finger ) ; flush ( accum , finger , tokens ) ; if ( tokens . size ( ) == 1 && tokens . get ( 0 ) == matchMany ) return matchMany ; return new Pattern ( tokens . iterator ( ) ) ;
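The four-way return contract described in the Javadoc above is easiest to see from the caller's side. A minimal dispatch sketch, assuming the matchMany sentinel is accessible as a static member of Pattern; the variable names and input are illustrative only:

Object parsed = Pattern.parsePattern("ABC%_", false, '\\');
if (parsed == null) {
    // syntactically invalid pattern: report an error to the caller
} else if (parsed instanceof String) {
    // no unescaped wildcards: compare for equality against (String) parsed
} else if (parsed == Pattern.matchMany) {
    // matches every string: treat as an "IS NOT NULL" test
} else {
    Pattern p = (Pattern) parsed; // general case: evaluate the wildcard pattern
}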
public class ThreadUtils { /** * A null - safe method for getting the Thread ' s name . * @ param thread the Thread from which the name is returned . * @ return a String indicating the name of the specified Thread or null if the Thread is null . * @ see java . lang . Thread # getName ( ) */ @ NullSafe public static String getName ( Thread thread ) { } }
return ( thread != null ? thread . getName ( ) : null ) ;
public class ReflectionUtils { /** * Get target object field value * @ param target target object * @ param fieldName field name * @ param < T > field type * @ return field value */ public static < T > T getFieldValue ( Object target , String fieldName ) { } }
try { Field field = target . getClass ( ) . getDeclaredField ( fieldName ) ; field . setAccessible ( true ) ; @ SuppressWarnings ( "unchecked" ) T returnValue = ( T ) field . get ( target ) ; return returnValue ; } catch ( NoSuchFieldException e ) { throw handleException ( fieldName , e ) ; } catch ( IllegalAccessException e ) { throw handleException ( fieldName , e ) ; }
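A small usage sketch for the reflective getter above; the Person class is hypothetical. Note that getDeclaredField only inspects the target's own class, not its superclasses.

class Person {
    private String name = "Ada";
}

String name = ReflectionUtils.getFieldValue(new Person(), "name"); // "Ada"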
public class ConsumerSessionImpl { /** * Gets the forwardScanning setting * ( Used for MQ - like behaviour ) * @ return the forwardScanning property of the session */ public boolean getForwardScanning ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getForwardScanning" ) ; SibTr . exit ( tc , "getForwardScanning" , Boolean . valueOf ( _forwardScanning ) ) ; } return _forwardScanning ;
public class IotHubResourcesInner { /** * Exports all the device identities in the IoT hub identity registry to an Azure Storage blob container . For more information , see : https : / / docs . microsoft . com / azure / iot - hub / iot - hub - devguide - identity - registry # import - and - export - device - identities . * Exports all the device identities in the IoT hub identity registry to an Azure Storage blob container . For more information , see : https : / / docs . microsoft . com / azure / iot - hub / iot - hub - devguide - identity - registry # import - and - export - device - identities . * @ param resourceGroupName The name of the resource group that contains the IoT hub . * @ param resourceName The name of the IoT hub . * @ param exportDevicesParameters The parameters that specify the export devices operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the JobResponseInner object */ public Observable < JobResponseInner > exportDevicesAsync ( String resourceGroupName , String resourceName , ExportDevicesRequest exportDevicesParameters ) { } }
return exportDevicesWithServiceResponseAsync ( resourceGroupName , resourceName , exportDevicesParameters ) . map ( new Func1 < ServiceResponse < JobResponseInner > , JobResponseInner > ( ) { @ Override public JobResponseInner call ( ServiceResponse < JobResponseInner > response ) { return response . body ( ) ; } } ) ;
public class Action { /** * Creates a composite action which contains all passed actions and * executes them in the same order . */ public static Action composite ( final Collection < Applicable > applicables ) { } }
return new Action ( input -> { for ( Applicable action : applicables ) { action . apply ( input ) ; } return input ; } ) ;
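A composed action applies each element in order and then yields the original input. A usage sketch, assuming Applicable is the single-method callback type consumed above; the two step instances are hypothetical:

List<Applicable> steps = Arrays.asList(validateStep, auditStep); // hypothetical Applicable instances
Action pipeline = Action.composite(steps);
// Applying 'pipeline' runs validateStep.apply(input), then auditStep.apply(input),
// and finally returns the input unchanged.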
public class ClassFile { /** * Recurse depth - first so order of declaring types is correct . */ private static void appendDeclaringTypes ( TypeReference typeRef , char innerClassDelimiter , StringBuilder sb ) { } }
TypeReference declaringType = typeRef . getDeclaringType ( ) ; if ( declaringType != null ) { appendDeclaringTypes ( declaringType , innerClassDelimiter , sb ) ; sb . append ( declaringType . getSimpleName ( ) ) ; sb . append ( innerClassDelimiter ) ; }
public class MetaFilter { /** * Creates a MetaMatcher based on the filter content . * @ param filterAsString the String representation of the filter * @ param metaMatchers the Map of custom MetaMatchers * @ return A MetaMatcher used to match the filter content */ protected MetaMatcher createMetaMatcher ( String filterAsString , Map < String , MetaMatcher > metaMatchers ) { } }
for ( String key : metaMatchers . keySet ( ) ) { if ( filterAsString . startsWith ( key ) ) { return metaMatchers . get ( key ) ; } } if ( filterAsString . startsWith ( GROOVY ) ) { return new GroovyMetaMatcher ( ) ; } return new DefaultMetaMatcher ( ) ;
public class DefaultGroovyMethods { /** * Overloads the left shift operator to provide an easy way to append * objects to a SortedSet . * < pre class = " groovyTestCase " > def set = [ 1,2 ] as SortedSet * set & lt ; & lt ; 3 * assert set = = [ 1,2,3 ] as SortedSet < / pre > * @ param self a SortedSet * @ param value an Object to be added to the SortedSet . * @ return same SortedSet , after the value was added to it . * @ since 2.4.0 */ public static < T > SortedSet < T > leftShift ( SortedSet < T > self , T value ) { } }
return ( SortedSet < T > ) leftShift ( ( Collection < T > ) self , value ) ;
public class MarkLogicClientImpl { /** * Set the DatabaseClient and instantiate the related managers * @ param databaseClient the DatabaseClient to use */ private void setDatabaseClient ( DatabaseClient databaseClient ) { } }
this . databaseClient = databaseClient ; this . sparqlManager = getDatabaseClient ( ) . newSPARQLQueryManager ( ) ; this . graphManager = getDatabaseClient ( ) . newGraphManager ( ) ;
public class Style { /** * Parse padding with string like ' [ 10,10,10,10 ] ' * @ param paddingString */ public void setPadding ( @ Nullable String paddingString ) { } }
if ( ! TextUtils . isEmpty ( paddingString ) ) { // remove leading and ending ' [ ' ' ] ' try { paddingString = paddingString . trim ( ) . substring ( 1 , paddingString . length ( ) - 1 ) ; String paddingStringArray [ ] = paddingString . split ( "," ) ; int size = paddingStringArray . length > 4 ? 4 : paddingStringArray . length ; for ( int i = 0 ; i < size ; i ++ ) { String paddingStr = paddingStringArray [ i ] ; try { if ( ! TextUtils . isEmpty ( paddingStr ) ) { padding [ i ] = parseSize ( paddingStr . trim ( ) . replace ( "\"" , "" ) , 0 ) ; } else { padding [ i ] = 0 ; } } catch ( NumberFormatException ignored ) { padding [ i ] = 0 ; } } Arrays . fill ( padding , size , padding . length , padding [ size - 1 ] ) ; } catch ( Exception e ) { Arrays . fill ( padding , 0 ) ; } }
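Because the parsed values are back-filled with the last entry, short arrays expand to four slots; a few illustrative inputs (results are in whatever unit parseSize returns, and the style instance is hypothetical):

style.setPadding("[10,20]");     // padding becomes {10, 20, 20, 20}
style.setPadding("[1,2,3,4,5]"); // only the first four entries are used: {1, 2, 3, 4}
style.setPadding("[x]");         // non-numeric entries fall back to 0: {0, 0, 0, 0}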
public class AddKeyValueStrength { /** * Adds the reference strength methods for the key or value . */ private void addStrength ( String collectName , String queueName , TypeName type ) { } }
context . cache . addMethod ( MethodSpec . methodBuilder ( queueName ) . addModifiers ( context . protectedFinalModifiers ( ) ) . returns ( type ) . addStatement ( "return $N" , queueName ) . build ( ) ) ; context . cache . addField ( FieldSpec . builder ( type , queueName , Modifier . FINAL ) . initializer ( "new $T()" , type ) . build ( ) ) ; context . cache . addMethod ( MethodSpec . methodBuilder ( collectName ) . addModifiers ( context . protectedFinalModifiers ( ) ) . addStatement ( "return true" ) . returns ( boolean . class ) . build ( ) ) ;
public class ContractsApi { /** * Get corporation contract bids Lists bids on a particular auction contract * - - - This route is cached for up to 3600 seconds SSO Scope : * esi - contracts . read _ corporation _ contracts . v1 * @ param contractId * ID of a contract ( required ) * @ param corporationId * An EVE corporation ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param page * Which page of results to return ( optional , default to 1) * @ param token * Access token to use if unable to set a header ( optional ) * @ return ApiResponse & lt ; List & lt ; CorporationContractsBidsResponse & gt ; & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public ApiResponse < List < CorporationContractsBidsResponse > > getCorporationsCorporationIdContractsContractIdBidsWithHttpInfo ( Integer contractId , Integer corporationId , String datasource , String ifNoneMatch , Integer page , String token ) throws ApiException { } }
com . squareup . okhttp . Call call = getCorporationsCorporationIdContractsContractIdBidsValidateBeforeCall ( contractId , corporationId , datasource , ifNoneMatch , page , token , null ) ; Type localVarReturnType = new TypeToken < List < CorporationContractsBidsResponse > > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class Controller { /** * Returns the value of a request parameter converted to an Integer , falling back to the given default value if the parameter is null . * @ param name a String specifying the name of the parameter * @ param defaultValue the default value returned when the parameter is absent * @ return an Integer representing the single value of the parameter */ public Integer getParaToInt ( String name , Integer defaultValue ) { } }
return toInt ( request . getParameter ( name ) , defaultValue ) ;
public class ClassResource { /** * Get the table name . */ public String getTableNames ( boolean bAddQuotes ) { } }
return ( m_tableName == null ) ? Record . formatTableNames ( CLASS_RESOURCE_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ;
public class UserEntity { /** * Set the roles assigned to the current user . * @ param inRoles A list of roles to be assigned to the user . */ @ Override public void setRoles ( List < RoleEntity > inRoles ) { } }
if ( inRoles == null ) { this . roles = new ArrayList < > ( ) ; } else { this . roles = new ArrayList < > ( inRoles ) ; }
public class ExpressRouteGatewaysInner { /** * Creates or updates a ExpressRoute gateway in a specified resource group . * @ param resourceGroupName The name of the resource group . * @ param expressRouteGatewayName The name of the ExpressRoute gateway . * @ param putExpressRouteGatewayParameters Parameters required in an ExpressRoute gateway PUT operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ExpressRouteGatewayInner > createOrUpdateAsync ( String resourceGroupName , String expressRouteGatewayName , ExpressRouteGatewayInner putExpressRouteGatewayParameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , expressRouteGatewayName , putExpressRouteGatewayParameters ) . map ( new Func1 < ServiceResponse < ExpressRouteGatewayInner > , ExpressRouteGatewayInner > ( ) { @ Override public ExpressRouteGatewayInner call ( ServiceResponse < ExpressRouteGatewayInner > response ) { return response . body ( ) ; } } ) ;
public class InfluxDBImpl { /** * { @ inheritDoc } */ @ Override public void query ( final Query query , final int chunkSize , final BiConsumer < Cancellable , QueryResult > onNext ) { } }
query ( query , chunkSize , onNext , ( ) -> { } ) ;
public class DescribeKeyPairsResult { /** * Information about the key pairs . * @ return Information about the key pairs . */ public java . util . List < KeyPairInfo > getKeyPairs ( ) { } }
if ( keyPairs == null ) { keyPairs = new com . amazonaws . internal . SdkInternalList < KeyPairInfo > ( ) ; } return keyPairs ;
public class Transform2D { /** * Concatenates this transform with a scaling transformation . * < p > This function is equivalent to : * < pre > * this = this * [ s 0 0 ; 0 s 0 ; 0 0 1 ] * < / pre > * @ param scale the scaling factor . */ public void scale ( double scale ) { } }
this . m00 *= scale ; this . m11 *= scale ; this . m01 *= scale ; this . m10 *= scale ;
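Written out, the right-multiplication by the scaling matrix shows why only the four linear entries change while the translation column (m02, m12) is untouched. The constructor below is an assumption; any existing transform would do:

// [ m00 m01 m02 ]   [ s 0 0 ]   [ s*m00  s*m01  m02 ]
// [ m10 m11 m12 ] x [ 0 s 0 ] = [ s*m10  s*m11  m12 ]
// [  0   0   1  ]   [ 0 0 1 ]   [   0      0     1  ]
Transform2D t = new Transform2D(); // constructor assumed
t.scale(2.0);                      // doubles m00, m01, m10, m11; m02 and m12 stay put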
public class DescribeCapacityReservationsResult { /** * Information about the Capacity Reservations . * @ return Information about the Capacity Reservations . */ public java . util . List < CapacityReservation > getCapacityReservations ( ) { } }
if ( capacityReservations == null ) { capacityReservations = new com . amazonaws . internal . SdkInternalList < CapacityReservation > ( ) ; } return capacityReservations ;
public class SecurityActions { /** * Get a Subject instance * @ param subjectFactory The subject factory * @ param domain The domain * @ param mcf The ManagedConnectionFactory * @ return The instance */ static Subject createSubject ( final SubjectFactory subjectFactory , final String domain , final ManagedConnectionFactory mcf ) { } }
if ( System . getSecurityManager ( ) == null ) { Subject subject = subjectFactory . createSubject ( domain ) ; Set < PasswordCredential > s = getPasswordCredentials ( subject ) ; if ( s != null && ! s . isEmpty ( ) ) { for ( PasswordCredential pc : s ) { pc . setManagedConnectionFactory ( mcf ) ; } } return subject ; } return AccessController . doPrivileged ( new PrivilegedAction < Subject > ( ) { public Subject run ( ) { Subject subject = subjectFactory . createSubject ( domain ) ; Set < PasswordCredential > s = getPasswordCredentials ( subject ) ; if ( s != null && ! s . isEmpty ( ) ) { for ( PasswordCredential pc : s ) { pc . setManagedConnectionFactory ( mcf ) ; } } return subject ; } } ) ;
public class XlsWorkbook { /** * Sets the cell attributes from the given column . * @ param cellFormat The cell format to set the attributes on * @ param column The column definition to take the attributes from */ public void setCellFormatAttributes ( WritableCellFormat cellFormat , FileColumn column ) { } }
try { if ( cellFormat != null && column != null ) { Alignment a = Alignment . GENERAL ; short align = column . getAlign ( ) ; if ( align == FileColumn . ALIGN_CENTRE ) a = Alignment . CENTRE ; else if ( align == FileColumn . ALIGN_LEFT ) a = Alignment . LEFT ; else if ( align == FileColumn . ALIGN_RIGHT ) a = Alignment . RIGHT ; else if ( align == FileColumn . ALIGN_JUSTIFY ) a = Alignment . JUSTIFY ; else if ( align == FileColumn . ALIGN_FILL ) a = Alignment . FILL ; cellFormat . setAlignment ( a ) ; cellFormat . setWrap ( column . getWrap ( ) ) ; } } catch ( WriteException e ) { logger . severe ( StringUtilities . serialize ( e ) ) ; }
public class AsynchronousRequest { /** * For more info on WvW ranks API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / wvw / ranks " > here < / a > < br / > * Gives the user access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions * @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) } * @ throws NullPointerException if the given { @ link Callback } is null * @ see WvWRank WvW rank info */ public void getAllWvWRankID ( Callback < List < Integer > > callback ) throws NullPointerException { } }
gw2API . getAllWvWRankIDs ( ) . enqueue ( callback ) ;
public class UnionPayApi { /** * Back - end request that returns a String * @ param reqData * request parameters * @ return { String } */ public static String backRequest ( Map < String , String > reqData ) { } }
return HttpUtils . post ( SDKConfig . getConfig ( ) . getBackRequestUrl ( ) , reqData ) ;
public class GCCBEZRGImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eSet ( int featureID , Object newValue ) { } }
switch ( featureID ) { case AfplibPackage . GCCBEZRG__XPOS : setXPOS ( ( Integer ) newValue ) ; return ; case AfplibPackage . GCCBEZRG__YPOS : setYPOS ( ( Integer ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
public class JMElasticsearchIndex { /** * Send data async action future . * @ param jsonSource the json source * @ param index the index * @ param type the type * @ param id the id * @ return the action future */ public ActionFuture < IndexResponse > sendDataAsync ( String jsonSource , String index , String type , String id ) { } }
return indexQueryAsync ( buildIndexRequest ( jsonSource , index , type , id ) ) ;
public class NumberUtils { /** * < p > Convert a < code > String < / code > to a < code > float < / code > , returning a * default value if the conversion fails . < / p > * < p > If the string < code > str < / code > is < code > null < / code > , the default * value is returned . < / p > * < pre > * NumberUtils . toFloat ( null , 1.1f ) = 1.1f * NumberUtils . toFloat ( " " , 1.1f ) = 1.1f * NumberUtils . toFloat ( " 1.5 " , 0.0f ) = 1.5f * < / pre > * @ param str the string to convert , may be < code > null < / code > * @ param defaultValue the default value * @ return the float represented by the string , or defaultValue * if conversion fails * @ since 2.1 */ public static float toFloat ( final String str , final float defaultValue ) { } }
if ( str == null ) { return defaultValue ; } try { return Float . parseFloat ( str ) ; } catch ( final NumberFormatException nfe ) { return defaultValue ; }
public class IntStreamEx { /** * Returns an infinite sequential ordered { @ code IntStreamEx } produced by * iterative application of a function { @ code f } to an initial element * { @ code seed } , producing a stream consisting of { @ code seed } , * { @ code f ( seed ) } , { @ code f ( f ( seed ) ) } , etc . * The first element ( position { @ code 0 } ) in the { @ code IntStreamEx } will be * the provided { @ code seed } . For { @ code n > 0 } , the element at position * { @ code n } will be the result of applying the function { @ code f } to the * element at position { @ code n - 1 } . * @ param seed the initial element * @ param f a function to be applied to the previous element to produce a * new element * @ return A new sequential { @ code IntStream } * @ see # iterate ( int , IntPredicate , IntUnaryOperator ) */ public static IntStreamEx iterate ( final int seed , final IntUnaryOperator f ) { } }
return iterate ( seed , x -> true , f ) ;
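A quick usage sketch; since IntStreamEx specializes the JDK IntStream, limit and toArray are assumed to be available as usual:

int[] powersOfTwo = IntStreamEx.iterate(1, x -> x * 2)
                               .limit(5)
                               .toArray(); // {1, 2, 4, 8, 16}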
public class MathUtils { /** * How much of the variance is NOT explained by the regression * @ param predictedValues predicted values * @ param targetAttribute data for target attribute * @ return the sum of squared errors ( residual sum of squares ) */ public static double ssError ( double [ ] predictedValues , double [ ] targetAttribute ) { } }
double ret = 0 ; for ( int i = 0 ; i < predictedValues . length ; i ++ ) { ret += Math . pow ( targetAttribute [ i ] - predictedValues [ i ] , 2 ) ; } return ret ;
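A worked example of the residual sum computed above:

double[] predicted = {2.0, 4.0, 6.0};
double[] actual    = {2.5, 3.5, 6.0};
double sse = MathUtils.ssError(predicted, actual);
// (2.5 - 2.0)^2 + (3.5 - 4.0)^2 + (6.0 - 6.0)^2 = 0.25 + 0.25 + 0.0 = 0.5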
public class V1Annotation { /** * exposed for conversion */ public static V1Annotation create ( long timestamp , String value , @ Nullable Endpoint endpoint ) { } }
return new V1Annotation ( timestamp , value , endpoint ) ;
public class RocksDBIncrementalRestoreOperation { /** * This recreates the new working directory of the recovered RocksDB instance and links / copies the contents from * a local state . */ private void restoreInstanceDirectoryFromPath ( Path source , String instanceRocksDBPath ) throws IOException { } }
FileSystem fileSystem = source . getFileSystem ( ) ; final FileStatus [ ] fileStatuses = fileSystem . listStatus ( source ) ; if ( fileStatuses == null ) { throw new IOException ( "Cannot list file statuses. Directory " + source + " does not exist." ) ; } for ( FileStatus fileStatus : fileStatuses ) { final Path filePath = fileStatus . getPath ( ) ; final String fileName = filePath . getName ( ) ; File restoreFile = new File ( source . getPath ( ) , fileName ) ; File targetFile = new File ( instanceRocksDBPath , fileName ) ; if ( fileName . endsWith ( SST_FILE_SUFFIX ) ) { // hard - link the immutable sst files . Files . createLink ( targetFile . toPath ( ) , restoreFile . toPath ( ) ) ; } else { // true copy for all other files . Files . copy ( restoreFile . toPath ( ) , targetFile . toPath ( ) , StandardCopyOption . REPLACE_EXISTING ) ; } }
public class GoogleCodingConvention { /** * { @ inheritDoc } * < p > This enforces the Google const name convention , that the first character * after the last $ must be an upper - case letter and all subsequent letters * must be upper case . The name must be at least 2 characters long . * < p > Examples : * < pre > * aaa Not constant - lower - case letters in the name * A Not constant - too short * goog $ A Constant - letters after the $ are upper - case . * AA17 Constant - digits can appear after the first letter * goog $ 7A Not constant - first character after the $ must be * upper case . * $ A Constant - doesn ' t have to be anything in front of the $ * < / pre > */ @ Override public boolean isConstant ( String name ) { } }
if ( name . length ( ) <= 1 ) { return false ; } // In compiled code , ' $ ' is often a namespace delimiter . To allow inlining // of namespaced constants , we strip off any namespaces here . int pos = name . lastIndexOf ( '$' ) ; if ( pos >= 0 ) { name = name . substring ( pos + 1 ) ; if ( name . isEmpty ( ) ) { return false ; } } return isConstantKey ( name ) ;
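The examples in the Javadoc map directly onto calls to this method; the expected results below simply restate that table:

GoogleCodingConvention convention = new GoogleCodingConvention();
convention.isConstant("aaa");     // false - lower-case letters in the name
convention.isConstant("A");       // false - too short
convention.isConstant("goog$A");  // true  - only the part after the last '$' is checked
convention.isConstant("AA17");    // true  - digits may follow the first letter
convention.isConstant("goog$7A"); // false - first character after the '$' must be upper case
convention.isConstant("$A");      // true  - nothing needs to precede the '$'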
public class NodeFilter { /** * { @ inheritDoc } */ @ Override public final boolean filter ( ) { } }
return ( getNode ( ) . getKind ( ) == IConstants . ELEMENT || getNode ( ) . getKind ( ) == IConstants . TEXT ) ;
public class EntityBeanTypeImpl { /** * If not already created , a new < code > security - role - ref < / code > element will be created and returned . * Otherwise , the first existing < code > security - role - ref < / code > element will be returned . * @ return the instance defined for the element < code > security - role - ref < / code > */ public SecurityRoleRefType < EntityBeanType < T > > getOrCreateSecurityRoleRef ( ) { } }
List < Node > nodeList = childNode . get ( "security-role-ref" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new SecurityRoleRefTypeImpl < EntityBeanType < T > > ( this , "security-role-ref" , childNode , nodeList . get ( 0 ) ) ; } return createSecurityRoleRef ( ) ;
public class WebApplicationExceptionMapper { /** * Maps an unhandled { @ link WebApplicationException } to a { @ link Response } . * @ param exception the { @ link WebApplicationException } that was not handled * @ return a { @ link Response } object with a status of the { @ link WebApplicationException } or 500 * if the exception ' s response is null , content - type of ' application / json ' , and a * { @ link JsonError } entity containing details about the unhandled exception in JSON format */ @ Override public Response toResponse ( WebApplicationException exception ) { } }
Response response = exception . getResponse ( ) ; int code = response == null ? INTERNAL_SERVER_ERROR . getStatusCode ( ) : response . getStatus ( ) ; if ( code >= 400 && code < 500 ) { logger . warn ( "An unhandled exception was thrown." , exception ) ; } else if ( code >= 500 ) { logger . error ( "An unhandled exception was thrown." , exception ) ; } return Optional . ofNullable ( exception . getResponse ( ) ) . map ( Response :: fromResponse ) . orElse ( Response . status ( code ) ) . entity ( JsonError . builder ( ) . code ( code ) . message ( getMessage ( code ) ) . build ( ) ) . type ( MediaType . APPLICATION_JSON ) . build ( ) ;
public class CmsDriverManager { /** * Reads all property objects mapped to a specified resource from the database . < p > * All properties in the result List will be in frozen ( read only ) state , so you can ' t change the values . < p > * Returns an empty list if no properties are found at all . < p > * @ param dbc the current database context * @ param resource the resource where the properties are read from * @ param search true , if the properties should be searched on all parent folders if not found on the resource * @ return a list of CmsProperty objects containing the structure and / or resource value * @ throws CmsException if something goes wrong * @ see CmsObject # readPropertyObjects ( String , boolean ) */ public List < CmsProperty > readPropertyObjects ( CmsDbContext dbc , CmsResource resource , boolean search ) throws CmsException { } }
// check if we have the result already cached CmsUUID projectId = getProjectIdForContext ( dbc ) ; String cacheKey = getCacheKey ( CACHE_ALL_PROPERTIES , search , projectId , resource . getRootPath ( ) ) ; List < CmsProperty > properties = m_monitor . getCachedPropertyList ( cacheKey ) ; if ( ( properties == null ) || ! dbc . getProjectId ( ) . isNullUUID ( ) ) { // result not cached , let ' s look it up in the DB if ( search ) { boolean cont ; properties = new ArrayList < CmsProperty > ( ) ; List < CmsProperty > parentProperties = null ; do { try { parentProperties = readPropertyObjects ( dbc , resource , false ) ; // make sure properties from lower folders " overwrite " properties from upper folders parentProperties . removeAll ( properties ) ; parentProperties . addAll ( properties ) ; properties . clear ( ) ; properties . addAll ( parentProperties ) ; cont = resource . getRootPath ( ) . length ( ) > 1 ; } catch ( CmsSecurityException se ) { // a security exception ( probably no read permission ) we return the current result cont = false ; } if ( cont ) { // no permission check on parent folder is required since we must have " read " // permissions to read the child resource anyway resource = readResource ( dbc , CmsResource . getParentFolder ( resource . getRootPath ( ) ) , CmsResourceFilter . ALL ) ; } } while ( cont ) ; } else { properties = getVfsDriver ( dbc ) . readPropertyObjects ( dbc , dbc . currentProject ( ) , resource ) ; // for ( CmsProperty prop : properties ) { // prop . setOrigin ( resource . getRootPath ( ) ) ; } // set all properties in the result list as frozen CmsProperty . setFrozen ( properties ) ; if ( dbc . getProjectId ( ) . isNullUUID ( ) ) { // store the result in the cache if needed m_monitor . cachePropertyList ( cacheKey , properties ) ; } } return new ArrayList < CmsProperty > ( properties ) ;
public class FibonacciHeap { /** * Cuts this entry from its parent and adds it to the root list , and then * does the same for its parent , and so on up the tree . * Runtime : O ( log n ) */ private void cutAndMakeRoot ( Entry entry ) { } }
Entry oParent = entry . oParent ; if ( oParent == null ) return ; // already a root oParent . degree -- ; entry . isMarked = false ; // update parent ' s ` oFirstChild ` pointer Entry oFirstChild = oParent . oFirstChild ; assert oFirstChild != null ; if ( oFirstChild . equals ( entry ) ) { if ( oParent . degree == 0 ) { oParent . oFirstChild = null ; } else { oParent . oFirstChild = entry . next ; } } entry . oParent = null ; unlinkFromNeighbors ( entry ) ; // add to root list mergeLists ( entry , oMinEntry ) ; if ( oParent . oParent != null ) { if ( oParent . isMarked ) { cutAndMakeRoot ( oParent ) ; } else { oParent . isMarked = true ; } }
public class ExceptionMasker { /** * long */ public LongConsumer mask ( ThrowingLongConsumer < ? extends X > consumer ) { } }
Objects . requireNonNull ( consumer ) ; return l -> maskException ( ( ) -> consumer . accept ( l ) ) ;
public class HttpServlets { /** * Appends message content to the request . * @ param request * the request * @ param message * the message content */ public static void addMessage ( final HttpServletRequest request , final String message ) { } }
StringBuilder sb = ( StringBuilder ) request . getAttribute ( MESSAGE_KEY ) ; if ( sb == null ) { sb = new StringBuilder ( ) ; request . setAttribute ( MESSAGE_KEY , sb ) ; } sb . append ( message ) ;
public class Flipper { /** * Applies the nested function with the former and latter formal * parameters swapped . * @ param former the former formal parameter used as latter in the nested * function * @ param latter the latter formal parameter used as former in the nested * function * @ return the result of the function */ @ Override public R apply ( T former , U latter ) { } }
return function . apply ( latter , former ) ;
public class TaskInProgress { /** * Indicate that one of the taskids in this TaskInProgress * has successfully completed ! */ public void completed ( TaskAttemptID taskid ) { } }
// Record that this taskid is complete completedTask ( taskid , TaskStatus . State . SUCCEEDED ) ; // Note the successful taskid setSuccessfulTaskid ( taskid ) ; // Now that the TIP is complete , the other speculative // subtasks will be closed when the owning tasktracker // reports in and calls shouldClose ( ) on this object . this . completes ++ ; this . execFinishTime = JobTracker . getClock ( ) . getTime ( ) ; recomputeProgress ( ) ;
public class ImmutableList { /** * Patches the current list . Patching a list first takes the first < code > index < / code > elements * then concatenates it with < code > replacements < / code > and then concatenates it with the * original list dropping < code > index + patchLength < / code > elements . < p > A visualization of this * operation is to replace the < code > patchLength < / code > elements in the list starting from * < code > index < / code > with a list of new elements given by < code > replacements < / code > . * @ param index The index to start patching . * @ param patchLength The length to patch . * @ param replacements The replacements of the patch . * @ param < B > The type of the replacements . It must be A or a subtype of A . * @ return The patched list . */ @ Nonnull public < B extends A > ImmutableList < A > patch ( int index , int patchLength , @ Nonnull ImmutableList < B > replacements ) { } }
return this . take ( index ) . append ( replacements ) . append ( this . drop ( index + patchLength ) ) ;
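A worked example of the take/append/drop arithmetic; the 'of' factory used here is assumed to exist for constructing small lists:

ImmutableList<String> xs = ImmutableList.of("a", "b", "c", "d", "e");
ImmutableList<String> patched = xs.patch(1, 2, ImmutableList.of("x"));
// take(1) = [a]; append [x] = [a, x]; append drop(1 + 2) = [d, e]  =>  [a, x, d, e]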
public class StreamInterceptingTunnel { /** * Intercept all data received along the stream having the given index , * writing that data to the given OutputStream . The OutputStream will * automatically be closed when the stream ends . If there is no such * stream , then the OutputStream will be closed immediately . This function * will block until all received data has been written to the OutputStream * and the OutputStream has been closed . * @ param index * The index of the stream to intercept . * @ param stream * The OutputStream to write all intercepted data to . * @ throws GuacamoleException * If an error occurs while intercepting the stream , or if the stream * itself reports an error . */ public void interceptStream ( int index , OutputStream stream ) throws GuacamoleException { } }
// Log beginning of intercepted stream logger . debug ( "Intercepting output stream #{} of tunnel \"{}\"." , index , getUUID ( ) ) ; try { outputStreamFilter . interceptStream ( index , new BufferedOutputStream ( stream ) ) ; } // Log end of intercepted stream finally { logger . debug ( "Intercepted output stream #{} of tunnel \"{}\" ended." , index , getUUID ( ) ) ; }
public class CancelSpotInstanceRequestsResult { /** * One or more Spot Instance requests . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCancelledSpotInstanceRequests ( java . util . Collection ) } or * { @ link # withCancelledSpotInstanceRequests ( java . util . Collection ) } if you want to override the existing values . * @ param cancelledSpotInstanceRequests * One or more Spot Instance requests . * @ return Returns a reference to this object so that method calls can be chained together . */ public CancelSpotInstanceRequestsResult withCancelledSpotInstanceRequests ( CancelledSpotInstanceRequest ... cancelledSpotInstanceRequests ) { } }
if ( this . cancelledSpotInstanceRequests == null ) { setCancelledSpotInstanceRequests ( new com . amazonaws . internal . SdkInternalList < CancelledSpotInstanceRequest > ( cancelledSpotInstanceRequests . length ) ) ; } for ( CancelledSpotInstanceRequest ele : cancelledSpotInstanceRequests ) { this . cancelledSpotInstanceRequests . add ( ele ) ; } return this ;
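Since this method appends rather than replaces, chained calls accumulate entries; use setCancelledSpotInstanceRequests(Collection) to overwrite instead. The request objects below are hypothetical:

CancelSpotInstanceRequestsResult result = new CancelSpotInstanceRequestsResult()
        .withCancelledSpotInstanceRequests(requestA, requestB) // list: requestA, requestB
        .withCancelledSpotInstanceRequests(requestC);          // appended: requestA, requestB, requestC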
public class AbstractSpreadSheetDocumentRecordWriter { /** * Reads the ( private ) key and certificate from keystore to sign * @ param conf * @ throws OfficeWriterException * @ throws IOException */ private void readSigningKeyAndCertificate ( Configuration conf ) throws OfficeWriterException , IOException { } }
if ( ( this . howc . getSigKeystoreFile ( ) != null ) && ( ! "" . equals ( this . howc . getSigKeystoreFile ( ) ) ) ) { LOG . info ( "Signing document" ) ; if ( ( this . howc . getSigKeystoreAlias ( ) == null ) || ( "" . equals ( this . howc . getSigKeystoreAlias ( ) ) ) ) { LOG . error ( "Keystore alias for signature keystore not defined. Cannot sign document" ) ; throw new OfficeWriterException ( "Keystore alias for signature keystore not defined. Cannot sign document" ) ; } if ( ( this . howc . getSigKeystoreType ( ) == null ) || ( "" . equals ( this . howc . getSigKeystoreType ( ) ) ) ) { LOG . error ( "Keystore type for signature keystore not defined. Cannot sign document" ) ; throw new OfficeWriterException ( "Keystore type for signature keystore not defined. Cannot sign document" ) ; } LOG . info ( "Reading keystore" ) ; HadoopKeyStoreManager hksm = new HadoopKeyStoreManager ( conf ) ; try { hksm . openKeyStore ( new Path ( this . howc . getSigKeystoreFile ( ) ) , this . howc . getSigKeystoreType ( ) , this . howc . getSigKeystorePassword ( ) ) ; this . howc . setSigKey ( hksm . getPrivateKey ( this . howc . getSigKeystoreAlias ( ) , this . howc . getSigKeystorePassword ( ) ) ) ; this . howc . setSigCertificate ( ( X509Certificate ) hksm . getCertificate ( this . howc . getSigKeystoreAlias ( ) ) ) ; } catch ( NoSuchAlgorithmException | CertificateException | KeyStoreException | IllegalArgumentException | UnrecoverableKeyException e ) { LOG . error ( "Cannot read signing certificate. Exception: " , e ) ; throw new OfficeWriterException ( "Cannot read keystore to obtain key and certificate for signing " + e ) ; } }
public class BeanPersistenceDelegate { /** * PersistenceDelegate . initialize ( ) */ protected void initialize ( Class < ? > type , Object oldInstance , Object newInstance , Encoder out ) { } }
// Get the bean and associated beanInfo for the source instance ControlBean control = ( ControlBean ) oldInstance ; BeanInfo beanInfo ; try { beanInfo = Introspector . getBeanInfo ( control . getClass ( ) ) ; } catch ( IntrospectionException ie ) { throw new ControlException ( "Unable to locate BeanInfo" , ie ) ; } // Cast the encoding stream to an XMLEncoder ( only encoding supported ) and then set // the stream owner to the bean being persisted XMLEncoder xmlOut = ( XMLEncoder ) out ; Object owner = xmlOut . getOwner ( ) ; xmlOut . setOwner ( control ) ; try { // The default implementation of property persistence will use BeanInfo to // incrementally compare oldInstance property values to newInstance property values . // Because the bean instance PropertyMap holds only the values that have been // modified , this process can be optimized by directly writing out only the properties // found in the map . BeanPropertyMap beanMap = control . getPropertyMap ( ) ; PropertyDescriptor [ ] propDescriptors = beanInfo . getPropertyDescriptors ( ) ; for ( PropertyKey pk : beanMap . getPropertyKeys ( ) ) { // Locate the PropertyDescriptor for the modified property , and use it to write // the property value to the encoder stream String propName = pk . getPropertyName ( ) ; boolean found = false ; for ( int i = 0 ; i < propDescriptors . length ; i ++ ) { if ( propName . equals ( propDescriptors [ i ] . getName ( ) ) ) { found = true ; // Only write the property if it is not flagged as transient Object transientVal = propDescriptors [ i ] . getValue ( "transient" ) ; if ( transientVal == null || transientVal . equals ( Boolean . FALSE ) ) { xmlOut . writeStatement ( new Statement ( oldInstance , propDescriptors [ i ] . getWriteMethod ( ) . getName ( ) , new Object [ ] { beanMap . getProperty ( pk ) } ) ) ; } } } if ( found == false ) { throw new ControlException ( "Unknown property in bean PropertyMap: " + pk ) ; } } // Get the bean context associated with the bean , and persist any nested controls ControlBeanContext cbc = control . getControlBeanContext ( ) ; if ( cbc . size ( ) != 0 ) { xmlOut . setPersistenceDelegate ( ControlBeanContext . class , new ContextPersistenceDelegate ( ) ) ; Iterator nestedIter = cbc . iterator ( ) ; while ( nestedIter . hasNext ( ) ) { Object bean = nestedIter . next ( ) ; if ( bean instanceof ControlBean ) { xmlOut . writeStatement ( new Statement ( cbc , "add" , new Object [ ] { bean } ) ) ; } } } // Restore any listeners associated with the control EventSetDescriptor [ ] eventSetDescriptors = beanInfo . getEventSetDescriptors ( ) ; for ( int i = 0 ; i < eventSetDescriptors . length ; i ++ ) { EventSetDescriptor esd = eventSetDescriptors [ i ] ; Method listenersMethod = esd . getGetListenerMethod ( ) ; String addListenerName = esd . getAddListenerMethod ( ) . getName ( ) ; if ( listenersMethod != null ) { // Get the list of listeners , and then add statements to incrementally // add them in the same order try { Object [ ] lstnrs = ( Object [ ] ) listenersMethod . invoke ( control , new Object [ ] { } ) ; for ( int j = 0 ; j < lstnrs . length ; j ++ ) { // If this is a generated EventAdaptor class , then set the delegate // explicitly if ( lstnrs [ j ] instanceof EventAdaptor ) xmlOut . setPersistenceDelegate ( lstnrs [ j ] . getClass ( ) , new AdaptorPersistenceDelegate ( ) ) ; xmlOut . 
writeStatement ( new Statement ( control , addListenerName , new Object [ ] { lstnrs [ j ] } ) ) ; } } catch ( Exception iae ) { throw new ControlException ( "Unable to initialize listeners" , iae ) ; } } } // See if the control holds an implementation instance , if so , we need to include // it ( and any nested controls or state ) in the encoding stream Object impl = control . getImplementation ( ) ; if ( impl != null ) { // Set the persistence delegate for the impl class to the Impl delegate , // set the current stream owner to the bean , and then write the implementation Class implClass = impl . getClass ( ) ; if ( xmlOut . getPersistenceDelegate ( implClass ) instanceof DefaultPersistenceDelegate ) xmlOut . setPersistenceDelegate ( implClass , new ImplPersistenceDelegate ( ) ) ; // HACK : This bit of hackery pushes the impl into the persistence stream // w / out actually requiring it be used as an argument elsewhere , since there // is no public API on the bean that takes an impl instance as an argument . xmlOut . writeStatement ( new Statement ( impl , "toString" , null ) ) ; } } finally { // Restore the previous encoding stream owner xmlOut . setOwner ( owner ) ; }
public class InternalXbaseWithAnnotationsParser { /** * InternalXbaseWithAnnotations . g : 1188:1 : ruleXRelationalExpression returns [ EObject current = null ] : ( this _ XOtherOperatorExpression _ 0 = ruleXOtherOperatorExpression ( ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) | ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) ) * ) ; */ public final EObject ruleXRelationalExpression ( ) throws RecognitionException { } }
EObject current = null ; Token otherlv_2 = null ; EObject this_XOtherOperatorExpression_0 = null ; EObject lv_type_3_0 = null ; EObject lv_rightOperand_6_0 = null ; enterRule ( ) ; try { // InternalXbaseWithAnnotations . g : 1194:2 : ( ( this _ XOtherOperatorExpression _ 0 = ruleXOtherOperatorExpression ( ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) | ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) ) * ) ) // InternalXbaseWithAnnotations . g : 1195:2 : ( this _ XOtherOperatorExpression _ 0 = ruleXOtherOperatorExpression ( ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) | ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) ) * ) { // InternalXbaseWithAnnotations . g : 1195:2 : ( this _ XOtherOperatorExpression _ 0 = ruleXOtherOperatorExpression ( ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) | ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) ) * ) // InternalXbaseWithAnnotations . g : 1196:3 : this _ XOtherOperatorExpression _ 0 = ruleXOtherOperatorExpression ( ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) | ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) ) * { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXRelationalExpressionAccess ( ) . getXOtherOperatorExpressionParserRuleCall_0 ( ) ) ; } pushFollow ( FOLLOW_21 ) ; this_XOtherOperatorExpression_0 = ruleXOtherOperatorExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { current = this_XOtherOperatorExpression_0 ; afterParserOrEnumRuleCall ( ) ; } // InternalXbaseWithAnnotations . g : 1204:3 : ( ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) | ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) ) * loop21 : do { int alt21 = 3 ; switch ( input . LA ( 1 ) ) { case 26 : { int LA21_2 = input . LA ( 2 ) ; if ( ( synpred12_InternalXbaseWithAnnotations ( ) ) ) { alt21 = 2 ; } } break ; case 27 : { int LA21_3 = input . LA ( 2 ) ; if ( ( synpred12_InternalXbaseWithAnnotations ( ) ) ) { alt21 = 2 ; } } break ; case 35 : { int LA21_4 = input . LA ( 2 ) ; if ( ( synpred11_InternalXbaseWithAnnotations ( ) ) ) { alt21 = 1 ; } } break ; case 28 : { int LA21_5 = input . LA ( 2 ) ; if ( ( synpred12_InternalXbaseWithAnnotations ( ) ) ) { alt21 = 2 ; } } break ; } switch ( alt21 ) { case 1 : // InternalXbaseWithAnnotations . g : 1205:4 : ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) { // InternalXbaseWithAnnotations . g : 1205:4 : ( ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) ) // InternalXbaseWithAnnotations . 
g : 1206:5 : ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) { // InternalXbaseWithAnnotations . g : 1206:5 : ( ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) ) // InternalXbaseWithAnnotations . g : 1207:6 : ( ( ( ) ' instanceof ' ) ) = > ( ( ) otherlv _ 2 = ' instanceof ' ) { // InternalXbaseWithAnnotations . g : 1213:6 : ( ( ) otherlv _ 2 = ' instanceof ' ) // InternalXbaseWithAnnotations . g : 1214:7 : ( ) otherlv _ 2 = ' instanceof ' { // InternalXbaseWithAnnotations . g : 1214:7 : ( ) // InternalXbaseWithAnnotations . g : 1215:8: { if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getXRelationalExpressionAccess ( ) . getXInstanceOfExpressionExpressionAction_1_0_0_0_0 ( ) , current ) ; } } otherlv_2 = ( Token ) match ( input , 35 , FOLLOW_22 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_2 , grammarAccess . getXRelationalExpressionAccess ( ) . getInstanceofKeyword_1_0_0_0_1 ( ) ) ; } } } // InternalXbaseWithAnnotations . g : 1227:5 : ( ( lv _ type _ 3_0 = ruleJvmTypeReference ) ) // InternalXbaseWithAnnotations . g : 1228:6 : ( lv _ type _ 3_0 = ruleJvmTypeReference ) { // InternalXbaseWithAnnotations . g : 1228:6 : ( lv _ type _ 3_0 = ruleJvmTypeReference ) // InternalXbaseWithAnnotations . g : 1229:7 : lv _ type _ 3_0 = ruleJvmTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXRelationalExpressionAccess ( ) . getTypeJvmTypeReferenceParserRuleCall_1_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_21 ) ; lv_type_3_0 = ruleJvmTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXRelationalExpressionRule ( ) ) ; } set ( current , "type" , lv_type_3_0 , "org.eclipse.xtext.xbase.Xtype.JvmTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } break ; case 2 : // InternalXbaseWithAnnotations . g : 1248:4 : ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) { // InternalXbaseWithAnnotations . g : 1248:4 : ( ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) ) // InternalXbaseWithAnnotations . g : 1249:5 : ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) { // InternalXbaseWithAnnotations . g : 1249:5 : ( ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) ) // InternalXbaseWithAnnotations . g : 1250:6 : ( ( ( ) ( ( ruleOpCompare ) ) ) ) = > ( ( ) ( ( ruleOpCompare ) ) ) { // InternalXbaseWithAnnotations . g : 1260:6 : ( ( ) ( ( ruleOpCompare ) ) ) // InternalXbaseWithAnnotations . g : 1261:7 : ( ) ( ( ruleOpCompare ) ) { // InternalXbaseWithAnnotations . g : 1261:7 : ( ) // InternalXbaseWithAnnotations . g : 1262:8: { if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getXRelationalExpressionAccess ( ) . getXBinaryOperationLeftOperandAction_1_1_0_0_0 ( ) , current ) ; } } // InternalXbaseWithAnnotations . g : 1268:7 : ( ( ruleOpCompare ) ) // InternalXbaseWithAnnotations . g : 1269:8 : ( ruleOpCompare ) { // InternalXbaseWithAnnotations . g : 1269:8 : ( ruleOpCompare ) // InternalXbaseWithAnnotations . 
g : 1270:9 : ruleOpCompare { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getXRelationalExpressionRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXRelationalExpressionAccess ( ) . getFeatureJvmIdentifiableElementCrossReference_1_1_0_0_1_0 ( ) ) ; } pushFollow ( FOLLOW_9 ) ; ruleOpCompare ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } } } // InternalXbaseWithAnnotations . g : 1286:5 : ( ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) ) // InternalXbaseWithAnnotations . g : 1287:6 : ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) { // InternalXbaseWithAnnotations . g : 1287:6 : ( lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression ) // InternalXbaseWithAnnotations . g : 1288:7 : lv _ rightOperand _ 6_0 = ruleXOtherOperatorExpression { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXRelationalExpressionAccess ( ) . getRightOperandXOtherOperatorExpressionParserRuleCall_1_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_21 ) ; lv_rightOperand_6_0 = ruleXOtherOperatorExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXRelationalExpressionRule ( ) ) ; } set ( current , "rightOperand" , lv_rightOperand_6_0 , "org.eclipse.xtext.xbase.Xbase.XOtherOperatorExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } } } break ; default : break loop21 ; } } while ( true ) ; } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class ShutdownHook { /** * Start async deletion using background script * @ throws IOException */ private void startAsyncDelete ( ) throws IOException { } }
Runtime rt = Runtime . getRuntime ( ) ; File scriptFile = null ; if ( platformType == SelfExtractUtils . PlatformType_UNIX ) { scriptFile = writeCleanupFile ( SelfExtractUtils . PlatformType_UNIX ) ; rt . exec ( "chmod 750 " + scriptFile . getAbsolutePath ( ) ) ; rt . exec ( "sh -c " + scriptFile . getAbsolutePath ( ) + " &" ) ; } else if ( platformType == SelfExtractUtils . PlatformType_WINDOWS ) { scriptFile = writeCleanupFile ( SelfExtractUtils . PlatformType_WINDOWS ) ; // Note : must redirect output in order for script to run on windows . // This is a quirk validated by testing . Redirect to NUL is fine since we ' re // not trying to trap this output anyway . rt . exec ( "cmd /k start /B " + scriptFile . getAbsolutePath ( ) + " >/NUL 2>/NUL" ) ; } else if ( platformType == SelfExtractUtils . PlatformType_CYGWIN ) { scriptFile = writeCleanupFile ( SelfExtractUtils . PlatformType_CYGWIN ) ; // convert to Unix type path and run under bash rt . exec ( "bash -c " + scriptFile . getAbsolutePath ( ) . replace ( '\\' , '/' ) + " &" ) ; }
public class BadiCalendar { /** * < p > Creates a new Badi date in the first major cycle ( Gregorian years 1844-2204 ) . < / p > * @ param vahid 19 - year - cycle ( in range 1-19) * @ param yearOfVahid year in range 1-19 * @ param month Badi month * @ param day day in range 1-19 * @ return new instance of { @ code BadiCalendar } * @ throws IllegalArgumentException in case of any inconsistencies */ public static BadiCalendar of ( int vahid , int yearOfVahid , BadiMonth month , int day ) { } }
return BadiCalendar . ofComplete ( 1 , vahid , yearOfVahid , month , day ) ;
public class FighterParser { /** * Parse a sherdog page * @ param doc Jsoup document of the sherdog page * @ throws IOException if connecting to sherdog fails */ @ Override public Fighter parseDocument ( Document doc ) throws IOException { } }
Fighter fighter = new Fighter ( ) ; fighter . setSherdogUrl ( ParserUtils . getSherdogPageUrl ( doc ) ) ; logger . info ( "Refreshing fighter {}" , fighter . getSherdogUrl ( ) ) ; try { Elements name = doc . select ( ".bio_fighter h1 span.fn" ) ; fighter . setName ( name . get ( 0 ) . html ( ) ) ; } catch ( Exception e ) { // no info , skipping } // Getting nick name try { Elements nickname = doc . select ( ".bio_fighter span.nickname em" ) ; fighter . setNickname ( nickname . get ( 0 ) . html ( ) ) ; } catch ( Exception e ) { // no info , skipping } // Birthday try { Elements birthday = doc . select ( "span[itemprop=\"birthDate\"]" ) ; fighter . setBirthday ( df . parse ( birthday . get ( 0 ) . html ( ) ) ) ; } catch ( Exception e ) { // no info , skipping } // height try { Elements height = doc . select ( ".size_info .height strong" ) ; fighter . setHeight ( height . get ( 0 ) . html ( ) ) ; } catch ( Exception e ) { // no info , skipping } // weight try { Elements weight = doc . select ( ".size_info .weight strong" ) ; fighter . setWeight ( weight . get ( 0 ) . html ( ) ) ; } catch ( Exception e ) { // no info , skipping } // wins try { Elements wins = doc . select ( ".bio_graph .counter" ) ; fighter . setWins ( Integer . parseInt ( wins . get ( 0 ) . html ( ) ) ) ; } catch ( Exception e ) { // no info , skipping } Elements winsMethods = doc . select ( ".bio_graph:first-of-type .graph_tag" ) ; try { fighter . setWinsKo ( Integer . parseInt ( winsMethods . get ( METHOD_KO ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } try { fighter . setWinsSub ( Integer . parseInt ( winsMethods . get ( METHOD_SUBMISSION ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } try { fighter . setWinsDec ( Integer . parseInt ( winsMethods . get ( METHOD_DECISION ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } try { fighter . setWinsOther ( Integer . parseInt ( winsMethods . get ( METHOD_OTHERS ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } // loses try { Elements losses = doc . select ( ".bio_graph.loser .counter" ) ; fighter . setLosses ( Integer . parseInt ( losses . get ( 0 ) . html ( ) ) ) ; } catch ( Exception e ) { // no info , skipping } Elements lossesMethods = doc . select ( ".bio_graph.loser .graph_tag" ) ; try { fighter . setLossesKo ( ( Integer . parseInt ( lossesMethods . get ( METHOD_KO ) . html ( ) . split ( " " ) [ 0 ] ) ) ) ; } catch ( Exception e ) { // no info , skipping } try { fighter . setLossesSub ( Integer . parseInt ( lossesMethods . get ( METHOD_SUBMISSION ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } try { fighter . setLossesDec ( Integer . parseInt ( lossesMethods . get ( METHOD_DECISION ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } try { fighter . setLossesOther ( Integer . parseInt ( lossesMethods . get ( METHOD_OTHERS ) . html ( ) . split ( " " ) [ 0 ] ) ) ; } catch ( Exception e ) { // no info , skipping } // draws and NC Elements drawsNc = doc . select ( ".right_side .bio_graph .card" ) ; for ( Element element : drawsNc ) { switch ( element . select ( "span.result" ) . html ( ) ) { case "Draws" : fighter . setDraws ( Integer . parseInt ( element . select ( "span.counter" ) . html ( ) ) ) ; break ; case "N/C" : fighter . setNc ( Integer . parseInt ( element . select ( "span.counter" ) . 
html ( ) ) ) ; break ; } } Elements picture = doc . select ( ".bio_fighter .content img[itemprop=\"image\"]" ) ; String pictureUrl = "https://www.sherdog.com" + picture . attr ( "src" ) . trim ( ) ; Elements fightTables = doc . select ( ".fight_history" ) ; logger . info ( "Found {} fight history tables" , fightTables . size ( ) ) ; fightTables . stream ( ) // excluding upcoming fights . filter ( div -> ! div . select ( ".module_header h2" ) . html ( ) . trim ( ) . contains ( "Upcoming" ) ) . collect ( Collectors . groupingBy ( div -> { String categoryName = div . select ( ".module_header h2" ) . html ( ) . trim ( ) . replaceAll ( "(?i)FIGHT HISTORY - " , "" ) . trim ( ) ; return FightType . fromString ( categoryName ) ; } ) ) . forEach ( ( key , div ) -> div . stream ( ) . map ( d -> d . select ( ".table table tr" ) ) . filter ( tdList -> tdList . size ( ) > 0 ) . findFirst ( ) . ifPresent ( tdList -> { List < Fight > f = getFights ( tdList , fighter ) ; f . forEach ( fight -> fight . setType ( key ) ) ; fighter . getFights ( ) . addAll ( f ) ; } ) ) ; List < Fight > sorted = fighter . getFights ( ) . stream ( ) . sorted ( Comparator . comparing ( Fight :: getDate , Comparator . nullsFirst ( Comparator . naturalOrder ( ) ) ) ) . collect ( Collectors . toList ( ) ) ; fighter . setFights ( sorted ) ; logger . info ( "Found {} fights for {}" , fighter . getFights ( ) . size ( ) , fighter . getName ( ) ) ; // setting the picture last to make sure the fighter variable has all the data if ( pictureUrl . length ( ) > 0 ) { fighter . setPicture ( PROCESSOR . process ( pictureUrl , fighter ) ) ; } return fighter ;
public class ListUtil { /** * Returns the last element of a delimited list . * @ param list the list string * @ param delimiter the delimiter characters * @ param ignoreEmpty whether empty trailing elements are skipped * @ return the last element of the list */ public static String last ( String list , String delimiter , boolean ignoreEmpty ) { } }
if ( StringUtil . isEmpty ( list ) ) return "" ; int len = list . length ( ) ; char [ ] del ; if ( StringUtil . isEmpty ( delimiter ) ) { del = new char [ ] { ',' } ; } else del = delimiter . toCharArray ( ) ; int index ; int x ; while ( true ) { index = - 1 ; for ( int i = 0 ; i < del . length ; i ++ ) { x = list . lastIndexOf ( del [ i ] ) ; if ( x > index ) index = x ; } if ( index == - 1 ) { return list ; } else if ( index + 1 == len ) { if ( ! ignoreEmpty ) return "" ; list = list . substring ( 0 , len - 1 ) ; len -- ; } else { return list . substring ( index + 1 ) ; } }
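A minimal usage sketch for the method above; it assumes the ListUtil class (and its StringUtil helper) is available on the classpath, with package and imports omitted:
public class ListUtilLastDemo {
    public static void main(String[] args) {
        // trailing delimiters are dropped when ignoreEmpty is true
        System.out.println(ListUtil.last("a,b,c,,", ",", true));  // c
        // with ignoreEmpty=false a trailing delimiter yields the empty string
        System.out.println(ListUtil.last("a,b,c,", ",", false));  // (empty)
        // every character of the delimiter string is treated as a separate delimiter
        System.out.println(ListUtil.last("a;b,c", ",;", true));   // c
    }
}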
public class Hex { /** * Converts a hexadecimal character to an integer . * @ param ch * A character to convert to an integer digit * @ param index * The index of the character in the source * @ return An integer * @ throws DecoderException * Thrown if ch is an illegal hex character */ protected static int toDigit ( final char ch , final int index ) throws IllegalArgumentException { } }
final int digit = Character . digit ( ch , 16 ) ; if ( digit == - 1 ) { throw new IllegalArgumentException ( "Illegal hexadecimal character " + ch + " at index " + index ) ; } return digit ;
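The helper above delegates to Character.digit; a small JDK-only sketch of the mapping it relies on:
public class HexDigitDemo {
    public static void main(String[] args) {
        // Character.digit maps '0'-'9', 'a'-'f' and 'A'-'F' to 0..15
        System.out.println(Character.digit('f', 16)); // 15
        System.out.println(Character.digit('7', 16)); // 7
        // anything else yields -1, which toDigit converts into an IllegalArgumentException
        System.out.println(Character.digit('g', 16)); // -1
    }
}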
public class L2CacheRepositoryDecorator { /** * Retrieves a batch of Entity IDs . * < p > If currently in transaction , splits the ids into those that have been dirtied in the current * transaction and those that have been left untouched . The untouched ids are loaded through the * cache , the dirtied ids are loaded from the decorated repository directly . * @ param ids list of entity IDs to retrieve * @ return List of { @ link Entity } s , missing ones excluded . */ private List < Entity > findAllBatch ( List < Object > ids ) { } }
String entityTypeId = getEntityType ( ) . getId ( ) ; Multimap < Boolean , Object > partitionedIds = Multimaps . index ( ids , id -> transactionInformation . isEntityDirty ( EntityKey . create ( entityTypeId , id ) ) ) ; Collection < Object > cleanIds = partitionedIds . get ( false ) ; Collection < Object > dirtyIds = partitionedIds . get ( true ) ; Map < Object , Entity > result = newHashMap ( uniqueIndex ( l2Cache . getBatch ( delegate ( ) , cleanIds ) , Entity :: getIdValue ) ) ; result . putAll ( delegate ( ) . findAll ( dirtyIds . stream ( ) ) . collect ( toMap ( Entity :: getIdValue , e -> e ) ) ) ; return ids . stream ( ) . filter ( result :: containsKey ) . map ( result :: get ) . collect ( toList ( ) ) ;
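A self-contained sketch of the clean/dirty partitioning idiom used above; a plain Set stands in for transactionInformation (an assumption), while Multimaps.index is the real Guava call:
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;

public class PartitionIdsDemo {
    public static void main(String[] args) {
        // stand-in for transactionInformation: ids 2 and 5 were dirtied in the current transaction
        Set<Integer> dirtyInTransaction = ImmutableSet.of(2, 5);
        List<Integer> ids = Arrays.asList(1, 2, 3, 4, 5);
        // Multimaps.index groups the ids by the boolean "is dirty" key, preserving encounter order
        Multimap<Boolean, Integer> partitioned = Multimaps.index(ids, dirtyInTransaction::contains);
        Collection<Integer> cleanIds = partitioned.get(false); // [1, 3, 4] -> would be served through the L2 cache
        Collection<Integer> dirtyIds = partitioned.get(true);  // [2, 5]    -> would be loaded from the decorated repository
        System.out.println(cleanIds + " / " + dirtyIds);
    }
}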
public class LocalNetworkGatewaysInner { /** * Creates or updates a local network gateway in the specified resource group . * @ param resourceGroupName The name of the resource group . * @ param localNetworkGatewayName The name of the local network gateway . * @ param parameters Parameters supplied to the create or update local network gateway operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < LocalNetworkGatewayInner > beginCreateOrUpdateAsync ( String resourceGroupName , String localNetworkGatewayName , LocalNetworkGatewayInner parameters , final ServiceCallback < LocalNetworkGatewayInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , localNetworkGatewayName , parameters ) , serviceCallback ) ;
public class X509Utils { /** * Saves the certificate to the file system . If the destination filename * ends with the pem extension , the certificate is written in the PEM format , * otherwise the certificate is written in the DER format . * @ param cert * @ param targetFile */ public static void saveCertificate ( X509Certificate cert , File targetFile ) { } }
File folder = targetFile . getAbsoluteFile ( ) . getParentFile ( ) ; if ( ! folder . exists ( ) ) { folder . mkdirs ( ) ; } File tmpFile = new File ( folder , Long . toHexString ( System . currentTimeMillis ( ) ) + ".tmp" ) ; try { boolean asPem = targetFile . getName ( ) . toLowerCase ( ) . endsWith ( ".pem" ) ; if ( asPem ) { // PEM encoded X509 PEMWriter pemWriter = null ; try { pemWriter = new PEMWriter ( new FileWriter ( tmpFile ) ) ; pemWriter . writeObject ( cert ) ; pemWriter . flush ( ) ; } finally { if ( pemWriter != null ) { pemWriter . close ( ) ; } } } else { // DER encoded X509 FileOutputStream fos = null ; try { fos = new FileOutputStream ( tmpFile ) ; fos . write ( cert . getEncoded ( ) ) ; fos . flush ( ) ; } finally { if ( fos != null ) { fos . close ( ) ; } } } // rename tmp file to target if ( targetFile . exists ( ) ) { targetFile . delete ( ) ; } tmpFile . renameTo ( targetFile ) ; } catch ( Exception e ) { if ( tmpFile . exists ( ) ) { tmpFile . delete ( ) ; } throw new RuntimeException ( "Failed to save certificate " + cert . getSubjectX500Principal ( ) . getName ( ) , e ) ; }
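A hedged usage sketch: the file names are placeholders, X509Utils is the class above, and the certificate is loaded with the standard CertificateFactory API:
import java.io.File;
import java.io.FileInputStream;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;

public class SaveCertificateDemo {
    public static void main(String[] args) throws Exception {
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        X509Certificate cert;
        try (FileInputStream in = new FileInputStream("server.crt")) { // placeholder path
            cert = (X509Certificate) cf.generateCertificate(in);
        }
        // a ".pem" extension selects PEM output, any other extension is written DER-encoded
        X509Utils.saveCertificate(cert, new File("server.pem"));
        X509Utils.saveCertificate(cert, new File("server.der"));
    }
}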
public class JavaGenerator { private MethodSpec newBuilder ( NameAllocator nameAllocator , MessageType message ) { } }
NameAllocator localNameAllocator = nameAllocator . clone ( ) ; String builderName = localNameAllocator . newName ( "builder" ) ; ClassName javaType = ( ClassName ) typeName ( message . type ( ) ) ; ClassName builderJavaType = javaType . nestedClass ( "Builder" ) ; MethodSpec . Builder result = MethodSpec . methodBuilder ( "newBuilder" ) . addAnnotation ( Override . class ) . addModifiers ( PUBLIC ) . returns ( builderJavaType ) . addStatement ( "$1T $2L = new $1T()" , builderJavaType , builderName ) ; List < Field > fields = message . fieldsAndOneOfFields ( ) ; for ( Field field : fields ) { String fieldName = localNameAllocator . get ( field ) ; if ( field . isRepeated ( ) || field . type ( ) . isMap ( ) ) { result . addStatement ( "$1L.$2L = $3T.copyOf($2S, $2L)" , builderName , fieldName , Internal . class ) ; } else { result . addStatement ( "$1L.$2L = $2L" , builderName , fieldName ) ; } } result . addStatement ( "$L.addUnknownFields(unknownFields())" , builderName ) ; result . addStatement ( "return $L" , builderName ) ; return result . build ( ) ;
public class NetUtils { /** * Handle the transition from pairs of attributes specifying a host and port * to a single colon separated one . * @ param conf the configuration to check * @ param oldBindAddressName the old address attribute name * @ param oldPortName the old port attribute name * @ param newBindAddressName the new combined name * @ return the complete address from the configuration */ @ Deprecated public static String getServerAddress ( Configuration conf , String oldBindAddressName , String oldPortName , String newBindAddressName ) { } }
String oldAddr = conf . get ( oldBindAddressName ) ; int oldPort = conf . getInt ( oldPortName , 0 ) ; String newAddrPort = conf . get ( newBindAddressName ) ; if ( oldAddr == null && oldPort == 0 ) { return toIpPort ( createSocketAddr ( newAddrPort ) ) ; } InetSocketAddress newAddr = NetUtils . createSocketAddr ( newAddrPort ) ; if ( oldAddr == null ) { oldAddr = newAddr . getAddress ( ) . getHostAddress ( ) ; } else { LOG . warn ( "Configuration parameter " + oldBindAddressName + " is deprecated. Use " + newBindAddressName + " instead." ) ; } if ( oldPort == 0 ) { oldPort = newAddr . getPort ( ) ; } else { LOG . warn ( "Configuration parameter " + oldPortName + " is deprecated. Use " + newBindAddressName + " instead." ) ; } try { return toIpPort ( oldAddr , oldPort ) ; } catch ( UnknownHostException e ) { LOG . error ( "DNS not supported." ) ; LOG . fatal ( e ) ; } return oldAddr + ":" + oldPort ;
public class PaymentKit { /** * urlEncode * @ param src * the WeChat parameter to encode * @ return String * @ throws UnsupportedEncodingException * if the requested encoding is not supported */ public static String urlEncode ( String src ) throws UnsupportedEncodingException { } }
return URLEncoder . encode ( src , Charsets . UTF_8 . name ( ) ) . replace ( "+" , "%20" ) ;
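A JDK-only sketch of the same encoding rule; StandardCharsets is used here instead of the Charsets constant from the surrounding project:
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class UrlEncodeDemo {
    public static void main(String[] args) throws Exception {
        String src = "body=a b";
        // URLEncoder encodes spaces as '+', so the helper rewrites them as %20
        String encoded = URLEncoder.encode(src, StandardCharsets.UTF_8.name()).replace("+", "%20");
        System.out.println(encoded); // body%3Da%20b
    }
}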
public class FastTimeZone { /** * Gets a TimeZone with GMT offsets . A GMT offset must be either ' Z ' , or ' UTC ' , or match * < em > ( GMT ) ? hh ? ( : ? mm ? ) ? < / em > , where h and m are digits representing hours and minutes . * @ param pattern The GMT offset * @ return A TimeZone with offset from GMT or null , if pattern does not match . */ public static TimeZone getGmtTimeZone ( final String pattern ) { } }
if ( "Z" . equals ( pattern ) || "UTC" . equals ( pattern ) ) { return GREENWICH ; } final Matcher m = GMT_PATTERN . matcher ( pattern ) ; if ( m . matches ( ) ) { final int hours = parseInt ( m . group ( 2 ) ) ; final int minutes = parseInt ( m . group ( 4 ) ) ; if ( hours == 0 && minutes == 0 ) { return GREENWICH ; } return new GmtTimeZone ( parseSign ( m . group ( 1 ) ) , hours , minutes ) ; } return null ;
public class PeepholeReplaceKnownMethods { /** * Try to evaluate known Numeric methods * parseInt ( ) , parseFloat ( ) */ private Node tryFoldKnownNumericMethods ( Node subtree , Node callTarget ) { } }
checkArgument ( subtree . isCall ( ) ) ; if ( isASTNormalized ( ) ) { // check if this is a call on a string method // then dispatch to specific folding method . String functionNameString = callTarget . getString ( ) ; Node firstArgument = callTarget . getNext ( ) ; if ( ( firstArgument != null ) && ( firstArgument . isString ( ) || firstArgument . isNumber ( ) ) && ( functionNameString . equals ( "parseInt" ) || functionNameString . equals ( "parseFloat" ) ) ) { subtree = tryFoldParseNumber ( subtree , functionNameString , firstArgument ) ; } } return subtree ;
public class DirectMappingEngine { /** * NOT THREAD - SAFE ( not reentrant ) */ private BootstrappingResults bootstrapMappingAndOntology ( String baseIRI , Optional < SQLPPMapping > inputPPMapping , Optional < OWLOntology > inputOntology ) throws MappingBootstrappingException { } }
this . baseIRI = fixBaseURI ( baseIRI ) ; try { SQLPPMapping newPPMapping = extractPPMapping ( inputPPMapping ) ; OWLOntology ontology = inputOntology . isPresent ( ) ? inputOntology . get ( ) : OWLManager . createOWLOntologyManager ( ) . createOntology ( IRI . create ( baseIRI ) ) ; // update ontology OWLOntologyManager manager = ontology . getOWLOntologyManager ( ) ; Set < OWLDeclarationAxiom > declarationAxioms = extractDeclarationAxioms ( manager , newPPMapping . getTripleMaps ( ) . stream ( ) . flatMap ( ax -> ax . getTargetAtoms ( ) . stream ( ) ) ) ; manager . addAxioms ( ontology , declarationAxioms ) ; return new DefaultBootstrappingResults ( newPPMapping , ontology ) ; } catch ( SQLException | MappingException | OWLOntologyCreationException e ) { throw new MappingBootstrappingException ( e ) ; }
public class AbstractListPreference { /** * Obtains the item color of the preference ' s dialog from a specific typed array . * @ param typedArray * The typed array , the item color should be obtained from , as an instance of the class * { @ link TypedArray } . The typed array may not be null */ private void obtainDialogItemColor ( @ NonNull final TypedArray typedArray ) { } }
setDialogItemColor ( typedArray . getColor ( R . styleable . AbstractListPreference_dialogItemColor , - 1 ) ) ;
public class StringUtils { /** * < p > Abbreviates a String using ellipses . This will turn * " Now is the time for all good men " into " Now is the time for . . . " < / p > * < p > Specifically : < / p > * < ul > * < li > If the number of characters in { @ code str } is less than or equal to * { @ code maxWidth } , return { @ code str } . < / li > * < li > Else abbreviate it to { @ code ( substring ( str , 0 , max - 3 ) + " . . . " ) } . < / li > * < li > If { @ code maxWidth } is less than { @ code 4 } , throw an * { @ code IllegalArgumentException } . < / li > * < li > In no case will it return a String of length greater than * { @ code maxWidth } . < / li > * < / ul > * < pre > * StringUtils . abbreviate ( null , * ) = null * StringUtils . abbreviate ( " " , 4 ) = " " * StringUtils . abbreviate ( " abcdefg " , 6 ) = " abc . . . " * StringUtils . abbreviate ( " abcdefg " , 7 ) = " abcdefg " * StringUtils . abbreviate ( " abcdefg " , 8 ) = " abcdefg " * StringUtils . abbreviate ( " abcdefg " , 4 ) = " a . . . " * StringUtils . abbreviate ( " abcdefg " , 3 ) = IllegalArgumentException * < / pre > * @ param str the String to check , may be null * @ param maxWidth maximum length of result String , must be at least 4 * @ return abbreviated String , { @ code null } if null String input * @ throws IllegalArgumentException if the width is too small * @ since 2.0 */ public static String abbreviate ( final String str , final int maxWidth ) { } }
final String defaultAbbrevMarker = "..." ; return abbreviate ( str , defaultAbbrevMarker , 0 , maxWidth ) ;
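A usage sketch mirroring the examples in the Javadoc above (standard commons-lang3 import assumed):
import org.apache.commons.lang3.StringUtils;

public class AbbreviateDemo {
    public static void main(String[] args) {
        System.out.println(StringUtils.abbreviate("abcdefg", 6)); // abc...
        System.out.println(StringUtils.abbreviate("abcdefg", 7)); // abcdefg (fits, returned unchanged)
        System.out.println(StringUtils.abbreviate("abcdefg", 4)); // a...
        // a maxWidth below 4 cannot hold one character plus the "..." marker
        try {
            StringUtils.abbreviate("abcdefg", 3);
        } catch (IllegalArgumentException e) {
            System.out.println("too narrow: " + e.getMessage());
        }
    }
}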
public class TransformMojo { /** * Writes mappings to file in Sling compatible JSON format . * @ param i18nMap mappings * @ param targetfile target file * @ param selectedOutputFormat Output format * @ throws IOException * @ throws JSONException */ private void writeTargetI18nFile ( SlingI18nMap i18nMap , File targetfile , OutputFormat selectedOutputFormat ) throws IOException , JSONException { } }
if ( selectedOutputFormat == OutputFormat . XML ) { FileUtils . fileWrite ( targetfile , CharEncoding . UTF_8 , i18nMap . getI18nXmlString ( ) ) ; } else if ( selectedOutputFormat == OutputFormat . PROPERTIES ) { FileUtils . fileWrite ( targetfile , CharEncoding . ISO_8859_1 , i18nMap . getI18nPropertiesString ( ) ) ; } else { FileUtils . fileWrite ( targetfile , CharEncoding . UTF_8 , i18nMap . getI18nJsonString ( ) ) ; } buildContext . refresh ( targetfile ) ;
public class MediaType { /** * Returns the media type sectors . * @ param text The media type text . * @ return String array with the sectors of the media type . */ private static String [ ] sectors ( final String text ) { } }
return new EnglishLowerCase ( MediaType . split ( text ) [ 0 ] ) . string ( ) . split ( "/" , 2 ) ;
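MediaType.split and EnglishLowerCase are framework helpers; a JDK-only approximation of what the line above computes:
import java.util.Locale;

public class MediaTypeSectorsDemo {
    public static void main(String[] args) {
        String text = "Text/HTML; charset=UTF-8";
        // drop the parameters after ';', lowercase, then split type/subtype into at most two parts
        String[] sectors = text.split(";", 2)[0].toLowerCase(Locale.ENGLISH).split("/", 2);
        System.out.println(sectors[0]); // text
        System.out.println(sectors[1]); // html
    }
}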
public class AstUtil { /** * Tells you if the expression is a method call for a certain method name with a certain * number of arguments . * @ param expression * the ( potentially ) method call * @ param methodName * the name of the method expected * @ param numArguments * number of expected arguments * @ return * as described */ public static boolean isMethodCall ( Expression expression , String methodName , Range numArguments ) { } }
if ( expression instanceof MethodCallExpression && AstUtil . isMethodNamed ( ( MethodCallExpression ) expression , methodName ) ) { int arity = AstUtil . getMethodArguments ( expression ) . size ( ) ; if ( arity >= ( Integer ) numArguments . getFrom ( ) && arity <= ( Integer ) numArguments . getTo ( ) ) { return true ; } } return false ;
public class InteropFramework { /** * A method to connect to a URL and follow redirects if any . * @ param theURL a URL to connect to * @ return a { @ link URLConnection } * @ throws IOException if connection cannot be opened and no response is received . */ public URLConnection connectWithRedirect ( URL theURL ) throws IOException { } }
URLConnection conn = null ; String accept_header = buildAcceptHeader ( ) ; int redirect_count = 0 ; boolean done = false ; while ( ! done ) { if ( theURL . getProtocol ( ) . equals ( "file" ) ) { return null ; } Boolean isHttp = ( theURL . getProtocol ( ) . equals ( "http" ) || theURL . getProtocol ( ) . equals ( "https" ) ) ; logger . debug ( "Requesting: " + theURL . toString ( ) ) ; conn = theURL . openConnection ( ) ; if ( isHttp ) { logger . debug ( "Accept: " + accept_header ) ; conn . setRequestProperty ( "Accept" , accept_header ) ; } conn . setConnectTimeout ( 60000 ) ; conn . setReadTimeout ( 60000 ) ; conn . connect ( ) ; done = true ; // by default quit after one request if ( isHttp ) { logger . debug ( "Response: " + conn . getHeaderField ( 0 ) ) ; int rc = ( ( HttpURLConnection ) conn ) . getResponseCode ( ) ; if ( ( rc == HttpURLConnection . HTTP_MOVED_PERM ) || ( rc == HttpURLConnection . HTTP_MOVED_TEMP ) || ( rc == HttpURLConnection . HTTP_SEE_OTHER ) || ( rc == 307 ) ) { if ( redirect_count > 10 ) { return null ; // Error : too many redirects } redirect_count ++ ; String loc = conn . getHeaderField ( "Location" ) ; if ( loc != null ) { theURL = new URL ( loc ) ; done = false ; } else { return null ; // Bad redirect } } else if ( ( rc < 200 ) || ( rc >= 300 ) ) { return null ; // Unsuccessful } } } return conn ;
public class DescribeWorkspaceImagesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeWorkspaceImagesRequest describeWorkspaceImagesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeWorkspaceImagesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeWorkspaceImagesRequest . getImageIds ( ) , IMAGEIDS_BINDING ) ; protocolMarshaller . marshall ( describeWorkspaceImagesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( describeWorkspaceImagesRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Aggregation { /** * Returns a { @ link StreamSupplier } of the records retrieved from aggregation for the specified query . * @ param < T > type of output objects * @ param query query * @ param outputClass class of output records * @ return supplier that streams query results */ @ Override public < T > StreamSupplier < T > query ( AggregationQuery query , Class < T > outputClass , DefiningClassLoader queryClassLoader ) { } }
checkArgument ( iterate ( queryClassLoader , Objects :: nonNull , ClassLoader :: getParent ) . anyMatch ( isEqual ( classLoader ) ) , "Unrelated queryClassLoader" ) ; List < String > fields = getMeasures ( ) . stream ( ) . filter ( query . getMeasures ( ) :: contains ) . collect ( toList ( ) ) ; List < AggregationChunk > allChunks = state . findChunks ( query . getPredicate ( ) , fields ) ; return consolidatedSupplier ( query . getKeys ( ) , fields , outputClass , query . getPredicate ( ) , allChunks , queryClassLoader ) ;
public class AWSDeviceFarmClient { /** * Updates the network profile with specific settings . * @ param updateNetworkProfileRequest * @ return Result of the UpdateNetworkProfile operation returned by the service . * @ throws ArgumentException * An invalid argument was specified . * @ throws NotFoundException * The specified entity was not found . * @ throws LimitExceededException * A limit was exceeded . * @ throws ServiceAccountException * There was a problem with the service account . * @ sample AWSDeviceFarm . UpdateNetworkProfile * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / devicefarm - 2015-06-23 / UpdateNetworkProfile " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateNetworkProfileResult updateNetworkProfile ( UpdateNetworkProfileRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeUpdateNetworkProfile ( request ) ;
public class CompoundConstraint { /** * Add the list of constraints to the set of constraints aggregated by this * compound constraint . * @ param constraints * the list of constraints to add * @ return A reference to this , to support chaining . */ public CompoundConstraint addAll ( List constraints ) { } }
Algorithms . instance ( ) . forEach ( constraints , new Block ( ) { protected void handle ( Object o ) { add ( ( Constraint ) o ) ; } } ) ; return this ;
public class Range { /** * Clamp an int to a min / max value . * @ param n The value to be clamped * @ param min The minimum * @ param max The maximum * @ return The value passed in , or minimum if n & lt ; minimum , or maximum if n & gt ; maximum */ public static int clamp ( int n , int min , int max ) { } }
if ( n < min ) return min ; if ( n > max ) return max ; return n ;
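A trivial usage sketch, assuming the Range class above is importable:
public class ClampDemo {
    public static void main(String[] args) {
        System.out.println(Range.clamp(15, 0, 10)); // 10, capped at the maximum
        System.out.println(Range.clamp(-3, 0, 10)); // 0, raised to the minimum
        System.out.println(Range.clamp(7, 0, 10));  // 7, already inside the range
    }
}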
public class DenseTensorBuilder { /** * Increment algorithm for the case where both tensors have the same set of * dimensions . * @ param other * @ param multiplier */ private void simpleIncrement ( TensorBase other , double multiplier ) { } }
Preconditions . checkArgument ( Arrays . equals ( other . getDimensionNumbers ( ) , getDimensionNumbers ( ) ) ) ; if ( other instanceof DenseTensorBase ) { double [ ] otherTensorValues = ( ( DenseTensorBase ) other ) . values ; Preconditions . checkArgument ( otherTensorValues . length == values . length ) ; int length = values . length ; for ( int i = 0 ; i < length ; i ++ ) { values [ i ] += otherTensorValues [ i ] * multiplier ; } } else { int otherSize = other . size ( ) ; for ( int i = 0 ; i < otherSize ; i ++ ) { long keyNum = other . indexToKeyNum ( i ) ; double value = other . getByIndex ( i ) ; values [ keyNumToIndex ( keyNum ) ] += value * multiplier ; } }
public class AbstractExtractionCondition { /** * { @ inheritDoc } * @ see jp . co . future . uroborosql . fluent . ExtractionCondition # isNull ( java . lang . String ) */ @ SuppressWarnings ( "unchecked" ) @ Override public T isNull ( final String col ) { } }
context ( ) . param ( CaseFormat . CAMEL_CASE . convert ( col ) , new IsNull ( col ) ) ; this . useOperator = true ; return ( T ) this ;
public class JsonMapper { /** * Adds a serializer to this mapper . Allows a user to alter the serialization behavior for a certain type . * @ param classToMap the class to map * @ param classSerializer the serializer * @ param < T > the type of objects that will be serialized by the given serializer */ public < T > void addClassSerializer ( Class < ? extends T > classToMap , JsonSerializer < T > classSerializer ) { } }
setNewObjectMapper ( ) ; // Is this right , setting a new object mapper on each add operation ? SimpleModule mod = new SimpleModule ( "GeolatteCommonModule-" + classSerializer . getClass ( ) . getSimpleName ( ) ) ; mod . addSerializer ( classToMap , classSerializer ) ; mapper . registerModule ( mod ) ;
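A standalone sketch of the equivalent Jackson 2 pattern; the JsonMapper wrapper above belongs to the surrounding project, and only ObjectMapper, SimpleModule and JsonSerializer are standard Jackson API:
import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class CustomSerializerDemo {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        SimpleModule mod = new SimpleModule("demo-module");
        // alter how Double values are written: two decimal places, emitted as a JSON string
        mod.addSerializer(Double.class, new JsonSerializer<Double>() {
            @Override
            public void serialize(Double value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
                gen.writeString(String.format("%.2f", value));
            }
        });
        mapper.registerModule(mod);
        System.out.println(mapper.writeValueAsString(3.14159)); // "3.14"
    }
}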
public class Convolution { /** * Pooling 2d implementation * @ param img * @ param kh * @ param kw * @ param sy * @ param sx * @ param ph * @ param pw * @ param dh * @ param dw * @ param isSameMode * @ param type * @ param extra optional argument . I . e . used in pnorm pooling . * @ param virtualHeight * @ param virtualWidth * @ param out * @ return */ public static INDArray pooling2D ( INDArray img , int kh , int kw , int sy , int sx , int ph , int pw , int dh , int dw , boolean isSameMode , Pooling2D . Pooling2DType type , Pooling2D . Divisor divisor , double extra , int virtualHeight , int virtualWidth , INDArray out ) { } }
Pooling2D pooling = Pooling2D . builder ( ) . arrayInputs ( new INDArray [ ] { img } ) . arrayOutputs ( new INDArray [ ] { out } ) . config ( Pooling2DConfig . builder ( ) . dH ( dh ) . dW ( dw ) . extra ( extra ) . kH ( kh ) . kW ( kw ) . pH ( ph ) . pW ( pw ) . isSameMode ( isSameMode ) . sH ( sy ) . sW ( sx ) . virtualHeight ( virtualHeight ) . virtualWidth ( virtualWidth ) . type ( type ) . divisor ( divisor ) . build ( ) ) . build ( ) ; Nd4j . getExecutioner ( ) . execAndReturn ( pooling ) ; return out ;
public class CollectionUtils { /** * Counts the number of elements in the { @ link Iterable } collection . If { @ link Iterable } is null or contains * no elements , then count will be 0 . If the { @ link Iterable } is a { @ link Collection } , then { @ link Collection # size ( ) } * is returned , otherwise the elements of the { @ link Iterable } are iterated over , counting the number of elements * in the iteration in order to determine its size . * @ param iterable { @ link Iterable } collection of elements being evaluated . * @ return an integer value indicating the number of elements in the { @ link Iterable } collection ( i . e . size ) . * @ see java . lang . Iterable * @ see java . util . Collection # size ( ) * @ see # count ( Iterable , Filter ) */ @ NullSafe public static long count ( Iterable < ? > iterable ) { } }
return iterable instanceof Collection ? ( ( Collection ) iterable ) . size ( ) : count ( iterable , ( element ) -> true ) ;
public class DefaultNodeManager { /** * Handles a file upload . */ private void handleUpload ( AsyncFile file , String address , Handler < AsyncResult < Void > > doneHandler ) { } }
vertx . eventBus ( ) . registerHandler ( address , handleUpload ( file , address ) , doneHandler ) ;
public class DockerAssemblyManager { /** * Create a docker tar archive from the given configuration which can be sent to the Docker host for * creating the image . * @ param imageName Name of the image to create ( used for creating build directories ) * @ param params Mojos parameters ( used for finding the directories ) * @ param buildConfig configuration for how to build the image * @ param log Logger used to display a warning if permissions are to be normalized * @ param finalCustomizer finalCustomizer to be applied to the tar archive * @ return file holding the path to the created assembly tar file * @ throws MojoExecutionException */ public File createDockerTarArchive ( String imageName , final MojoParameters params , final BuildImageConfiguration buildConfig , Logger log , ArchiverCustomizer finalCustomizer ) throws MojoExecutionException { } }
final BuildDirs buildDirs = createBuildDirs ( imageName , params ) ; final AssemblyConfiguration assemblyConfig = buildConfig . getAssemblyConfiguration ( ) ; final List < ArchiverCustomizer > archiveCustomizers = new ArrayList < > ( ) ; // Build up assembly . In dockerfile mode this must be added explicitly in the Dockerfile with an ADD if ( hasAssemblyConfiguration ( assemblyConfig ) ) { createAssemblyArchive ( assemblyConfig , params , buildDirs ) ; } try { if ( buildConfig . isDockerFileMode ( ) ) { // Use specified docker directory which must include a Dockerfile . final File dockerFile = buildConfig . getAbsoluteDockerFilePath ( params ) ; if ( ! dockerFile . exists ( ) ) { throw new MojoExecutionException ( "Configured Dockerfile \"" + buildConfig . getDockerFile ( ) + "\" (resolved to \"" + dockerFile + "\") doesn't exist" ) ; } FixedStringSearchInterpolator interpolator = DockerFileUtil . createInterpolator ( params , buildConfig . getFilter ( ) ) ; verifyGivenDockerfile ( dockerFile , buildConfig , interpolator , log ) ; interpolateDockerfile ( dockerFile , buildDirs , interpolator ) ; // User dedicated Dockerfile from extra directory archiveCustomizers . add ( new ArchiverCustomizer ( ) { @ Override public TarArchiver customize ( TarArchiver archiver ) throws IOException { DefaultFileSet fileSet = DefaultFileSet . fileSet ( buildConfig . getAbsoluteContextDirPath ( params ) ) ; addDockerIncludesExcludesIfPresent ( fileSet , params ) ; // Exclude non - interpolated dockerfile from source tree // Interpolated Dockerfile is already added as it was created into the output directory when // using dir dir mode excludeDockerfile ( fileSet , dockerFile ) ; // If the content is added as archive , then we need to add the Dockerfile from the builddir // directly to docker . tar ( as the output builddir is not picked up in archive mode ) if ( isArchive ( assemblyConfig ) ) { String name = dockerFile . getName ( ) ; archiver . addFile ( new File ( buildDirs . getOutputDirectory ( ) , name ) , name ) ; } archiver . addFileSet ( fileSet ) ; return archiver ; } } ) ; } else { // Create custom docker file in output dir DockerFileBuilder builder = createDockerFileBuilder ( buildConfig , assemblyConfig ) ; builder . write ( buildDirs . getOutputDirectory ( ) ) ; // Add own Dockerfile final File dockerFile = new File ( buildDirs . getOutputDirectory ( ) , DOCKERFILE_NAME ) ; archiveCustomizers . add ( new ArchiverCustomizer ( ) { @ Override public TarArchiver customize ( TarArchiver archiver ) throws IOException { archiver . addFile ( dockerFile , DOCKERFILE_NAME ) ; return archiver ; } } ) ; } // If required make all files in the assembly executable if ( assemblyConfig != null ) { AssemblyConfiguration . PermissionMode mode = assemblyConfig . getPermissions ( ) ; if ( mode == AssemblyConfiguration . PermissionMode . exec || mode == AssemblyConfiguration . PermissionMode . auto && EnvUtil . isWindows ( ) ) { archiveCustomizers . add ( new AllFilesExecCustomizer ( log ) ) ; } } if ( finalCustomizer != null ) { archiveCustomizers . add ( finalCustomizer ) ; } return createBuildTarBall ( buildDirs , archiveCustomizers , assemblyConfig , buildConfig . getCompression ( ) ) ; } catch ( IOException e ) { throw new MojoExecutionException ( String . format ( "Cannot create %s in %s" , DOCKERFILE_NAME , buildDirs . getOutputDirectory ( ) ) , e ) ; }
public class ShakeRenderer { /** * Decode to be used to implement an AJAX version of TabView . This method * receives and processes input made by the user . More specifically , it * checks whether the user has interacted with the current b : tabView . The * default implementation simply stores the input value in the list of * submitted values . If the validation checks are passed , the values in the * < code > submittedValues < / code > list are stored in the backend bean . * @ param context * the FacesContext . * @ param component * the current b : tabView . */ @ Override public void decode ( FacesContext context , UIComponent component ) { } }
TabView tabView = ( TabView ) component ; decodeBehaviors ( context , tabView ) ; String clientId = tabView . getClientId ( context ) ; String activeIndexId = clientId . replace ( ":" , "_" ) + "_activeIndex" ; String activeIndexValue = ( String ) context . getExternalContext ( ) . getRequestParameterMap ( ) . get ( activeIndexId ) ; new AJAXRenderer ( ) . decode ( context , component ) ; if ( null != activeIndexValue && activeIndexValue . length ( ) > 0 ) { try { if ( Integer . valueOf ( activeIndexValue ) != tabView . getActiveIndex ( ) ) { tabView . setActiveIndex ( Integer . valueOf ( activeIndexValue ) ) ; } } catch ( NumberFormatException e ) { } }
public class SuperstepBarrier { /** * Barrier will release the waiting thread if an event occurs . */ @ Override public void onEvent ( TaskEvent event ) { } }
if ( event instanceof TerminationEvent ) { terminationSignaled = true ; } else if ( event instanceof AllWorkersDoneEvent ) { AllWorkersDoneEvent wde = ( AllWorkersDoneEvent ) event ; aggregatorNames = wde . getAggregatorNames ( ) ; aggregates = wde . getAggregates ( userCodeClassLoader ) ; } else { throw new IllegalArgumentException ( "Unknown event type." ) ; } latch . countDown ( ) ;
public class RateThisApp { /** * Store install date . * Install date is retrieved from package manager if possible . * @ param context * @ param editor */ private static void storeInstallDate ( final Context context , SharedPreferences . Editor editor ) { } }
Date installDate = new Date ( ) ; if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . GINGERBREAD ) { PackageManager packMan = context . getPackageManager ( ) ; try { PackageInfo pkgInfo = packMan . getPackageInfo ( context . getPackageName ( ) , 0 ) ; installDate = new Date ( pkgInfo . firstInstallTime ) ; } catch ( PackageManager . NameNotFoundException e ) { e . printStackTrace ( ) ; } } editor . putLong ( KEY_INSTALL_DATE , installDate . getTime ( ) ) ; log ( "First install: " + installDate . toString ( ) ) ;
public class Tags { /** * Parse a string representing a tag . A tag string should have the format { @ code key = value } . * Whitespace at the ends of the key and value will be removed . Both the key and value must * have at least one character . * @ param tagString string with encoded tag * @ return tag parsed from the string */ public static Tag parseTag ( String tagString ) { } }
String k ; String v ; int eqIndex = tagString . indexOf ( "=" ) ; if ( eqIndex < 0 ) { throw new IllegalArgumentException ( "key and value must be separated by '='" ) ; } k = tagString . substring ( 0 , eqIndex ) . trim ( ) ; v = tagString . substring ( eqIndex + 1 , tagString . length ( ) ) . trim ( ) ; return newTag ( k , v ) ;
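A usage sketch, assuming the Tags class above is on the classpath (the exact toString of the returned Tag is implementation-dependent):
public class ParseTagDemo {
    public static void main(String[] args) {
        // whitespace around key and value is trimmed
        System.out.println(Tags.parseTag(" region = us-east-1 ")); // a Tag with key "region" and value "us-east-1"
        try {
            Tags.parseTag("no-separator");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // key and value must be separated by '='
        }
    }
}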
public class UidManager { /** * Helper to print the cells in a given family for a given row , if any . * @ param row The row to print . * @ param family Only cells in this family ( if any ) will be printed . * @ param formard If true , this row contains a forward mapping ( name to ID ) . * Otherwise the row is assumed to contain a reverse mapping ( ID to name ) . * @ return { @ code true } if at least one cell was printed . */ private static boolean printResult ( final ArrayList < KeyValue > row , final byte [ ] family , final boolean formard ) { } }
if ( null == row || row . isEmpty ( ) ) { return false ; } final byte [ ] key = row . get ( 0 ) . key ( ) ; String name = formard ? CliUtils . fromBytes ( key ) : null ; String id = formard ? null : Arrays . toString ( key ) ; boolean printed = false ; for ( final KeyValue kv : row ) { if ( ! Bytes . equals ( kv . family ( ) , family ) ) { continue ; } printed = true ; if ( formard ) { id = Arrays . toString ( kv . value ( ) ) ; } else { name = CliUtils . fromBytes ( kv . value ( ) ) ; } System . out . println ( CliUtils . fromBytes ( kv . qualifier ( ) ) + ' ' + name + ": " + id ) ; } return printed ;
public class PluginManager { /** * Returns a complete classpath for all loaded plugins */ public String getCompleteClassPath ( ) { } }
StringBuilder sb = new StringBuilder ( ) ; if ( baseClassPath != null ) { sb . append ( baseClassPath + File . pathSeparator ) ; } for ( Class < ? extends Plugin > pluginClass : implementations . keySet ( ) ) { Set < PluginContext > set = implementations . get ( pluginClass ) ; for ( PluginContext pluginContext : set ) { sb . append ( pluginContext . getClassLocation ( ) + File . pathSeparator ) ; } } return sb . toString ( ) ;
public class StringIterate { /** * Converts a string of tokens separated by the specified separator to a { @ link MutableList } . */ public static MutableList < String > trimmedTokensToList ( String string , String separator ) { } }
return StringIterate . trimStringList ( StringIterate . tokensToList ( string , separator ) ) ;
public class VariantContextConverter { /** * Assumes that ori is in the form " POS : REF : ALT _ 0 ( , ALT _ N ) * : ALT _ IDX " . * @ param ori * @ return */ protected static Integer getOriginalPosition ( String [ ] ori ) { } }
if ( ori != null && ori . length == 4 ) { return Integer . parseInt ( ori [ 0 ] ) ; } return null ;
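A usage sketch of the expected ori format; the method is protected, so this assumes a caller in the same package:
public class OriginalPositionDemo {
    public static void main(String[] args) {
        // an "ori" attribute of the form POS:REF:ALT_0(,ALT_N)*:ALT_IDX
        String[] parts = "100023:A:T,G:1".split(":"); // ["100023", "A", "T,G", "1"]
        System.out.println(VariantContextConverter.getOriginalPosition(parts)); // 100023
        // anything that does not split into exactly four pieces yields null
        System.out.println(VariantContextConverter.getOriginalPosition("100023:A".split(":"))); // null
    }
}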
public class IssueManager { /** * DEPRECATED . use category . delete ( ) instead * deletes an { @ link IssueCategory } . < br > * @ param category the { @ link IssueCategory } . * @ throws RedmineAuthenticationException thrown in case something went wrong while trying to login * @ throws RedmineException thrown in case something went wrong in Redmine * @ throws NotFoundException thrown in case an object can not be found */ @ Deprecated public void deleteCategory ( IssueCategory category ) throws RedmineException { } }
transport . deleteObject ( IssueCategory . class , Integer . toString ( category . getId ( ) ) ) ;
public class LdaGibbsSampler { /** * Driver with example data . * @ param args * @ throws IOException */ public static void main ( String [ ] args ) throws IOException { } }
String infile = "../example-data/data-lda.txt" ; String stopwordfile = "../models/stopwords/stopwords.txt" ; BufferedReader in = new BufferedReader ( new InputStreamReader ( new FileInputStream ( infile ) , "utf8" ) ) ; // BufferedWriter out = new BufferedWriter ( new OutputStreamWriter ( new FileOutputStream ( // outfile ) , enc2 ) ) ; StopWords sw = new StopWords ( stopwordfile ) ; LabelAlphabet dict = new LabelAlphabet ( ) ; // words in documents ArrayList < TIntArrayList > documentsList = new ArrayList < TIntArrayList > ( ) ; String line = null ; while ( ( line = in . readLine ( ) ) != null ) { line = line . trim ( ) ; if ( line . length ( ) == 0 ) continue ; String [ ] toks = line . split ( "\\s+" ) ; TIntArrayList wordlist = new TIntArrayList ( ) ; for ( int j = 0 ; j < toks . length ; j ++ ) { String tok = toks [ j ] ; if ( sw . isStopWord ( tok ) ) continue ; int idx = dict . lookupIndex ( tok ) ; wordlist . add ( idx ) ; } documentsList . add ( wordlist ) ; } in . close ( ) ; int [ ] [ ] documents ; documents = new int [ documentsList . size ( ) ] [ ] ; for ( int i = 0 ; i < documents . length ; i ++ ) { documents [ i ] = documentsList . get ( i ) . toArray ( ) ; } // vocabulary int V = dict . size ( ) ; int M = documents . length ; // # topics int K = 4 ; // good values alpha = 2 , beta = . 5 float alpha = 2f ; float beta = .5f ; System . out . println ( "Latent Dirichlet Allocation using Gibbs Sampling." ) ; LdaGibbsSampler lda = new LdaGibbsSampler ( documents , V ) ; lda . configure ( 10000 , 2000 , 100 , 10 ) ; lda . gibbs ( K , alpha , beta ) ; float [ ] [ ] theta = lda . getTheta ( ) ; float [ ] [ ] phi = lda . getPhi ( ) ; System . out . println ( ) ; System . out . println ( ) ; System . out . println ( "Document--Topic Associations, Theta[d][k] (alpha=" + alpha + ")" ) ; System . out . print ( "d\\k\t" ) ; for ( int m = 0 ; m < theta [ 0 ] . length ; m ++ ) { System . out . print ( " " + m % 10 + " " ) ; } System . out . println ( ) ; for ( int m = 0 ; m < theta . length ; m ++ ) { System . out . print ( m + "\t" ) ; for ( int k = 0 ; k < theta [ m ] . length ; k ++ ) { // System . out . print ( theta [ m ] [ k ] + " " ) ; System . out . print ( shadefloat ( theta [ m ] [ k ] , 1 ) + " " ) ; } System . out . println ( ) ; } System . out . println ( ) ; System . out . println ( "Topic--Term Associations, Phi[k][w] (beta=" + beta + ")" ) ; System . out . print ( "k\\w\t" ) ; for ( int w = 0 ; w < phi [ 0 ] . length ; w ++ ) { System . out . print ( " " + dict . lookupString ( w ) + " " ) ; } System . out . println ( ) ; for ( int k = 0 ; k < phi . length ; k ++ ) { System . out . print ( k + "\t" ) ; for ( int w = 0 ; w < phi [ k ] . length ; w ++ ) { System . out . print ( lnf . format ( phi [ k ] [ w ] ) + " " ) ; // System . out . print ( phi [ k ] [ w ] + " " ) ; // System . out . print ( shadefloat ( phi [ k ] [ w ] , 1 ) + " " ) ; } System . out . println ( ) ; } for ( int k = 0 ; k < phi . length ; k ++ ) { int [ ] top = MyArrays . sort ( phi [ k ] ) ; for ( int w = 0 ; w < 10 ; w ++ ) { System . out . print ( dict . lookupString ( top [ w ] ) + " " ) ; } System . out . println ( ) ; }
public class MBeanServers { /** * Fetch Jolokia MBeanServer when it gets registered , remove it if being unregistered * @ param notification notification emitted * @ param handback not used here */ public synchronized void handleNotification ( Notification notification , Object handback ) { } }
String type = notification . getType ( ) ; if ( REGISTRATION_NOTIFICATION . equals ( type ) ) { jolokiaMBeanServer = lookupJolokiaMBeanServer ( ) ; // We need to add the listener provided during construction time to add the Jolokia MBeanServer // so that it is kept updated , too . if ( jolokiaMBeanServerListener != null ) { JmxUtil . addMBeanRegistrationListener ( jolokiaMBeanServer , jolokiaMBeanServerListener , null ) ; } } else if ( UNREGISTRATION_NOTIFICATION . equals ( type ) ) { jolokiaMBeanServer = null ; } allMBeanServers . clear ( ) ; if ( jolokiaMBeanServer != null ) { allMBeanServers . add ( jolokiaMBeanServer ) ; } allMBeanServers . addAll ( detectedMBeanServers ) ;
public class ConstructorBuilder { /** * Build the signature . * @ param node the XML element that specifies which components to document * @ param constructorDocTree the content tree to which the documentation will be added */ public void buildSignature ( XMLNode node , Content constructorDocTree ) { } }
constructorDocTree . addContent ( writer . getSignature ( ( ConstructorDoc ) constructors . get ( currentConstructorIndex ) ) ) ;
public class GetSlotTypeVersionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetSlotTypeVersionsRequest getSlotTypeVersionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getSlotTypeVersionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getSlotTypeVersionsRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( getSlotTypeVersionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( getSlotTypeVersionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class GetObjectRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetObjectRequest getObjectRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getObjectRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getObjectRequest . getPath ( ) , PATH_BINDING ) ; protocolMarshaller . marshall ( getObjectRequest . getRange ( ) , RANGE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CaptchaValidateController { /** * The parameter _ captcha needs to be available in the request * @ return * @ throws Exception */ @ GET @ Path ( "validate" ) public boolean validate ( @ Context HttpServletRequest request ) throws Exception { } }
return captchaEngine . validate ( request ) ;
public class Unchecked { /** * Wrap a { @ link CheckedComparator } in a { @ link Comparator } with a custom handler for checked exceptions . */ public static < T > Comparator < T > comparator ( CheckedComparator < T > comparator , Consumer < Throwable > handler ) { } }
return ( t1 , t2 ) -> { try { return comparator . compare ( t1 , t2 ) ; } catch ( Throwable e ) { handler . accept ( e ) ; throw new IllegalStateException ( "Exception handler must throw a RuntimeException" , e ) ; } } ;
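A usage sketch, assuming the jOOλ package layout ( org.jooq.lambda ) for Unchecked and CheckedComparator; the checked exception declared by parse forces the CheckedComparator overload:
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import org.jooq.lambda.Unchecked;

public class UncheckedComparatorDemo {
    public static void main(String[] args) {
        // the comparator body may throw a checked exception; the handler rethrows it unchecked
        Comparator<String> byNumericValue = Unchecked.comparator(
                (a, b) -> Integer.compare(parse(a), parse(b)),
                e -> { throw new IllegalArgumentException("bad input", e); });
        List<String> values = Arrays.asList("10", "2", "33");
        values.sort(byNumericValue);
        System.out.println(values); // [2, 10, 33]
    }

    // declares a checked exception on purpose, so the lambda cannot be a plain Comparator
    private static int parse(String s) throws Exception {
        return Integer.parseInt(s.trim());
    }
}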
public class MutableFst { /** * Deletes a state . * @ param state the state to delete */ private void deleteState ( MutableState state ) { } }
if ( state . getId ( ) == this . start . getId ( ) ) { throw new IllegalArgumentException ( "Cannot delete start state." ) ; } // we ' re going to " compact " all of the nulls out and remap state ids at the end this . states . set ( state . getId ( ) , null ) ; if ( isUsingStateSymbols ( ) ) { stateSymbols . remove ( state . getId ( ) ) ; } // this state won ' t be incoming to any of its arc ' s targets anymore for ( MutableArc mutableArc : state . getArcs ( ) ) { mutableArc . getNextState ( ) . removeIncomingState ( state ) ; } // delete arc ' s with nextstate equal to stateid for ( MutableState inState : state . getIncomingStates ( ) ) { Iterator < MutableArc > iter = inState . getArcs ( ) . iterator ( ) ; while ( iter . hasNext ( ) ) { MutableArc arc = iter . next ( ) ; if ( arc . getNextState ( ) == state ) { iter . remove ( ) ; } } }