idx
int64
0
41.2k
question
stringlengths
73
5.81k
target
stringlengths
5
918
1,000
/**
 * Creates a new relationship between two entities.
 *
 * @param relationship the relationship to persist
 * @return the created relationship as stored
 * @throws AtlasBaseException if the relationship cannot be created
 */
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public AtlasRelationship create(AtlasRelationship relationship) throws AtlasBaseException {
    AtlasPerfTracer tracer = null;

    try {
        // Build the trace label only when perf tracing is switched on.
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            String traceName = "RelationshipREST.create(" + relationship + ")";
            tracer = AtlasPerfTracer.getPerfTracer(PERF_LOG, traceName);
        }

        return relationshipStore.create(relationship);
    } finally {
        AtlasPerfTracer.log(tracer);
    }
}
Create a new relationship between entities .
1,001
/**
 * Updates an existing relationship between two entities.
 *
 * @param relationship the relationship with updated state
 * @return the updated relationship as stored
 * @throws AtlasBaseException if the relationship cannot be updated
 */
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public AtlasRelationship update(AtlasRelationship relationship) throws AtlasBaseException {
    AtlasPerfTracer tracer = null;

    try {
        // Build the trace label only when perf tracing is switched on.
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            String traceName = "RelationshipREST.update(" + relationship + ")";
            tracer = AtlasPerfTracer.getPerfTracer(PERF_LOG, traceName);
        }

        return relationshipStore.update(relationship);
    } finally {
        AtlasPerfTracer.log(tracer);
    }
}
Update an existing relationship between entities .
1,002
/**
 * Looks up a relationship by its guid.
 *
 * @param guid guid of the relationship
 * @return the relationship with that guid
 * @throws AtlasBaseException if the lookup fails
 */
@Path("/guid/{guid}")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public AtlasRelationship getById(@PathParam("guid") String guid) throws AtlasBaseException {
    AtlasPerfTracer tracer = null;

    try {
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            String traceName = "RelationshipREST.getById(" + guid + ")";
            tracer = AtlasPerfTracer.getPerfTracer(PERF_LOG, traceName);
        }

        return relationshipStore.getById(guid);
    } finally {
        AtlasPerfTracer.log(tracer);
    }
}
Get relationship information between entities using guid .
1,003
/**
 * Deletes the relationship identified by the given guid.
 *
 * @param guid guid of the relationship to delete
 * @throws AtlasBaseException if the deletion fails
 */
@Path("/guid/{guid}")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public void deleteById(@PathParam("guid") String guid) throws AtlasBaseException {
    AtlasPerfTracer tracer = null;

    try {
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            String traceName = "RelationshipREST.deleteById(" + guid + ")";
            tracer = AtlasPerfTracer.getPerfTracer(PERF_LOG, traceName);
        }

        relationshipStore.deleteById(guid);
    } finally {
        AtlasPerfTracer.log(tracer);
    }
}
Delete a relationship between entities using guid .
1,004
/**
 * Adds the given trait instance to every entity in {@code entityGuids}.
 *
 * @param entityGuids   guids of the entities to tag; must not be null
 * @param traitInstance the trait to attach; must not be null
 * @throws RepositoryException if attaching the trait to any entity fails
 */
public void addTrait(List<String> entityGuids, ITypedStruct traitInstance) throws RepositoryException {
    Preconditions.checkNotNull(entityGuids, "entityGuids list cannot be null");
    Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");

    if (LOG.isDebugEnabled()) {
        LOG.debug("Adding a new trait={} for entities={}", traitInstance.getTypeName(), entityGuids);
    }

    // Lock every target entity up front; locks are released after the transaction commits.
    GraphTransactionInterceptor.lockObjectAndReleasePostCommit(entityGuids);

    for (String guid : entityGuids) {
        addTraitImpl(guid, traitInstance);
    }
}
Adds a new trait to the list of entities represented by their respective guids
1,005
/**
 * Finds the vertex holding the type with the given name.
 *
 * @param category type category of the lookup (logged only; the query matches on name)
 * @param typeName type name to look up
 * @return the first matching vertex, or null when no vertex carries that type name
 */
AtlasVertex findVertex(DataTypes.TypeCategory category, String typeName) {
    LOG.debug("Finding AtlasVertex for {}.{}", category, typeName);

    Iterator matches = graph.query().has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();

    if (matches == null || !matches.hasNext()) {
        return null;
    }

    return (AtlasVertex) matches.next();
}
Find the vertex for the given type category and name, returning null if no such vertex exists.
1,006
/**
 * Finds or creates a type vertex for each entry in {@code infoList}, returning the vertices in
 * the same order as the input. Existing vertices are resolved in one batched lookup; missing
 * ones are created and stamped with the identifying type properties. The type description is
 * (re)written when it differs from the current value.
 *
 * @param infoList type name/category/description tuples to materialize as vertices
 * @return one vertex per input entry, in input order
 * @throws AtlasException if vertex creation or property writes fail
 */
private List<AtlasVertex> createVertices(List<TypeVertexInfo> infoList) throws AtlasException {
    List<AtlasVertex> result = new ArrayList<>(infoList.size());

    // Resolve all existing vertices with a single batched lookup keyed by type name.
    List<String> typeNames = Lists.transform(infoList, new Function<TypeVertexInfo, String>() {
        public String apply(TypeVertexInfo input) {
            return input.getTypeName();
        }
    });
    Map<String, AtlasVertex> vertices = findVertices(typeNames);

    for (TypeVertexInfo info : infoList) {
        AtlasVertex vertex = vertices.get(info.getTypeName());
        if (!GraphHelper.elementExists(vertex)) {
            // No live vertex for this type yet - create one and stamp the identifying properties.
            LOG.debug("Adding vertex {}{}", PROPERTY_PREFIX, info.getTypeName());
            vertex = graph.addVertex();
            setProperty(vertex, Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE);
            setProperty(vertex, Constants.TYPE_CATEGORY_PROPERTY_KEY, info.getCategory());
            setProperty(vertex, Constants.TYPENAME_PROPERTY_KEY, info.getTypeName());
        }

        String newDescription = info.getTypeDescription();
        if (newDescription != null) {
            // NOTE(review): the old description is read via getPropertyKey(...) rather than from
            // the vertex itself, so this comparison may not reflect this vertex's stored value -
            // confirm against getPropertyKey's contract.
            String oldDescription = getPropertyKey(Constants.TYPEDESCRIPTION_PROPERTY_KEY);
            if (!newDescription.equals(oldDescription)) {
                setProperty(vertex, Constants.TYPEDESCRIPTION_PROPERTY_KEY, newDescription);
            }
        } else {
            LOG.debug(" type description is null ");
        }

        result.add(vertex);
    }
    return result;
}
Finds or creates type vertices with the information specified .
1,007
/**
 * Cache-miss handler: consults the type store for {@code typeName} and, if found, loads the type
 * (together with the super/attribute types accumulated during retrieval) into the cache.
 *
 * @param typeName name of the type that was not found in the cache
 * @return the resolved type, or null when the type store has no definition for it
 * @throws AtlasException if the store lookup or transient type-system creation fails
 */
public IDataType onTypeFault(String typeName) throws AtlasException {
    Context context = new Context();
    TypesDef typesDef = getTypeFromStore(typeName, context);
    if (typesDef.isEmpty()) {
        // Not present in the type store either.
        return null;
    }
    // NOTE(review): the transient type system is built from context.getTypesDef(), not the local
    // typesDef - presumably the context accumulates the full closure of required types; confirm.
    TransientTypeSystem transientTypeSystem = typeSystem.createTransientTypeSystem(context.getTypesDef(), false);
    Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
    // Populate the cache with every type that was materialized, then hand back the requested one.
    putAll(typesAdded.values());
    return typesAdded.get(typeName);
}
Check the type store for the requested type . If found in the type store the type and any required super and attribute types are loaded from the type store and added to the cache .
1,008
/**
 * Emits a throttled INFO message when the number of cache evictions since the last message
 * reaches the configured threshold, then resets the counter and timestamp. A non-positive
 * throttle disables reporting entirely.
 */
private void evictionWarningIfNeeded() {
    // Reporting disabled.
    if (evictionWarningThrottle <= 0) {
        return;
    }

    evictionsSinceWarning++;

    if (evictionsSinceWarning >= evictionWarningThrottle) {
        if (LOGGER.isInfoEnabled()) {
            // FIX: construct the DateFormat only when the message will actually be logged;
            // previously it was built unconditionally on every threshold crossing.
            DateFormat dateFormat = DateFormat.getDateTimeInstance();
            LOGGER.info("There have been " + evictionsSinceWarning
                    + " evictions from the cache since " + dateFormat.format(lastEvictionWarning));
        }
        // Reset regardless of log level, matching the original accounting.
        evictionsSinceWarning = 0;
        lastEvictionWarning = new Date();
    }
}
Logs a warning if a threshold number of evictions has occurred since the last warning .
1,009
/**
 * Returns lineage info for the entity with the given guid.
 *
 * @param guid      guid of the entity
 * @param direction lineage direction to traverse (defaults to DEFAULT_DIRECTION)
 * @param depth     traversal depth (defaults to DEFAULT_DEPTH)
 * @return lineage info for the entity
 * @throws AtlasBaseException if the lineage lookup fails
 */
@Path("/{guid}")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public AtlasLineageInfo getLineageGraph(@PathParam("guid") String guid,
                                        @QueryParam("direction") @DefaultValue(DEFAULT_DIRECTION) LineageDirection direction,
                                        @QueryParam("depth") @DefaultValue(DEFAULT_DEPTH) int depth) throws AtlasBaseException {
    AtlasPerfTracer tracer = null;

    try {
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            String traceName = "LineageREST.getLineageGraph(" + guid + "," + direction + "," + depth + ")";
            tracer = AtlasPerfTracer.getPerfTracer(PERF_LOG, traceName);
        }

        return atlasLineageService.getAtlasLineageInfo(guid, direction, depth);
    } finally {
        AtlasPerfTracer.log(tracer);
    }
}
Returns lineage info about entity .
1,010
/**
 * Initializes the global graph indexes: the backing vertex/edge mixed indexes plus composite
 * indexes for the well-known global property keys. Idempotent - the presence of the vertex-type
 * property key is used as the marker that the indexes were already created.
 *
 * @param graph graph whose management system is used to create the indexes
 * @throws RepositoryException if index creation fails (any failure rolls back the management txn)
 * @throws IndexException     if index creation fails at the index layer
 */
private void initialize(AtlasGraph graph) throws RepositoryException, IndexException {
    AtlasGraphManagement management = graph.getManagementSystem();
    try {
        // Marker key present => indexes were built on a previous run; just close the txn.
        if (management.containsPropertyKey(Constants.VERTEX_TYPE_PROPERTY_KEY)) {
            LOG.info("Global indexes already exist for graph");
            management.commit();
            return;
        }

        LOG.info("Indexes do not exist, Creating indexes for graph.");
        management.createVertexIndex(Constants.VERTEX_INDEX, Constants.BACKING_INDEX, Collections.<AtlasPropertyKey>emptyList());
        management.createEdgeIndex(Constants.EDGE_INDEX, Constants.BACKING_INDEX);

        // Global property indexes. NOTE(review): the boolean arguments are positional flags
        // (uniqueness / composite-index variants assumed - confirm against createIndexes).
        createIndexes(management, Constants.GUID_PROPERTY_KEY, String.class, true, AtlasCardinality.SINGLE, true, true);
        createIndexes(management, Constants.TIMESTAMP_PROPERTY_KEY, Long.class, false, AtlasCardinality.SINGLE, false, false);
        createIndexes(management, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class, false, AtlasCardinality.SINGLE, false, false);
        createIndexes(management, Constants.STATE_PROPERTY_KEY, String.class, false, AtlasCardinality.SINGLE, false, false);
        createIndexes(management, Constants.CREATED_BY_KEY, String.class, false, AtlasCardinality.SINGLE, true, true);
        createIndexes(management, Constants.MODIFIED_BY_KEY, String.class, false, AtlasCardinality.SINGLE, true, true);
        createIndexes(management, Constants.ENTITY_TYPE_PROPERTY_KEY, String.class, false, AtlasCardinality.SINGLE, true, true);
        createIndexes(management, Constants.SUPER_TYPES_PROPERTY_KEY, String.class, false, AtlasCardinality.SET, true, true);
        createIndexes(management, Constants.TRAIT_NAMES_PROPERTY_KEY, String.class, false, AtlasCardinality.SET, true, true);

        createFullTextIndex(management);
        createTypeStoreIndexes(management);

        commit(management);
        LOG.info("Index creation for global keys complete.");
    } catch (Throwable t) {
        // Roll back so a partially created index set is never left behind.
        rollback(management);
        throw new RepositoryException(t);
    }
}
Initializes the indices for the graph - create indices for Global AtlasVertex Keys
1,011
/**
 * Type-registration callback: creates graph indexes for each newly added type. The management
 * transaction is committed only after indexes for every type were created; a failure for any one
 * type rolls back the whole batch.
 *
 * @param dataTypes the types that were just added to the store
 * @throws AtlasException (as IndexCreationException) if index creation fails for any type
 */
public void onAdd(Collection<? extends IDataType> dataTypes) throws AtlasException {
    AtlasGraphManagement management = provider.get().getManagementSystem();

    for (IDataType dataType : dataTypes) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Creating indexes for type name={}, definition={}", dataType.getName(), dataType.getClass());
        }
        try {
            addIndexForType(management, dataType);
            LOG.info("Index creation for type {} complete", dataType.getName());
        } catch (Throwable throwable) {
            // Abort the whole batch on the first failure.
            LOG.error("Error creating index for type {}", dataType, throwable);
            rollback(management);
            throw new IndexCreationException("Error while creating index for type " + dataType, throwable);
        }
    }

    commit(management);
}
Invoked when new types are added to the store; creates indexes for each added type.
1,012
/**
 * HA callback invoked when this server becomes the active instance: (re)initializes the graph
 * indexes.
 *
 * @throws AtlasException if index initialization fails
 */
public void instanceIsActive() throws AtlasException {
    LOG.info("Reacting to active: initializing index");
    try {
        initialize();
    } catch (RepositoryException | IndexException e) {
        throw new AtlasException("Error in reacting to active on initialization", e);
    }
}
Initialize global indices for Titan graph on server activation .
1,013
/**
 * Deserializes the given JSON into a CreateUpdateEntitiesResult, extracting both the guid
 * mapping and the entity result from the same payload.
 *
 * @param json JSON returned by a create/update entities call
 * @return populated result object
 * @throws AtlasServiceException if the entity result cannot be parsed
 */
public static CreateUpdateEntitiesResult fromJson(String json) throws AtlasServiceException {
    GuidMapping mapping = AtlasType.fromJson(json, GuidMapping.class);
    EntityResult entities = EntityResult.fromString(json);

    CreateUpdateEntitiesResult ret = new CreateUpdateEntitiesResult();
    ret.setEntityResult(entities);
    ret.setGuidMapping(mapping);
    return ret;
}
Deserializes the given json into an instance of CreateUpdateEntitiesResult .
1,014
/**
 * Validates that an update to a relationship-def leaves the immutable parts of the definition
 * unchanged: the name, the relationship category, and both end definitions. Checks run in a
 * fixed order; the first mismatch wins.
 *
 * @param newRelationshipDef      the proposed definition
 * @param existingRelationshipDef the definition currently stored
 * @throws AtlasBaseException if the name, category, end1 or end2 differ between the two
 */
public static void preUpdateCheck(AtlasRelationshipDef newRelationshipDef, AtlasRelationshipDef existingRelationshipDef) throws AtlasBaseException {
    // Name must be unchanged.
    String existingName = existingRelationshipDef.getName();
    String newName = newRelationshipDef.getName();
    if (!existingName.equals(newName)) {
        throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_INVALID_NAME_UPDATE, newRelationshipDef.getGuid(), existingName, newName);
    }

    // Relationship category must be unchanged.
    RelationshipCategory existingRelationshipCategory = existingRelationshipDef.getRelationshipCategory();
    RelationshipCategory newRelationshipCategory = newRelationshipDef.getRelationshipCategory();
    if (!existingRelationshipCategory.equals(newRelationshipCategory)) {
        throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_INVALID_CATEGORY_UPDATE, newRelationshipDef.getName(), newRelationshipCategory.name(), existingRelationshipCategory.name());
    }

    // End 1 must be unchanged.
    AtlasRelationshipEndDef existingEnd1 = existingRelationshipDef.getEndDef1();
    AtlasRelationshipEndDef newEnd1 = newRelationshipDef.getEndDef1();
    if (!newEnd1.equals(existingEnd1)) {
        throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_INVALID_END1_UPDATE, newRelationshipDef.getName(), newEnd1.toString(), existingEnd1.toString());
    }

    // End 2 must be unchanged.
    AtlasRelationshipEndDef existingEnd2 = existingRelationshipDef.getEndDef2();
    AtlasRelationshipEndDef newEnd2 = newRelationshipDef.getEndDef2();
    if (!newEnd2.equals(existingEnd2)) {
        throw new AtlasBaseException(AtlasErrorCode.RELATIONSHIPDEF_INVALID_END2_UPDATE, newRelationshipDef.getName(), newEnd2.toString(), existingEnd2.toString());
    }
}
Check ends are the same and relationshipCategory is the same .
1,015
/**
 * Returns the configured server id corresponding to this Atlas instance, found by matching each
 * configured id's host:port against the local network addresses and this process's app port.
 *
 * @param configuration configuration holding the server ids and their addresses
 * @return the id whose address resolves to a local address on this process's port
 * @throws AtlasException if no configured id matches this host/port
 */
public static String selectServerId(Configuration configuration) throws AtlasException {
    String[] ids = configuration.getStringArray(HAConfiguration.ATLAS_SERVER_IDS);
    String matchingServerId = null;
    // The port this process is bound to, supplied as a system property at startup.
    int appPort = Integer.parseInt(System.getProperty(AtlasConstants.SYSTEM_PROPERTY_APP_PORT));

    for (String id : ids) {
        String hostPort = configuration.getString(HAConfiguration.ATLAS_SERVER_ADDRESS_PREFIX + id);
        if (!StringUtils.isEmpty(hostPort)) {
            InetSocketAddress socketAddress;
            try {
                socketAddress = NetUtils.createSocketAddr(hostPort);
            } catch (Exception e) {
                // Unparseable address: log and keep scanning the remaining ids.
                LOG.warn("Exception while trying to get socket address for {}", hostPort, e);
                continue;
            }
            // Match requires a resolvable, local address whose port equals this process's port.
            if (!socketAddress.isUnresolved() && NetUtils.isLocalAddress(socketAddress.getAddress()) && appPort == socketAddress.getPort()) {
                LOG.info("Found matched server id {} with host port: {}", id, hostPort);
                matchingServerId = id;
                break;
            }
        } else {
            LOG.info("Could not find matching address entry for id: {}", id);
        }
    }

    if (matchingServerId == null) {
        String msg = String.format("Could not find server id for this instance. " + "Unable to find IDs matching any local host and port binding among %s", StringUtils.join(ids, ","));
        throw new AtlasException(msg);
    }
    return matchingServerId;
}
Return the ID corresponding to this Atlas instance .
1,016
/**
 * REST endpoint returning the outputs lineage graph for the entity with the given guid, converted
 * to the legacy struct JSON via LineageUtils.toLineageStruct.
 *
 * @param guid guid of the entity
 * @return 200 response wrapping the request id and the lineage struct JSON
 */
@Path("{guid}/outputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response outputsGraph(@PathParam("guid") String guid) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> LineageResource.outputsGraph({})", guid);
    }

    AtlasPerfTracer perf = null;
    try {
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "LineageResource.outputsGraph(" + guid + ")");
        }

        // Depth -1 requests unbounded OUTPUT-direction lineage.
        AtlasLineageInfo lineageInfo = atlasLineageService.getAtlasLineageInfo(guid, LineageDirection.OUTPUT, -1);
        final String result = LineageUtils.toLineageStruct(lineageInfo, typeRegistry);

        JSONObject response = new JSONObject();
        response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
        response.put(AtlasClient.RESULTS, new JSONObject(result));

        return Response.ok(response).build();
    } catch (AtlasBaseException e) {
        LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
        throw new WebApplicationException(Servlets.getErrorResponse(e));
    } catch (WebApplicationException e) {
        // Already a web-layer error: log and rethrow unchanged.
        LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
        throw e;
    } catch (JSONException e) {
        // Response assembly failed -> 500.
        LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
    } finally {
        AtlasPerfTracer.log(perf);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== LineageResource.outputsGraph({})", guid);
        }
    }
}
Returns the outputs graph for a given entity id .
1,017
/**
 * REST endpoint returning the schema for the dataset entity with the given guid.
 *
 * @param guid guid of the dataset entity
 * @return 200 response wrapping the request id and the schema JSON
 */
@Path("{guid}/schema")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response schema(@PathParam("guid") String guid) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> LineageResource.schema({})", guid);
    }

    AtlasPerfTracer perf = null;
    try {
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "LineageResource.schema(" + guid + ")");
        }

        final String jsonResult = lineageService.getSchemaForEntity(guid);

        JSONObject response = new JSONObject();
        response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
        response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));

        return Response.ok(response).build();
    } catch (SchemaNotFoundException e) {
        // No schema defined for this entity -> 404.
        LOG.error("schema not found for {}", guid);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
    } catch (EntityNotFoundException e) {
        // Entity itself does not exist -> 404.
        LOG.error("table entity not found for {}", guid);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
    } catch (DiscoveryException | IllegalArgumentException e) {
        // Malformed request / bad guid -> 400.
        LOG.error("Unable to get schema for entity guid={}", guid, e);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
    } catch (WebApplicationException e) {
        // Already a web-layer error: log and rethrow unchanged.
        LOG.error("Unable to get schema for entity guid={}", guid, e);
        throw e;
    } catch (Throwable e) {
        LOG.error("Unable to get schema for entity={}", guid, e);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
    } finally {
        AtlasPerfTracer.log(perf);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== LineageResource.schema({})", guid);
        }
    }
}
Returns the schema for the given dataset id .
1,018
/**
 * Builds a Referenceable describing the given Falcon cluster.
 *
 * @param cluster the Falcon cluster definition
 * @return a FALCON_CLUSTER referenceable populated from the cluster
 */
public static Referenceable createClusterEntity(final org.apache.falcon.entity.v0.cluster.Cluster cluster) {
    LOG.info("Creating cluster Entity : {}", cluster.getName());

    Referenceable entity = new Referenceable(FalconDataTypes.FALCON_CLUSTER.getName());

    entity.set(AtlasClient.NAME, cluster.getName());
    entity.set(AtlasClient.DESCRIPTION, cluster.getDescription());
    entity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, cluster.getName());
    entity.set(FalconBridge.COLO, cluster.getColo());

    // Owner is only known when the cluster carries an ACL.
    if (cluster.getACL() != null) {
        entity.set(AtlasClient.OWNER, cluster.getACL().getGroup());
    }

    if (StringUtils.isNotEmpty(cluster.getTags())) {
        entity.set(FalconBridge.TAGS, EventUtil.convertKeyValueStringToMap(cluster.getTags()));
    }

    return entity;
}
Creates cluster entity
1,019
/**
 * Builds the Kafka consumer properties for the given notification type, deriving the consumer
 * group id from configuration.
 *
 * @param type notification type whose consumer is being configured
 * @return a copy of the base properties with the group id applied
 * @throws IllegalStateException if no group id is configured for the type
 */
private Properties getConsumerProperties(NotificationType type) {
    String groupIdKey = type.toString().toLowerCase() + "." + CONSUMER_GROUP_ID_PROPERTY;
    String groupId = properties.getProperty(groupIdKey);

    if (StringUtils.isEmpty(groupId)) {
        throw new IllegalStateException("No configuration group id set for the notification type " + type);
    }

    Properties consumerProps = new Properties();
    consumerProps.putAll(properties);
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

    LOG.info("Consumer property: atlas.kafka.enable.auto.commit: {}", consumerProps.getProperty("enable.auto.commit"));
    return consumerProps;
}
Get properties for consumer request
1,020
/**
 * Creates a trait type with the given name, super-traits and attributes.
 *
 * @param traitName            name of the new trait type
 * @param superTraits          names of the super traits, if any
 * @param attributeDefinitions attribute definitions for the trait
 * @return names of the types created
 * @throws AtlasServiceException if the type creation call fails
 */
public List<String> createTraitType(String traitName, ImmutableSet<String> superTraits, AttributeDefinition... attributeDefinitions) throws AtlasServiceException {
    HierarchicalTypeDefinition<TraitType> traitDef = TypesUtil.createTraitTypeDef(traitName, superTraits, attributeDefinitions);
    String traitJson = TypesSerialization.toJson(traitDef, true);

    LOG.debug("Creating trait type {} {}", traitName, traitJson);

    return createType(traitJson);
}
Creates a trait type with the specified name, super-traits and attributes.
1,021
/**
 * Lists the names of all types registered in Atlas.
 *
 * @return list of type names
 * @throws AtlasServiceException if the API call fails
 */
public List<String> listTypes() throws AtlasServiceException {
    final JSONObject jsonObject = callAPIWithQueryParams(API.LIST_TYPES, null);
    return extractResults(jsonObject, AtlasClient.RESULTS, new ExtractOperation<String, String>());
}
Returns all type names in the system
1,022
/**
 * Lists the names of all types belonging to the given category.
 *
 * @param category type category to filter by
 * @return list of matching type names
 * @throws AtlasServiceException if the API call fails
 */
public List<String> listTypes(final DataTypes.TypeCategory category) throws AtlasServiceException {
    JSONObject response = callAPIWithRetries(API.LIST_TYPES, null, new ResourceCreator() {
        // Recreated on each retry so a fresh WebResource is used per attempt.
        public WebResource createResource() {
            WebResource resource = getResource(API.LIST_TYPES.getPath());
            resource = resource.queryParam(TYPE, category.name());
            return resource;
        }
    });
    return extractResults(response, AtlasClient.RESULTS, new ExtractOperation<String, String>());
}
Returns all type names with the given category
1,023
/**
 * Partial update: sets a single attribute on the entity identified by guid.
 *
 * @param guid      guid of the entity to update
 * @param attribute name of the attribute to set
 * @param value     new value, sent as the request body
 * @return entity result describing the updated entities
 * @throws AtlasServiceException if the API call fails
 */
public EntityResult updateEntityAttribute(final String guid, final String attribute, String value) throws AtlasServiceException {
    LOG.debug("Updating entity id: {}, attribute name: {}, attribute value: {}", guid, attribute, value);
    JSONObject response = callAPIWithRetries(API.UPDATE_ENTITY_PARTIAL, value, new ResourceCreator() {
        // Recreated on each retry attempt.
        public WebResource createResource() {
            API api = API.UPDATE_ENTITY_PARTIAL;
            WebResource resource = getResource(api, guid);
            resource = resource.queryParam(ATTRIBUTE_NAME, attribute);
            return resource;
        }
    });
    return extractEntityResult(response);
}
Supports partial updates: updates a single attribute of the entity corresponding to the given guid.
1,024
/**
 * Associates the given trait with the entity identified by guid.
 *
 * @param guid            guid of the target entity
 * @param traitDefinition trait instance to attach
 * @throws AtlasServiceException if the API call fails
 */
public void addTrait(String guid, Struct traitDefinition) throws AtlasServiceException {
    String traitJson = InstanceSerialization.toJson(traitDefinition, true);
    LOG.debug("Adding trait to entity with id {} {}", guid, traitJson);
    callAPIWithBodyAndParams(API.ADD_TRAITS, traitJson, guid, URI_TRAITS);
}
Associate trait to an entity
1,025
/**
 * Removes the named trait from the entity identified by guid.
 *
 * @param guid      guid of the entity
 * @param traitName name of the trait to remove
 * @throws AtlasServiceException if the API call fails
 */
public void deleteTrait(String guid, String traitName) throws AtlasServiceException {
    callAPIWithBodyAndParams(API.DELETE_TRAITS, null, guid, TRAITS, traitName);
}
Delete a trait from the given entity
1,026
/**
 * Deletes the entities with the given guids.
 *
 * @param guids guids of the entities to delete
 * @return entity result listing the affected entities
 * @throws AtlasServiceException if the API call fails
 */
public EntityResult deleteEntities(final String... guids) throws AtlasServiceException {
    LOG.debug("Deleting entities: {}", guids);
    JSONObject jsonResponse = callAPIWithRetries(API.DELETE_ENTITIES, null, new ResourceCreator() {
        // Rebuilt per retry attempt; one repeated query parameter per guid.
        public WebResource createResource() {
            API api = API.DELETE_ENTITIES;
            WebResource resource = getResource(api);
            for (String guid : guids) {
                resource = resource.queryParam(GUID.toLowerCase(), guid);
            }
            return resource;
        }
    });
    EntityResult results = extractEntityResult(jsonResponse);
    LOG.debug("Delete entities returned results: {}", results);
    return results;
}
Delete the specified entities from the repository
1,027
/**
 * Deletes the entity identified by its type and a unique attribute name/value pair.
 *
 * @param entityType           type of the entity
 * @param uniqueAttributeName  name of the unique attribute identifying the entity
 * @param uniqueAttributeValue value of that attribute
 * @return entity result describing the deleted entities
 * @throws AtlasServiceException if the API call fails
 */
public EntityResult deleteEntity(String entityType, String uniqueAttributeName, String uniqueAttributeValue) throws AtlasServiceException {
    LOG.debug("Deleting entity type: {}, attributeName: {}, attributeValue: {}", entityType, uniqueAttributeName, uniqueAttributeValue);

    API api = API.DELETE_ENTITY;
    WebResource resource = getResource(api);
    resource = resource.queryParam(TYPE, entityType);
    resource = resource.queryParam(ATTRIBUTE_NAME, uniqueAttributeName);
    resource = resource.queryParam(ATTRIBUTE_VALUE, uniqueAttributeValue);

    // FIX: previously passed API.DELETE_ENTITIES here although the resource was built for
    // API.DELETE_ENTITY; use the same constant so status checks/error reporting match the call.
    JSONObject jsonResponse = callAPIWithResource(api, resource);

    EntityResult results = extractEntityResult(jsonResponse);
    LOG.debug("Delete entities returned results: {}", results);
    return results;
}
Supports Deletion of an entity identified by its unique attribute value
1,028
/**
 * Lists the guids of all entities of the given type.
 *
 * @param entityType type name to list entities for
 * @return list of matching entity guids
 * @throws AtlasServiceException if the API call fails
 */
public List<String> listEntities(final String entityType) throws AtlasServiceException {
    ResourceCreator creator = new ResourceCreator() {
        // Recreated on each retry attempt.
        public WebResource createResource() {
            return getResource(API.LIST_ENTITIES).queryParam(TYPE, entityType);
        }
    };

    JSONObject response = callAPIWithRetries(API.LIST_ENTITIES, null, creator);

    return extractResults(response, AtlasClient.RESULTS, new ExtractOperation<String, String>());
}
List entities for a given entity type
1,029
/**
 * Lists the names of the traits attached to the entity identified by guid.
 *
 * @param guid guid of the entity
 * @return list of trait names
 * @throws AtlasServiceException if the API call fails
 */
public List<String> listTraits(final String guid) throws AtlasServiceException {
    JSONObject jsonResponse = callAPIWithBodyAndParams(API.LIST_TRAITS, null, guid, URI_TRAITS);
    return extractResults(jsonResponse, AtlasClient.RESULTS, new ExtractOperation<String, String>());
}
List traits for a given entity identified by its GUID
1,030
/**
 * Fetches all trait definitions attached to the entity identified by guid.
 *
 * @param guid guid of the entity
 * @return the entity's traits as Struct instances
 * @throws AtlasServiceException if the API call fails
 */
public List<Struct> listTraitDefinitions(final String guid) throws AtlasServiceException {
    JSONObject response = callAPIWithBodyAndParams(API.GET_ALL_TRAIT_DEFINITIONS, null, guid, TRAIT_DEFINITIONS);
    List<JSONObject> rawDefs = extractResults(response, AtlasClient.RESULTS, new ExtractOperation<JSONObject, JSONObject>());

    // Deserialize each raw JSON trait definition into a Struct.
    ArrayList<Struct> structs = new ArrayList<>();
    for (JSONObject rawDef : rawDefs) {
        structs.add(InstanceSerialization.fromJsonStruct(rawDef.toString(), true));
    }
    return structs;
}
Get all trait definitions for an entity
1,031
/**
 * Fetches the definition of one named trait on the entity identified by guid.
 *
 * @param guid      guid of the entity
 * @param traitName name of the trait to fetch
 * @return the trait definition as a Struct
 * @throws AtlasServiceException if the API call fails or the response cannot be parsed
 */
public Struct getTraitDefinition(final String guid, final String traitName) throws AtlasServiceException {
    JSONObject response = callAPIWithBodyAndParams(API.GET_TRAIT_DEFINITION, null, guid, TRAIT_DEFINITIONS, traitName);

    try {
        String resultJson = response.getString(AtlasClient.RESULTS);
        return InstanceSerialization.fromJsonStruct(resultJson, false);
    } catch (JSONException e) {
        throw new AtlasServiceException(API.GET_TRAIT_DEFINITION, e);
    }
}
Get trait definition for a given entity and traitname
1,032
/**
 * Returns the latest {@code numResults} audit events for the given entity, in decreasing order
 * of timestamp. Delegates with a null marker argument - presumably meaning "start from the most
 * recent event"; confirm against the three-argument overload.
 *
 * @param entityId   id of the entity whose audit trail is fetched
 * @param numResults maximum number of events to return
 * @return the audit events
 * @throws AtlasServiceException if the API call fails
 */
public List<EntityAuditEvent> getEntityAuditEvents(String entityId, short numResults) throws AtlasServiceException {
    return getEntityAuditEvents(entityId, null, numResults);
}
Get the latest numResults entity audit events in decreasing order of timestamp for the given entity id
1,033
/**
 * Runs a DSL search query with paging.
 *
 * @param query  DSL query string
 * @param limit  maximum number of results to return
 * @param offset index of the first result to return
 * @return JSON array of search results
 * @throws AtlasServiceException if the API call fails or the response lacks a results array
 */
public JSONArray searchByDSL(final String query, final int limit, final int offset) throws AtlasServiceException {
    LOG.debug("DSL query: {}", query);
    JSONObject result = callAPIWithRetries(API.SEARCH_DSL, null, new ResourceCreator() {
        // Recreated on each retry attempt.
        public WebResource createResource() {
            WebResource resource = getResource(API.SEARCH_DSL);
            resource = resource.queryParam(QUERY, query);
            resource = resource.queryParam(LIMIT, String.valueOf(limit));
            resource = resource.queryParam(OFFSET, String.valueOf(offset));
            return resource;
        }
    });
    try {
        return result.getJSONArray(RESULTS);
    } catch (JSONException e) {
        throw new AtlasServiceException(e);
    }
}
Search given query DSL
1,034
/**
 * Runs a full-text search with paging, returning the raw JSON response.
 *
 * @param query  full-text query string
 * @param limit  maximum number of results to return
 * @param offset index of the first result to return
 * @return the raw JSON search response
 * @throws AtlasServiceException if the API call fails
 */
public JSONObject searchByFullText(final String query, final int limit, final int offset) throws AtlasServiceException {
    return callAPIWithRetries(API.SEARCH_FULL_TEXT, null, new ResourceCreator() {
        // Recreated on each retry attempt.
        public WebResource createResource() {
            WebResource resource = getResource(API.SEARCH_FULL_TEXT);
            resource = resource.queryParam(QUERY, query);
            resource = resource.queryParam(LIMIT, String.valueOf(limit));
            resource = resource.queryParam(OFFSET, String.valueOf(offset));
            return resource;
        }
    });
}
Search given full text search
1,035
/**
 * Compatibility wrapper: invokes the given API against a prebuilt WebResource with no request
 * body, returning the raw JSON response.
 *
 * @param api      API descriptor being invoked
 * @param resource prebuilt resource to call
 * @return the raw JSON response
 * @throws AtlasServiceException if the call fails
 */
public JSONObject callAPIWithResource(API api, WebResource resource) throws AtlasServiceException {
    return callAPIWithResource(toAPIInfo(api), resource, null, JSONObject.class);
}
Wrapper methods for compatibility
1,036
/**
 * Writes a boolean field: a 4-byte length header (1) followed by a single byte, 1 for true and
 * 0 for false.
 *
 * @param value the value to write
 * @throws BinaryWriteFailedException if the underlying write fails
 */
public void writeBoolean(boolean value) {
    try {
        // Field length in bytes, then the payload.
        buffer.writeInt(1);
        buffer.writeByte(value ? 1 : 0);
    } catch (Exception e) {
        throw new BinaryWriteFailedException(e);
    }
}
Writes primitive boolean to the output stream
1,037
/**
 * Writes a short field: a 4-byte length header (2) followed by the 2-byte value.
 *
 * @param value value to write; truncated to 16 bits by the underlying writeShort
 * @throws BinaryWriteFailedException if the underlying write fails
 */
public void writeShort(int value) {
    try {
        // Field length in bytes, then the payload.
        buffer.writeInt(2);
        buffer.writeShort(value);
    } catch (Exception e) {
        throw new BinaryWriteFailedException(e);
    }
}
Writes primitive short to the output stream
1,038
/**
 * Writes an int field: a 4-byte length header (4) followed by the 4-byte value.
 *
 * @param value value to write
 * @throws BinaryWriteFailedException if the underlying write fails
 */
public void writeInt(int value) {
    try {
        // Field length in bytes, then the payload.
        buffer.writeInt(4);
        buffer.writeInt(value);
    } catch (Exception e) {
        throw new BinaryWriteFailedException(e);
    }
}
Writes primitive integer to the output stream
1,039
/**
 * Writes a long field: a 4-byte length header (8) followed by the 8-byte value.
 *
 * @param value value to write
 * @throws BinaryWriteFailedException if the underlying write fails
 */
public void writeLong(long value) {
    try {
        // Field length in bytes, then the payload.
        buffer.writeInt(8);
        buffer.writeLong(value);
    } catch (Exception e) {
        throw new BinaryWriteFailedException(e);
    }
}
Writes primitive long to the output stream
1,040
/**
 * Writes a float field: a 4-byte length header (4) followed by the 4-byte IEEE-754 value.
 *
 * @param value value to write
 * @throws BinaryWriteFailedException if the underlying write fails
 */
public void writeFloat(float value) {
    try {
        // Field length in bytes, then the payload.
        buffer.writeInt(4);
        buffer.writeFloat(value);
    } catch (Exception e) {
        throw new BinaryWriteFailedException(e);
    }
}
Writes primitive float to the output stream
1,041
/**
 * Writes a double field: a 4-byte length header (8) followed by the 8-byte IEEE-754 value.
 *
 * @param value value to write
 * @throws BinaryWriteFailedException if the underlying write fails
 */
public void writeDouble(double value) {
    try {
        // Field length in bytes, then the payload.
        buffer.writeInt(8);
        buffer.writeDouble(value);
    } catch (Exception e) {
        throw new BinaryWriteFailedException(e);
    }
}
Writes primitive double to the output stream
1,042
/**
 * Converts seconds since the Java epoch (1970-01-01) to seconds since the PostgreSQL epoch
 * (2000-01-01), compensating for Julian/Gregorian calendar differences for very old dates.
 * The conversion is valid for any year from 100 BC onwards.
 *
 * @param seconds seconds since 1970-01-01T00:00:00Z
 * @return seconds since 2000-01-01 on PostgreSQL's calendar
 */
@SuppressWarnings("checkstyle:magicnumber")
private static long toPgSecs(final long seconds) {
    long secs = seconds;
    // Shift from the Java epoch (1970-01-01) to the PostgreSQL epoch (2000-01-01).
    secs -= 946684800L;
    // Julian/Gregorian cutover: dates before 1582-10-15 drop the 10 days skipped when the
    // Gregorian calendar was introduced.
    if (secs < -13165977600L) {
        secs -= 86400 * 10;
        // Before ~1500, correct for the extra leap days the Julian calendar has relative to the
        // proleptic Gregorian calendar (one per century not divisible by 400).
        // NOTE(review): -3155823050L is close to, but not exactly, 100 years in seconds -
        // confirm the constant against the reference implementation.
        if (secs < -15773356800L) {
            int years = (int) ((secs + 15773356800L) / -3155823050L);
            years++;
            years -= years / 4;
            secs += years * 86400;
        }
    }
    return secs;
}
Converts the given java seconds to postgresql seconds . The conversion is valid for any year 100 BC onwards .
1,043
/**
 * Bulk-saves a stream of entities via the PostgreSQL COPY protocol. The binary writer is opened
 * over a COPY output stream for the mapping's copy command and closed (completing the COPY) by
 * try-with-resources once the stream is exhausted.
 *
 * @param connection open PostgreSQL connection used for the COPY
 * @param entities   entities to persist; the stream is consumed but not closed here
 * @throws SQLException if the COPY command cannot be started
 */
public void saveAll(PGConnection connection, Stream<TEntity> entities) throws SQLException {
    try (PgBinaryWriter bw = new PgBinaryWriter(configuration.getBufferSize())) {
        bw.open(new PGCopyOutputStream(connection, mapping.getCopyCommand(), 1));
        // NOTE(review): saveEntitySynchonized (sic) is defined elsewhere in this class; its
        // per-entity synchronization semantics are not visible here - confirm before relying on
        // concurrent use of this method.
        entities.forEach(entity -> saveEntitySynchonized(bw, entity));
    }
}
Save stream of entities
1,044
/**
 * Upper-cases the leading word of a camelCase identifier, ASCII-only:
 * fooBar -> FOOBar, FooBar -> FOOBar, foo -> FOO.
 *
 * @param s identifier to transform; null or empty input is returned unchanged
 * @return the identifier with its first word upper-cased
 */
static String capitalizeFirstWordAsciiOnly(String s) {
    if (s == null || s.isEmpty()) {
        return s;
    }

    // The first word extends through the run of lowercase ASCII letters after the first char.
    int wordEnd = 1;
    while (wordEnd < s.length() && isLowerCaseAsciiOnly(s.charAt(wordEnd))) {
        wordEnd++;
    }

    return toUpperCaseAsciiOnly(s.substring(0, wordEnd)) + s.substring(wordEnd);
}
fooBar -> FOOBar; FooBar -> FOOBar; foo -> FOO
1,045
/**
 * Returns the public constructor of {@code typeElement} with the largest number of parameters,
 * or null when the type declares no public constructor.
 *
 * @param typeElement the class whose constructors are inspected
 * @return the widest public constructor, or null if none is public
 */
static ExecutableElement findLargestPublicConstructor(TypeElement typeElement) {
    // FILTER_NON_PUBLIC is the predicate applied via filter(), i.e. it keeps the public
    // constructors (defined elsewhere in this class).
    List<ExecutableElement> constructors = FluentIterable
            .from(ElementFilter.constructorsIn(typeElement.getEnclosedElements()))
            .filter(FILTER_NON_PUBLIC)
            .toList();

    // FIX: isEmpty() instead of size() == 0 - standard collection idiom.
    if (constructors.isEmpty()) {
        return null;
    }

    return PARAMETER_COUNT_ORDER.max(constructors);
}
Returns the public constructor in a given class with the largest number of arguments or null if there are no public constructors .
1,046
/**
 * Convenience overload: checks whether {@code element} is a singleton (a class exposing a public
 * static final INSTANCE field whose type is assignable from itself), using the element's own
 * declared type.
 *
 * @param types   type utilities from the processing environment
 * @param element the class to inspect
 * @return true if the element is a singleton
 */
static boolean isSingleton(Types types, TypeElement element) {
    return isSingleton(types, element, element.asType());
}
A singleton is defined by a class with a public static final field named INSTANCE with a type assignable from itself .
1,047
/**
 * Returns true if {@code type} is assignable to the Parcelable type named by
 * PARCELABLE_CLASS_NAME (presumably android.os.Parcelable - defined elsewhere).
 *
 * NOTE(review): getTypeElement(...) returns null when the Parcelable class is not on the
 * compile classpath, which would NPE here - confirm callers guarantee its presence.
 *
 * @param elements element utilities from the processing environment
 * @param types    type utilities from the processing environment
 * @param type     the type to test
 * @return true if the type implements Parcelable
 */
static boolean isParcelable(Elements elements, Types types, TypeMirror type) {
    TypeMirror parcelableType = elements.getTypeElement(PARCELABLE_CLASS_NAME).asType();
    return types.isAssignable(type, parcelableType);
}
Returns true if a type implements Parcelable
1,048
/**
 * Finds an annotation on the given element by its simple (unqualified) name.
 *
 * @param element the annotated element to inspect
 * @param name    simple name of the annotation type to find
 * @return the first matching annotation mirror, or null if none matches
 */
static AnnotationMirror getAnnotationWithSimpleName(Element element, String name) {
    for (AnnotationMirror candidate : element.getAnnotationMirrors()) {
        Element annotationElement = candidate.getAnnotationType().asElement();
        if (name.equals(annotationElement.getSimpleName().toString())) {
            return candidate;
        }
    }
    return null;
}
Finds an annotation with the given name on the given element or null if not found .
1,049
/**
 * Extracts the native library resource to the target folder and loads it.
 * <p>
 * Fixes over the original: streams are managed with try-with-resources (the original
 * leaked the resource stream if the FileOutputStream constructor threw, and leaked
 * the lock-file stream path on early failure), and a missing classpath resource now
 * returns false instead of throwing a NullPointerException.
 *
 * @param libFolderForCurrentOS resource folder for the current OS/arch
 * @param libraryFileName       platform library file name
 * @param targetFolder          folder to extract into
 * @return true if the library was extracted and loaded successfully
 */
private static boolean extractAndLoadLibraryFile(String libFolderForCurrentOS, String libraryFileName, String targetFolder) {
    String nativeLibraryFilePath = libFolderForCurrentOS + "/" + libraryFileName;
    // Unique name per extraction so concurrent JVMs don't clobber each other's copies.
    String uuid = UUID.randomUUID().toString();
    String extractedLibFileName = String.format("secp256k1-%s-%s", uuid, libraryFileName);
    String extractedLckFileName = extractedLibFileName + ".lck";
    File extractedLibFile = new File(targetFolder, extractedLibFileName);
    File extractedLckFile = new File(targetFolder, extractedLckFileName);
    try {
        // Lock file marks this extraction as in use; cleaned up on JVM exit.
        if (!extractedLckFile.exists()) {
            new FileOutputStream(extractedLckFile).close();
        }
        try (InputStream reader = Secp256k1Loader.class.getResourceAsStream(nativeLibraryFilePath);
             FileOutputStream writer = new FileOutputStream(extractedLibFile)) {
            if (reader == null) {
                // Resource not on the classpath; the original code would NPE here.
                return false;
            }
            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, bytesRead);
            }
        } finally {
            // Schedule both temp files for deletion regardless of copy success.
            extractedLibFile.deleteOnExit();
            extractedLckFile.deleteOnExit();
        }
        extractedLibFile.setReadable(true);
        extractedLibFile.setWritable(true, true);
        extractedLibFile.setExecutable(true);
        // Verify the extracted copy is byte-identical to the bundled resource.
        try (InputStream nativeIn = Secp256k1Loader.class.getResourceAsStream(nativeLibraryFilePath);
             InputStream extractedLibIn = new FileInputStream(extractedLibFile)) {
            if (!contentsEquals(nativeIn, extractedLibIn)) {
                throw new RuntimeException(String.format("Failed to write a native library file at %s", extractedLibFile));
            }
        }
        return loadNativeLibrary(targetFolder, extractedLibFileName);
    } catch (IOException e) {
        System.err.println(e.getMessage());
        return false;
    }
}
Extracts and loads the specified library file to the target folder
1,050
/**
 * Loads the native library at the given path/name via {@link System#load}.
 *
 * @return true if the file exists and was loaded; false if it is missing or linking failed
 */
private static boolean loadNativeLibrary(String path, String name) {
    File libFile = new File(path, name);
    if (!libFile.exists()) {
        return false;
    }
    try {
        System.load(libFile.getAbsolutePath());
        return true;
    } catch (UnsatisfiedLinkError e) {
        System.err.println("Failed to load native library:" + name + ". osinfo: " + OSInfo.getNativeLibFolderPathForCurrentOS());
        System.err.println(e);
        return false;
    }
}
Loads native library using the given path and name of the library .
1,051
/**
 * Locates, extracts and loads the secp256k1 native library.
 * <p>
 * Resolution order: (1) explicit path/name system properties,
 * (2) bundled resource for the current OS/arch (with a jnilib fallback on Mac),
 * extracted to a temp folder and loaded from there.
 * Sets the {@code extracted} flag on success; throws if no suitable library exists.
 *
 * @throws Exception if no native library resource is found for this OS/arch
 */
private static void loadSecp256k1NativeLibrary() throws Exception {
    // Already loaded once - nothing to do.
    if (extracted) {
        return;
    }
    // Optional overrides via system properties.
    String secp256k1NativeLibraryPath = System.getProperty("fr.acinq.secp256k1.lib.path");
    String secp256k1NativeLibraryName = System.getProperty("fr.acinq.secp256k1.lib.name");
    if (secp256k1NativeLibraryName == null) {
        secp256k1NativeLibraryName = System.mapLibraryName("secp256k1");
        // On macOS map the platform ".dylib" suffix to the ".jnilib" name the jar bundles.
        if (secp256k1NativeLibraryName != null && secp256k1NativeLibraryName.endsWith(".dylib")) {
            secp256k1NativeLibraryName = secp256k1NativeLibraryName.replace(".dylib", ".jnilib");
        }
    }
    // If an explicit path was given, try loading directly from the filesystem first.
    if (secp256k1NativeLibraryPath != null) {
        if (loadNativeLibrary(secp256k1NativeLibraryPath, secp256k1NativeLibraryName)) {
            extracted = true;
            return;
        }
    }
    // Fall back to the library bundled as a classpath resource under <package>/native/<os-arch>/.
    String packagePath = Secp256k1Loader.class.getPackage().getName().replaceAll("\\.", "/");
    secp256k1NativeLibraryPath = String.format("/%s/native/%s", packagePath, OSInfo.getNativeLibFolderPathForCurrentOS());
    boolean hasNativeLib = hasResource(secp256k1NativeLibraryPath + "/" + secp256k1NativeLibraryName);
    if (!hasNativeLib) {
        // Mac fallback: some builds ship the fixed name "libsecp256k1.jnilib".
        if (OSInfo.getOSName().equals("Mac")) {
            String altName = "libsecp256k1.jnilib";
            if (hasResource(secp256k1NativeLibraryPath + "/" + altName)) {
                secp256k1NativeLibraryName = altName;
                hasNativeLib = true;
            }
        }
    }
    if (!hasNativeLib) {
        extracted = false;
        throw new Exception(String.format("No native library is found for os.name=%s and os.arch=%s. path=%s", OSInfo.getOSName(), OSInfo.getArchName(), secp256k1NativeLibraryPath));
    }
    // Extract the resource to a temp folder and load it from there.
    String tempFolder = getTempDir().getAbsolutePath();
    if (extractAndLoadLibraryFile(secp256k1NativeLibraryPath, secp256k1NativeLibraryName, tempFolder)) {
        extracted = true;
        return;
    }
    extracted = false;
    return;
}
Loads secp256k1 native library using given path and name of the library .
1,052
/**
 * Builds an absolute URL for the given path from the current servlet mapping.
 * Outside of a request context (where no mapping exists) the path is returned unchanged.
 */
public static String absoluteHrefOf(final String path) {
    try {
        return fromCurrentServletMapping().path(path).build().toString();
    } catch (final IllegalStateException noRequestContext) {
        // Not running inside a servlet request - fall back to the relative path.
        return path;
    }
}
Returns an absolute URL for the specified path .
1,053
/**
 * Disables a job type, i.e. prevents it from being started.
 *
 * @param jobType the job type to disable
 * @param comment optional reason for disabling; stored as empty string when null
 */
public void disable(final String jobType, final String comment) {
    final String disabledComment = (comment == null) ? "" : comment;
    setValue(jobType, KEY_DISABLED, disabledComment);
}
Disables a job type, i.e. prevents it from being started.
1,054
/**
 * Returns the types of all jobs that have state information stored.
 * The underlying query is capped at 500 ms.
 */
public Set<String> findAllJobTypes() {
    return stream(collection.find().maxTime(500, TimeUnit.MILLISECONDS).spliterator(), false)
            .map(document -> document.getString(ID))
            .collect(toSet());
}
Returns all job types having state information .
1,055
/**
 * Replaces the stored document for the given value, if one exists.
 *
 * @param value    value to store; its key must not be null
 * @param maxTime  write timeout
 * @param timeUnit unit of the write timeout
 * @return true if exactly one document was modified
 * @throws IllegalArgumentException if the value's key is null
 */
public boolean update(final V value, final long maxTime, final TimeUnit timeUnit) {
    final K key = keyOf(value);
    if (key == null) {
        throw new IllegalArgumentException("Key must not be null");
    }
    final long modifiedCount = collectionWithWriteTimeout(maxTime, timeUnit)
            .replaceOne(byId(key), encode(value))
            .getModifiedCount();
    return modifiedCount == 1;
}
Updates the document if it is already present in the repository .
1,056
/**
 * Deletes the document identified by the given key, using the given write timeout.
 */
public void delete(final K key, final long maxTime, final TimeUnit timeUnit) {
    final Document selector = byId(key);
    collectionWithWriteTimeout(maxTime, timeUnit).deleteOne(selector);
}
Deletes the document identified by key .
1,057
/**
 * Builds a query document selecting by ID.
 *
 * @throws NullPointerException if key is null
 */
protected Document byId(final K key) {
    if (key == null) {
        throw new NullPointerException("Key must not be null");
    }
    return new Document(ID, key.toString());
}
Returns a query that is selecting documents by ID .
1,058
/**
 * Returns the current meta state of the given job type.
 * Unknown job types yield an empty, not-running, not-disabled JobMeta.
 * Keys prefixed "_e_" are internal and excluded from the returned meta map.
 */
public JobMeta getJobMeta(String jobType) {
    final Map<String, String> document = map.get(jobType);
    if (document == null) {
        return new JobMeta(jobType, false, false, "", emptyMap());
    }
    // Strip internal "_e_" entries from the user-visible meta data.
    final Map<String, String> meta = document.keySet().stream()
            .filter(key -> !key.startsWith("_e_"))
            .collect(toMap(key -> key, document::get));
    final boolean running = document.containsKey(KEY_RUNNING);
    final boolean disabled = document.containsKey(KEY_DISABLED);
    return new JobMeta(jobType, running, disabled, document.get(KEY_DISABLED), meta);
}
Returns the current state of the specified job type .
1,059
/**
 * Static factory creating a {@link ServiceType} from its parts.
 */
public static ServiceType serviceType(final String type, final Criticality criticality, final String disasterImpact) {
    return new ServiceType(type, criticality, disasterImpact);
}
Creates a ServiceType .
1,060
/**
 * Test-only factory for {@link EdisonApplicationProperties}.
 * Management context path is fixed to "/internal".
 */
public static EdisonApplicationProperties edisonApplicationProperties(final String title, final String group, final String environment, final String description) {
    final EdisonApplicationProperties properties = new EdisonApplicationProperties();
    properties.setTitle(title);
    properties.setGroup(group);
    properties.setEnvironment(environment);
    properties.setDescription(description);
    properties.setManagement(new Management("/internal"));
    return properties;
}
Only used in tests .
1,061
/**
 * Returns a StatusDetail for the given JobDefinition, derived from the latest
 * job executions. Any failure to load jobs is reported as an ERROR status
 * rather than propagated.
 */
public StatusDetail statusDetail(final JobDefinition jobDefinition) {
    try {
        // Fetch one more than configured so the calculator can skip a still-running job.
        final List<JobInfo> jobs = jobRepository.findLatestBy(jobDefinition.jobType(), numberOfJobs + 1);
        if (jobs.isEmpty()) {
            return statusDetailWhenNoJobAvailable(jobDefinition);
        }
        return toStatusDetail(jobs, jobDefinition);
    } catch (final Exception e) {
        LOG.error(LOAD_JOBS_EXCEPTION_MESSAGE + ": " + e.getMessage(), e);
        return StatusDetail.statusDetail(jobDefinition.jobName(), ERROR, LOAD_JOBS_EXCEPTION_MESSAGE);
    }
}
Returns a StatusDetail for a JobDefinition . The Status of the StatusDetail is calculated using the last job executions and depends on the configuration of the calculator .
1,062
/**
 * Calculates the StatusDetail from the last job executions.
 * <p>
 * jobInfos[0] is the most recent execution. If that one is still running and OK,
 * the previous execution (if any) is judged instead. Disabled jobs always report OK
 * with the disable comment. Otherwise the status depends on the judged job's own
 * status, its age relative to the definition's maxAge, and how many of the fetched
 * executions failed compared to {@code maxFailedJobs}.
 *
 * @param jobInfos      latest executions, newest first; must be non-empty
 * @param jobDefinition definition of the job being judged
 */
protected StatusDetail toStatusDetail(final List<JobInfo> jobInfos, final JobDefinition jobDefinition) {
    final Status status;
    final String message;
    final JobInfo currentJob = jobInfos.get(0);
    // If the newest job is still running without errors, judge the previous run instead.
    final JobInfo lastJob = (!currentJob.getStopped().isPresent() && currentJob.getStatus() == JobStatus.OK && jobInfos.size() > 1)
            ? jobInfos.get(1)
            : jobInfos.get(0);
    final JobMeta jobMeta = getJobMeta(jobDefinition.jobType());
    long numFailedJobs = getNumFailedJobs(jobInfos);
    if (!jobMeta.isDisabled()) {
        switch (lastJob.getStatus()) {
            case OK:
            case SKIPPED:
                if (jobTooOld(lastJob, jobDefinition)) {
                    // Last successful run is older than the allowed max age.
                    status = WARNING;
                    message = jobAgeMessage(jobDefinition);
                } else if (numFailedJobs > maxFailedJobs) {
                    status = WARNING;
                    message = format(TOO_MANY_JOBS_FAILED_MESSAGE, numFailedJobs, jobInfos.size());
                } else {
                    status = OK;
                    message = SUCCESS_MESSAGE;
                }
                break;
            case ERROR:
                // Escalate to ERROR only when the failure threshold is exceeded.
                if (numFailedJobs > maxFailedJobs) {
                    status = ERROR;
                } else {
                    status = WARNING;
                }
                // With a single-job window and threshold <= 1, report a plain error message.
                if (numberOfJobs == 1 && maxFailedJobs <= 1) {
                    message = ERROR_MESSAGE;
                } else {
                    message = format(TOO_MANY_JOBS_FAILED_MESSAGE, numFailedJobs, jobInfos.size());
                }
                break;
            case DEAD:
            default:
                status = WARNING;
                message = DEAD_MESSAGE;
        }
    } else {
        // Disabled jobs are considered OK; surface the disable comment.
        status = OK;
        message = format(JOB_DEACTIVATED_MESSAGE, jobMeta.getDisabledComment());
    }
    return StatusDetail.statusDetail(jobDefinition.jobName(), status, message, asList(link(REL_JOB, String.format("%s/jobs/%s", managementContextPath, lastJob.getJobId()), "Details")), runningDetailsFor(lastJob));
}
Calculates the StatusDetail from the last job executions .
1,063
/**
 * Counts the executions in the given list whose status is ERROR.
 */
protected final long getNumFailedJobs(final List<JobInfo> jobInfos) {
    long failed = 0;
    for (final JobInfo jobInfo : jobInfos) {
        if (JobStatus.ERROR.equals(jobInfo.getStatus())) {
            failed++;
        }
    }
    return failed;
}
Returns the number of failed jobs .
1,064
/**
 * Returns the job's start timestamp and, if the job has finished, its stop
 * timestamp, both formatted as ISO date-time.
 */
protected Map<String, String> runningDetailsFor(final JobInfo jobInfo) {
    final Map<String, String> details = new HashMap<>();
    details.put("Started", ISO_DATE_TIME.format(jobInfo.getStarted()));
    jobInfo.getStopped().ifPresent(stopped -> details.put("Stopped", ISO_DATE_TIME.format(stopped)));
    return details;
}
Returns additional information like job uri running state started and stopped timestamps .
1,065
/**
 * Determines whether the last execution has exceeded the definition's maxAge.
 * Returns false when the job has not stopped or no maxAge is configured.
 */
protected boolean jobTooOld(final JobInfo jobInfo, final JobDefinition jobDefinition) {
    final Optional<OffsetDateTime> stopped = jobInfo.getStopped();
    if (!stopped.isPresent() || !jobDefinition.maxAge().isPresent()) {
        return false;
    }
    final OffsetDateTime rerunDeadline = stopped.get().plus(jobDefinition.maxAge().get());
    return rerunDeadline.isBefore(now());
}
Calculates whether or not the last job execution is too old .
1,066
/**
 * Periodically removes stopped jobs that are no longer needed,
 * keeping only the most recent ones per the cleanup strategy.
 */
@Scheduled(fixedRate = KEEP_LAST_JOBS_CLEANUP_INTERVAL)
public void doCleanUp() {
    final List<JobInfo> allJobs = jobRepository.findAllJobInfoWithoutMessages();
    // Only stopped jobs are actually removed; running ones are left alone.
    findJobsToDelete(allJobs).forEach(candidate -> jobRepository.removeIfStopped(candidate.getJobId()));
}
Execute the cleanup of the given repository .
1,067
/**
 * Looks up a JobDefinition by job type (case-insensitive).
 */
public Optional<JobDefinition> getJobDefinition(final String jobType) {
    return jobDefinitions.stream()
            .filter(definition -> definition.jobType().equalsIgnoreCase(jobType))
            .findAny();
}
Returns an optional JobDefinition matching the given jobType .
1,068
/**
 * Starts a job of the given type asynchronously in the background.
 *
 * @param jobType the job type to start
 * @return the new job's id, or empty if the job is currently blocked
 *         (e.g. already running or disabled)
 */
public Optional<String> startAsyncJob(String jobType) {
    try {
        final JobRunnable jobRunnable = findJobRunnable(jobType);
        final JobInfo jobInfo = createJobInfo(jobType);
        // Acquire the run lock BEFORE persisting the job info, so a concurrent
        // start of the same type fails here with JobBlockedException.
        jobMetaService.aquireRunLock(jobInfo.getJobId(), jobInfo.getJobType());
        jobRepository.createOrUpdate(jobInfo);
        return Optional.of(startAsync(metered(jobRunnable), jobInfo.getJobId()));
    } catch (JobBlockedException e) {
        // Another instance holds the lock or the job is disabled; not an error.
        LOG.info(e.getMessage());
        return Optional.empty();
    }
}
Starts a job asynchronously in the background .
1,069
/**
 * Finds the latest jobs, optionally restricted to a given job type.
 *
 * @param type  optional job type filter
 * @param count maximum number of jobs to return
 */
public List<JobInfo> findJobs(final Optional<String> type, final int count) {
    return type
            .map(jobType -> jobRepository.findLatestBy(jobType, count))
            .orElseGet(() -> jobRepository.findLatest(count));
}
Find the latest jobs optionally restricted to jobs of a specified type .
1,070
/**
 * Checks every held run lock and releases it when its job is no longer alive:
 * either the job has already stopped, or its job id no longer exists in the
 * repository (stale lock).
 */
private void clearRunLocks() {
    jobMetaService.runningJobs().forEach((RunningJob runningJob) -> {
        final Optional<JobInfo> jobInfoOptional = jobRepository.findOne(runningJob.jobId);
        if (jobInfoOptional.isPresent() && jobInfoOptional.get().isStopped()) {
            // Job finished but the lock was never released - release it now.
            jobMetaService.releaseRunLock(runningJob.jobType);
            LOG.error("Clear Lock of Job {}. Job stopped already.", runningJob.jobType);
        } else if (!jobInfoOptional.isPresent()) {
            // Lock references a job id that no longer exists - stale lock.
            jobMetaService.releaseRunLock(runningJob.jobType);
            LOG.error("Clear Lock of Job {}. JobID does not exist", runningJob.jobType);
        }
    });
}
Checks all run locks and releases the lock if the job is stopped .
1,071
/**
 * Test-only factory for {@link VersionInfoProperties}.
 */
public static VersionInfoProperties versionInfoProperties(final String version, final String commit, final String urlTemplate) {
    final VersionInfoProperties properties = new VersionInfoProperties();
    properties.version = version;
    properties.commit = commit;
    properties.urlTemplate = urlTemplate;
    return properties;
}
Used for testing purposes .
1,072
/**
 * Returns a copy of this StatusDetail with the given detail added
 * (or overwritten). This instance is not modified.
 */
public StatusDetail withDetail(final String key, final String value) {
    final LinkedHashMap<String, String> copy = new LinkedHashMap<>(details);
    copy.put(key, value);
    return statusDetail(name, status, message, copy);
}
Create a copy of this StatusDetail add a detail and return the new StatusDetail .
1,073
/**
 * Returns a copy of this StatusDetail with the given detail removed.
 * This instance is not modified.
 */
public StatusDetail withoutDetail(final String key) {
    final LinkedHashMap<String, String> copy = new LinkedHashMap<>(details);
    copy.remove(key);
    return statusDetail(name, status, message, copy);
}
Create a copy of this StatusDetail remove a detail and return the new StatusDetail .
1,074
/**
 * Creates a DatasourceDependencyBuilder preconfigured with type=db, subtype=MongoDB.
 */
public static DatasourceDependencyBuilder mongoDependency(final List<Datasource> datasources) {
    final DatasourceDependencyBuilder builder = new DatasourceDependencyBuilder();
    return builder
            .withDatasources(datasources)
            .withType(DatasourceDependency.TYPE_DB)
            .withSubtype(DatasourceDependency.SUBTYPE_MONGODB);
}
Creates a ServiceDependencyBuilder with type = db and subtype = MongoDB .
1,075
/**
 * Creates a DatasourceDependencyBuilder preconfigured with type=db, subtype=Redis.
 */
public static DatasourceDependencyBuilder redisDependency(final List<Datasource> datasources) {
    final DatasourceDependencyBuilder builder = new DatasourceDependencyBuilder();
    return builder
            .withDatasources(datasources)
            .withType(DatasourceDependency.TYPE_DB)
            .withSubtype(DatasourceDependency.SUBTYPE_REDIS);
}
Creates a ServiceDependencyBuilder with type = db and subtype = Redis .
1,076
/**
 * Creates a DatasourceDependencyBuilder preconfigured with type=db, subtype=Cassandra.
 */
public static DatasourceDependencyBuilder cassandraDependency(final List<Datasource> datasources) {
    final DatasourceDependencyBuilder builder = new DatasourceDependencyBuilder();
    return builder
            .withDatasources(datasources)
            .withType(DatasourceDependency.TYPE_DB)
            .withSubtype(DatasourceDependency.SUBTYPE_CASSANDRA);
}
Creates a ServiceDependencyBuilder with type = db and subtype = Cassandra .
1,077
/**
 * Creates a DatasourceDependencyBuilder preconfigured with type=db, subtype=ElasticSearch.
 */
public static DatasourceDependencyBuilder elasticSearchDependency(final List<Datasource> datasources) {
    final DatasourceDependencyBuilder builder = new DatasourceDependencyBuilder();
    return builder
            .withDatasources(datasources)
            .withType(DatasourceDependency.TYPE_DB)
            .withSubtype(DatasourceDependency.SUBTYPE_ELASTICSEARCH);
}
Creates a ServiceDependencyBuilder with type = db and subtype = ElasticSearch .
1,078
/**
 * Creates a DatasourceDependencyBuilder preconfigured with type=queue, subtype=Kafka.
 */
public static DatasourceDependencyBuilder kafkaDependency(final List<Datasource> datasources) {
    final DatasourceDependencyBuilder builder = new DatasourceDependencyBuilder();
    return builder
            .withDatasources(datasources)
            .withType(DatasourceDependency.TYPE_QUEUE)
            .withSubtype(DatasourceDependency.SUBTYPE_KAFKA);
}
Creates a ServiceDependencyBuilder with type = queue and subtype = Kafka .
1,079
/**
 * Factory for LdapProperties, primarily used in tests.
 * The returned properties are always enabled.
 */
public static LdapProperties ldapProperties(final String host, final int port, final List<String> baseDn, final String roleBaseDn, final String rdnIdentifier, final List<String> prefix, final EncryptionType encryptionType, final String... whitelistedPaths) {
    final LdapProperties properties = new LdapProperties();
    properties.setEnabled(true);
    properties.setHost(host);
    properties.setPort(port);
    properties.setBaseDn(baseDn);
    properties.setRoleBaseDn(roleBaseDn);
    properties.setRdnIdentifier(rdnIdentifier);
    properties.setPrefixes(prefix);
    properties.setEncryptionType(encryptionType);
    properties.setWhitelistedPaths(asList(whitelistedPaths));
    return properties;
}
Creates Ldap properties . Primarily used in tests .
1,080
/**
 * Creates a ServiceDependencyBuilder for a REST dependency
 * (type=service, subtype=REST, GET, application/json).
 */
public static ServiceDependencyBuilder restServiceDependency(final String url) {
    final ServiceDependencyBuilder builder = new ServiceDependencyBuilder();
    return builder
            .withUrl(url)
            .withType(ServiceDependency.TYPE_SERVICE)
            .withSubtype(ServiceDependency.SUBTYPE_REST)
            .withMethods(singletonList("GET"))
            .withMediaTypes(singletonList("application/json"));
}
Creates a ServiceDependencyBuilder with type = service and subtype = REST .
1,081
/**
 * Creates a generic ServiceDependencyBuilder (type=service, subtype=OTHER).
 */
public static ServiceDependencyBuilder serviceDependency(final String url) {
    final ServiceDependencyBuilder builder = new ServiceDependencyBuilder();
    return builder
            .withUrl(url)
            .withType(ServiceDependency.TYPE_SERVICE)
            .withSubtype(ServiceDependency.SUBTYPE_OTHER);
}
Creates a generic ServiceDependencyBuilder with type = service and subtype = OTHER .
1,082
/**
 * Static factory creating a {@link Datasource} from node, port and resource descriptors.
 */
public static Datasource datasource(final String node, final int port, final String resource) {
    return new Datasource(node, port, resource);
}
Creates a Datasource from node port and resource descriptors .
1,083
/**
 * Creates a JobDefinition for a job that can only be started manually,
 * never by an automatic trigger (no schedule, no fixed delay).
 */
public static JobDefinition manuallyTriggerableJobDefinition(final String jobType, final String jobName, final String description, final int restarts, final Optional<Duration> maxAge) {
    return new DefaultJobDefinition(
            jobType,
            jobName,
            description,
            maxAge,
            Optional.empty(),   // no cron expression
            Optional.empty(),   // no fixed delay
            restarts,
            0,
            Optional.empty());
}
Create a JobDefinition for a job that will not be triggered automatically by a job trigger .
1,084
/**
 * Returns the frame contents including the leading 2-byte sync header.
 * The low bit of the sync word flags a variable block size.
 */
public byte[] getData() {
    byte[] data = new byte[frameData.length + 2];
    int syncWord = (FRAME_SYNC << 2);
    if (blockSizeVariable) {
        syncWord++;
    }
    IOUtils.putInt2BE(data, 0, syncWord);
    System.arraycopy(frameData, 0, data, 2, frameData.length);
    return data;
}
Returns the contents including the sync header
1,085
/**
 * Sets the ISO-8601 UTC time of the file, which must be of the form
 * YYYYMMDDTHHMMSS.sssZ (exactly 20 characters), or null to clear it.
 *
 * @throws IllegalArgumentException if the value is non-null and not 20 characters long
 */
public void setUtc(String utc) {
    if (utc == null) {
        this.utc = null;
        return;
    }
    if (utc.length() != 20) {
        throw new IllegalArgumentException("Must be of the form YYYYMMDDTHHMMSS.sssZ");
    }
    // BUG FIX: the original validated the value but never stored it.
    this.utc = utc;
}
Sets the ISO - 8601 UTC time of the file which must be YYYYMMDDTHHMMSS . sssZ or null
1,086
/**
 * Processes and tracks the next packet of the Skeleton stream.
 * The first packet of the stream is the fishead; subsequent packets are
 * either fisbones (one per tracked stream) or key-frame packets.
 *
 * @throws IllegalStateException on an unrecognised Skeleton packet type
 */
public void processPacket(OggPacket packet) {
    SkeletonPacket skel = SkeletonPacketFactory.create(packet);
    if (packet.isBeginningOfStream()) {
        // The very first packet must be the fishead.
        fishead = (SkeletonFishead) skel;
    } else if (skel instanceof SkeletonFisbone) {
        SkeletonFisbone bone = (SkeletonFisbone) skel;
        fisbones.add(bone);
        // Index bones by the serial number of the stream they describe.
        bonesByStream.put(bone.getSerialNumber(), bone);
    } else if (skel instanceof SkeletonKeyFramePacket) {
        keyFrames.add((SkeletonKeyFramePacket) skel);
    } else {
        throw new IllegalStateException("Unexpected Skeleton " + skel);
    }
    if (packet.isEndOfStream()) {
        // Seen the terminating packet - the full skeleton is now available.
        hasWholeStream = true;
    }
}
Processes and tracks the next packet for the stream
1,087
/**
 * Adds a new fisbone for the stream with the given serial number.
 *
 * @param sid serial number of the stream; must not be -1 or already registered
 * @return the newly created fisbone
 * @throws IllegalArgumentException if sid is invalid or a bone for it already exists
 */
public SkeletonFisbone addBoneForStream(int sid) {
    // BUG FIX: validate BEFORE mutating - the original added the bone to
    // fisbones first, leaving a half-registered bone behind on invalid sid.
    if (sid == -1 || bonesByStream.containsKey(sid)) {
        throw new IllegalArgumentException("Invalid / duplicate sid " + sid);
    }
    SkeletonFisbone bone = new SkeletonFisbone();
    bone.setSerialNumber(sid);
    fisbones.add(bone);
    bonesByStream.put(sid, bone);
    return bone;
}
Adds a new fisbone for the given stream
1,088
/**
 * Adds as much of the packet's data to this page as will fit, using the Ogg
 * lacing scheme: each lacing value (LV) covers up to 255 bytes, and a value
 * below 255 terminates the packet. A page holds at most 255 LVs.
 *
 * @param packet the packet to (partially) store
 * @param offset position in the packet's data to start from
 * @return the new offset into the packet's data (equal to the packet length
 *         if the whole remainder was added)
 */
protected int addPacket(OggPacket packet, int offset) {
    // Propagate the stream begin/end flags onto this page.
    if (packet.isBeginningOfStream()) {
        isBOS = true;
    }
    if (packet.isEndOfStream()) {
        isEOS = true;
    }
    int size = packet.getData().length;
    for (int i = numLVs; i < 255; i++) {
        int remains = size - offset;
        // Each lacing value covers at most 255 bytes; fewer means end-of-packet.
        int toAdd = 255;
        if (remains < 255) {
            toAdd = remains;
        }
        lvs[i] = IOUtils.fromInt(toAdd);
        tmpData.write(packet.getData(), offset, toAdd);
        numLVs++;
        offset += toAdd;
        if (toAdd < 255) {
            // A short lacing value terminates the packet on this page.
            break;
        }
    }
    return offset;
}
Adds as much of the packet's data as will fit on this page.
1,089
/**
 * Checks whether the page's recorded CRC matches the CRC computed over the
 * header (with a zeroed CRC field) plus the page data.
 * A recorded checksum of zero is treated as "not set" and considered valid.
 */
public boolean isChecksumValid() {
    if (checksum == 0) {
        return true;
    }
    int computed = CRCUtils.getCRC(getHeader());
    if (data != null && data.length > 0) {
        computed = CRCUtils.getCRC(data, computed);
    }
    return checksum == computed;
}
Is the checksum for the page valid?
1,090
/**
 * Does this page have room for the given number of bytes?
 * Each 255 bytes (or remainder) needs one lacing value, and a page
 * can hold at most 255 lacing values.
 */
protected boolean hasSpaceFor(int bytes) {
    int requiredLVs = (int) Math.ceil(bytes / 255.0);
    return numLVs + requiredLVs <= 255;
}
Does this Page have space for the given number of bytes?
1,091
/**
 * Returns the size of the page's payload (sum of all lacing values),
 * excluding the page header.
 */
public int getDataSize() {
    int total = 0;
    for (int i = 0; i < numLVs; i++) {
        total += IOUtils.toInt(lvs[i]);
    }
    return total;
}
How big is the page excluding headers?
1,092
/**
 * Builds the Ogg page header with a zeroed CRC field
 * (the CRC is computed over this form and filled in later).
 * Layout: bytes 0-3 "OggS", 4 version, 5 flags, 6-13 granule position,
 * 14-17 stream serial, 18-21 sequence number, 22-25 CRC (left zero),
 * 26 lacing-value count, 27+ lacing values.
 */
protected byte[] getHeader() {
    byte[] header = new byte[MINIMUM_PAGE_SIZE + numLVs];
    // Capture pattern "OggS"
    header[0] = (byte) 'O';
    header[1] = (byte) 'g';
    header[2] = (byte) 'g';
    header[3] = (byte) 'S';
    // Stream structure version, always 0
    header[4] = 0;
    // Header type flags: bit 0 = continued packet, bit 1 = BOS, bit 2 = EOS
    byte flags = 0;
    if (isContinue) {
        flags += 1;
    }
    if (isBOS) {
        flags += 2;
    }
    if (isEOS) {
        flags += 4;
    }
    header[5] = flags;
    IOUtils.putInt8(header, 6, granulePosition);
    IOUtils.putInt4(header, 14, sid);
    IOUtils.putInt4(header, 18, seqNum);
    // Bytes 22-25 (CRC) intentionally left as zero.
    header[26] = IOUtils.fromInt(numLVs);
    System.arraycopy(lvs, 0, header, MINIMUM_PAGE_SIZE, numLVs);
    return header;
}
Gets the header but with a blank CRC field
1,093
/**
 * Creates the appropriate high-level packet for this stream's codec.
 * Returns null for FLAC, whose headers are handled separately.
 *
 * @throws IllegalArgumentException for unsupported stream types
 */
protected OggStreamPacket createNext(OggPacket packet) {
    if (type == OggStreamIdentifier.OGG_VORBIS) {
        return VorbisPacketFactory.create(packet);
    }
    if (type == OggStreamIdentifier.SPEEX_AUDIO) {
        return SpeexPacketFactory.create(packet);
    }
    if (type == OggStreamIdentifier.OPUS_AUDIO) {
        return OpusPacketFactory.create(packet);
    }
    if (type == OggStreamIdentifier.OGG_FLAC) {
        // FLAC header packets are parsed elsewhere (see populate()).
        return null;
    }
    throw new IllegalArgumentException("Unsupported stream of type " + type);
}
Creates an appropriate high level packet
1,094
/**
 * Populates this stream's headers from the next header packet.
 * FLAC carries its tags in a single dedicated packet; other codecs deliver a
 * tags header and (optionally) a setup header via createNext().
 *
 * @param packet the next header packet of the stream
 * @return true if more header packets are still expected, false when the
 *         headers are complete
 * @throws IllegalArgumentException if the packet is not a header packet
 */
public boolean populate(OggPacket packet) {
    if (type == OggStreamIdentifier.OGG_FLAC) {
        // FLAC: the one and only extra header packet holds the tags.
        if (tags == null) {
            tags = new FlacTags(packet);
            return true;
        } else {
            return false;
        }
    }
    OggStreamPacket sPacket = createNext(packet);
    if (sPacket instanceof OggAudioTagsHeader) {
        tags = (OggAudioTagsHeader) sPacket;
        // Vorbis still needs its setup header after the tags; others are done.
        if (type == OggStreamIdentifier.OGG_VORBIS) {
            return true;
        } else {
            return false;
        }
    }
    if (sPacket instanceof OggAudioSetupHeader) {
        setup = (OggAudioSetupHeader) sPacket;
        return false;
    }
    throw new IllegalArgumentException("Expecting header packet but got " + sPacket);
}
Populates with the next header
1,095
/**
 * Reads a big-endian, UTF-8-style variable-length integer: 7 value bits per
 * byte, with the high bit set on every byte except the last. Returns 0 on an
 * empty stream.
 *
 * @throws IOException if reading from the stream fails
 */
public static long readUE7(InputStream stream) throws IOException {
    long value = 0;
    int b;
    while ((b = stream.read()) >= 0) {
        value <<= 7;
        if ((b & 0x80) != 0) {
            // Continuation byte: take the low 7 bits and keep reading.
            value += (b & 0x7f);
        } else {
            // Final byte terminates the number.
            value += b;
            break;
        }
    }
    return value;
}
Gets the integer value that is stored in a UTF-8-like variable-length fashion: big-endian, with the high bit of each byte indicating whether the value continues.
1,096
/**
 * Truncates the string at the first NUL character, removing any null padding.
 * Strings without a NUL are returned unchanged.
 */
public static String removeNullPadding(String str) {
    int firstNul = str.indexOf('\0');
    return (firstNul < 0) ? str : str.substring(0, firstNul);
}
Strips off any null padding, if present, from the string.
1,097
/**
 * Writes the string to the stream encoded as UTF-8.
 *
 * @throws IOException if writing fails
 */
public static void writeUTF8(OutputStream out, String str) throws IOException {
    out.write(str.getBytes(UTF8));
}
Writes the string out as UTF - 8
1,098
/**
 * Checks whether {@code within} contains exactly the {@code wanted} bytes
 * starting at {@code withinOffset}. An empty pattern always matches.
 */
public static boolean byteRangeMatches(byte[] wanted, byte[] within, int withinOffset) {
    for (int i = 0; i < wanted.length; i++) {
        if (within[withinOffset + i] != wanted[i]) {
            return false;
        }
    }
    return true;
}
Checks to see if the wanted byte pattern is found in the within bytes from the given offset
1,099
/**
 * Converts our internal stream type to Tika's MediaType.
 * Unknown streams map to the generic Ogg media type.
 */
protected static MediaType toMediaType(OggStreamType type) {
    if (type == OggStreamIdentifier.UNKNOWN) {
        return OGG_GENERAL;
    }
    return MediaType.parse(type.mimetype);
}
Converts from our type to Tika s type